Learning how to time pulling data from multiple URLs...

package main

import (
    "fmt"
    "io/ioutil"
    "net/http"
    "time"
)

// Data holds the size of a downloaded body and how long it took to read.
type Data struct {
    length   int
    duration time.Duration
}

// getPage fetches url and returns the body length and the time spent
// reading the response body.
func getPage(url string) (Data, error) {
    var d Data
    resp, err := http.Get(url)
    if err != nil {
        return d, err
    }
    defer resp.Body.Close()

    // Time only the body download; connection setup and the request/response
    // headers have already happened inside http.Get.
    start := time.Now()
    body, err := ioutil.ReadAll(resp.Body)
    if err != nil {
        return d, err
    }
    d.length = len(body)
    d.duration = time.Since(start)
    return d, nil
}

func main() {
    sites := map[string]string{
        "Google": "http://google.com",
        "Yahoo":  "http://yahoo.com",
        "Bing":   "http://bing.com",
    }

    start := time.Now()
    for name, url := range sites {
        data, err := getPage(url)
        if err != nil {
            fmt.Printf("%s: %s\n", name, err)
            continue // skip the stats line for a failed fetch
        }
        fmt.Printf("%-10s %6.2f KB %7.2f ms\n",
            name,
            float32(data.length)/1024,
            float32(data.duration)/float32(time.Millisecond))
    }
    fmt.Printf("Total time: %.3f seconds\n", time.Since(start).Seconds())
}
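
Because the program's goal is timing fetches from multiple URLs, a natural follow-up is to issue the requests concurrently, so the total wall-clock time approaches the slowest single fetch instead of the sum of all of them. Below is a minimal sketch of that idea using one goroutine per site and a channel to collect results; the result struct, the results channel, and the fetchStart timer are illustrative additions, not part of the original file.

package main

import (
    "fmt"
    "io/ioutil"
    "net/http"
    "time"
)

// result pairs a site name with its fetch statistics or an error.
// This type is an illustrative addition for the concurrent sketch.
type result struct {
    name     string
    length   int
    duration time.Duration
    err      error
}

func main() {
    sites := map[string]string{
        "Google": "http://google.com",
        "Yahoo":  "http://yahoo.com",
        "Bing":   "http://bing.com",
    }

    start := time.Now()
    results := make(chan result, len(sites))

    // Launch one goroutine per site; each sends its outcome on the channel.
    for name, url := range sites {
        go func(name, url string) {
            r := result{name: name}
            fetchStart := time.Now()
            resp, err := http.Get(url)
            if err != nil {
                r.err = err
                results <- r
                return
            }
            defer resp.Body.Close()
            body, err := ioutil.ReadAll(resp.Body)
            if err != nil {
                r.err = err
                results <- r
                return
            }
            r.length = len(body)
            r.duration = time.Since(fetchStart)
            results <- r
        }(name, url)
    }

    // Collect exactly one result per site.
    for range sites {
        r := <-results
        if r.err != nil {
            fmt.Printf("%s: %s\n", r.name, r.err)
            continue
        }
        fmt.Printf("%-10s %6.2f KB %7.2f ms\n",
            r.name,
            float32(r.length)/1024,
            float32(r.duration)/float32(time.Millisecond))
    }
    fmt.Printf("Total time: %.3f seconds\n", time.Since(start).Seconds())
}

A sync.WaitGroup would work equally well; the channel is buffered to len(sites) so the goroutines can send their results without blocking while the collection loop stays a simple counted receive.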