Learning how to time pulling data from multiple URLs. The program fetches a set of sites one after another and reports, for each page, the size of the body and how long it took to read, plus the total elapsed time for the whole run.


package main

import (
	"fmt"
	"io"
	"net/http"
	"time"
)

// Data holds the size of a fetched page body and how long it took to read.
type Data struct {
	length   int
	duration time.Duration
}

// getPage downloads url and reports the body size and the time spent
// reading the response body (the HTTP request itself is not included
// in the measurement).
func getPage(url string) (Data, error) {
	var d Data

	resp, err := http.Get(url)
	if err != nil {
		return d, err
	}
	defer resp.Body.Close()

	start := time.Now()
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return d, err
	}

	d.length = len(body)
	d.duration = time.Since(start)
	return d, nil
}

func main() {
	sites := map[string]string{
		"Google":    "http://google.com",
		"Yahoo":     "http://yahoo.com",
		"Bing":      "http://bing.com",
		"Apple":     "http://apple.com",
		"Samsung":   "http://samsung.com",
		"Microsoft": "http://microsoft.com",
		"Ubuntu":    "http://ubuntu.com",
		"Verizon":   "http://verizonwireless.com",
		"Comcast":   "http://comcast.com",
	}

	start := time.Now()
	for name, url := range sites {
		data, err := getPage(url)
		if err != nil {
			fmt.Printf("%s %s\n", name, err)
			continue // skip the stats line for a failed fetch
		}
		fmt.Printf("%-10s %6.2f KB %7.2f ms\n",
			name,
			float32(data.length)/1024,
			float32(data.duration)/float32(time.Millisecond))
	}
	fmt.Printf("Total time: %.3f seconds\n", time.Since(start).Seconds())
}
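
Assuming the file is saved as main.go, it can be run with go run main.go. Each output line pairs a site name with the size of the body read (in KB) and the time spent reading it (in ms); the final line reports the total elapsed time, in seconds, for all of the sequential fetches.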