Project 14: HTTP Load Tester
Build an HTTP Load Tester
Generate concurrent requests and report throughput and latency.
N workers -> request loop -> result channel -> aggregate metrics
Full main.go
package main
import (
	"flag"
	"fmt"
	"io"
	"net/http"
	"os"
	"sync"
	"time"
)
// result is a single request outcome sent from a worker goroutine to the
// aggregator in main.
type result struct {
ok bool // true when the request completed with a status code below 500
dur time.Duration // wall-clock latency of the attempt (measured even on error)
}
func main() {
url := flag.String("url", "http://localhost:8080/health", "target URL")
concurrency := flag.Int("c", 20, "concurrent workers")
requests := flag.Int("n", 500, "total requests")
timeout := flag.Duration("t", 3*time.Second, "request timeout")
flag.Parse()
client := &http.Client{Timeout: *timeout}
results := make(chan result, *requests)
start := time.Now()
var wg sync.WaitGroup
jobs := make(chan struct{}, *requests)
for i := 0; i < *requests; i++ {
jobs <- struct{}{}
}
close(jobs)
for w := 0; w < *concurrency; w++ {
wg.Add(1)
go func() {
defer wg.Done()
for range jobs {
t0 := time.Now()
resp, err := client.Get(*url)
d := time.Since(t0)
if err != nil {
results <- result{ok: false, dur: d}
continue
}
_ = resp.Body.Close()
results <- result{ok: resp.StatusCode < 500, dur: d}
}
}()
}
wg.Wait()
close(results)
var okCount int
var min, max, sum time.Duration
min = 1<<63 - 1
for r := range results {
if r.ok {
okCount++
}
if r.dur < min {
min = r.dur
}
if r.dur > max {
max = r.dur
}
sum += r.dur
}
elapsed := time.Since(start)
avg := time.Duration(int64(sum) / int64(*requests))
rps := float64(*requests) / elapsed.Seconds()
fmt.Printf("target: %s\n", *url)
fmt.Printf("requests: %d, success: %d, failed: %d\n", *requests, okCount, *requests-okCount)
fmt.Printf("latency min/avg/max: %v/%v/%v\n", min, avg, max)
fmt.Printf("elapsed: %v, throughput: %.2f req/s\n", elapsed, rps)
if okCount == 0 {
os.Exit(1)
}
}Run
Run

go run . -url http://localhost:8080/health -c 50 -n 5000

Step-by-Step Explanation
- Model jobs, workers, and outputs explicitly.
- Bound concurrency using worker pools and buffered channels.
- Use sync.WaitGroup for lifecycle control.
- Aggregate worker results in one place.
- Verify behavior under both normal and failure paths.
Code Anatomy
- Producer pushes jobs into a channel.
- Workers consume jobs and emit results.
- Aggregator merges results and prints summary.
Learning Goals
- Build leak-free goroutine patterns.
- Balance throughput and resource limits.
- Understand fan-out/fan-in architecture.