
curl & wget — Street-Level Ops

Real-world workflows for health checks, API testing, debugging SSL, and downloading artifacts in production.

Health Check Scripts

# Basic health check with exit code
curl --silent --fail --connect-timeout 5 --max-time 10 \
  https://api.example.com/health || echo "UNHEALTHY"

# Health check with status code capture
status=$(curl --silent --output /dev/null --write-out '%{http_code}' \
  --connect-timeout 5 --max-time 10 \
  https://api.example.com/health)

if [ "$status" -ne 200 ]; then
  echo "ALERT: /health returned $status"
  # Send to Slack, PagerDuty, etc.
fi

# Check multiple endpoints
for path in /health /ready /metrics; do
  code=$(curl --silent --output /dev/null --write-out '%{http_code}' \
    --connect-timeout 3 "https://api.example.com${path}")
  printf '%-15s %s\n' "$path" "$code"
done

# Output:
# /health         200
# /ready          200
# /metrics        200

# Docker HEALTHCHECK in Dockerfile
# (shell form: the `|| exit 1` runs under the container's /bin/sh -c,
#  so curl must exist inside the image — see the HEALTHCHECK section below)
HEALTHCHECK --interval=30s --timeout=5s --retries=3 \
  CMD curl -sf http://localhost:8000/health || exit 1

# Kubernetes liveness probe equivalent
# NOTE(review): an httpGet probe is usually preferred over exec+curl since it
# needs no curl binary in the image — confirm for your cluster setup.
# livenessProbe:
#   exec:
#     command: ["curl", "-sf", "http://localhost:8000/health"]
#   initialDelaySeconds: 10
#   periodSeconds: 30

API Smoke Tests

#!/bin/bash
# Smoke test script for a REST API.
# FIX: the shebang must be the very first line of the file to take effect;
# the original had a comment above it.
BASE="https://api.example.com"
# Token read from disk so the secret never appears in `ps` output or history.
TOKEN="Bearer $(cat /etc/app/token)"
FAIL=0

# check METHOD PATH EXPECTED_STATUS [JSON_BODY]
# Issues one request against $BASE with $TOKEN and sets the global FAIL
# flag when the returned HTTP status differs from EXPECTED_STATUS.
check() {
  local method=$1 path=$2 expected=$3 body=$4
  local args=(-s -o /dev/null -w "%{http_code}" --connect-timeout 5)
  args+=(-H "Authorization: $TOKEN")
  [ -n "$body" ] && args+=(-H "Content-Type: application/json" -d "$body")
  [ "$method" != "GET" ] && args+=(-X "$method")

  # local declared separately so curl's exit status is not masked
  local code
  code=$(curl "${args[@]}" "${BASE}${path}")
  if [ "$code" -ne "$expected" ]; then
    echo "FAIL: $method $path expected $expected got $code"
    FAIL=1
  else
    # BUG FIX: original printed "$path$CODE" with no separator ("/health200")
    echo "OK:   $method $path ($code)"
  fi
}

# Exercise the full CRUD cycle: read, create, read back, delete,
# then confirm the resource is gone (expect 404).
check GET  /health 200
check GET  /api/v1/users 200
check POST /api/v1/users 201 '{"name":"smoketest","email":"test@example.com"}'
check GET  /api/v1/users/smoketest 200
check DELETE /api/v1/users/smoketest 204
check GET  /api/v1/users/smoketest 404

# Non-zero exit if any check above set the FAIL flag.
exit $FAIL

Debugging SSL Certificate Issues

# View the full certificate chain
# (awk prints the span between "Server certificate" and the "---" divider in
#  curl's verbose output; NOTE(review): exact markers vary by TLS backend)
curl -vvI https://example.com 2>&1 | awk '/Server certificate/,/---/'

# Check certificate expiration
# (`echo |` closes s_client's stdin so it exits right after the handshake)
echo | openssl s_client -servername example.com -connect example.com:443 2>/dev/null | \
  openssl x509 -noout -dates

# Output:
# notBefore=Jan 15 00:00:00 2024 GMT
# notAfter=Apr 15 23:59:59 2024 GMT

# Test with a specific CA bundle (e.g. an internal/private CA)
curl --cacert /etc/ssl/certs/internal-ca.pem https://internal.example.com

# Diagnose certificate chain issues
curl -v https://broken-chain.example.com 2>&1 | grep -i "verify\|certificate\|error"

# Check if a certificate matches its key
# NOTE(review): the -modulus trick works only for RSA keys; for EC keys
# compare `openssl pkey -pubout` output instead. MD5 here is just a
# convenience fingerprint, not a security property.
openssl x509 -noout -modulus -in cert.pem | md5sum
openssl rsa -noout -modulus -in key.pem | md5sum
# If the hashes match, cert and key are a pair

# Test mTLS connection (client presents its own certificate to the server)
curl --cert client.pem --key client-key.pem \
  --cacert ca.pem \
  https://mtls.example.com/status

# Check TLS version and cipher negotiated
curl -vvI https://example.com 2>&1 | grep -i "ssl connection\|tls\|cipher"

Downloading with Retry

# Retry 3 times with delay
# (--retry alone only retries transient failures; see --retry-all-errors below)
curl --retry 3 --retry-delay 5 -O \
  https://releases.example.com/app-v2.3.tar.gz

# Retry on all errors (including HTTP 5xx)
# NOTE(review): --retry-all-errors needs a reasonably recent curl — confirm
# the minimum version against your fleet before relying on it.
curl --retry 5 --retry-all-errors --retry-delay 2 \
  -O https://releases.example.com/app-v2.3.tar.gz

# Download with checksum verification
# (assumes the .sha256 file references the same filename that -O saved)
curl -sO https://releases.example.com/app-v2.3.tar.gz
curl -s https://releases.example.com/app-v2.3.tar.gz.sha256 | sha256sum -c -

# wget with retry and mirror-safe options
wget --retry-connrefused --waitretry=5 --tries=5 \
  https://releases.example.com/app-v2.3.tar.gz

# Resume a partial download (wget -c / curl -C - continue from current size)
wget -c https://releases.example.com/large.iso
curl -C - -O https://releases.example.com/large.iso

Measuring TTFB and Connection Timing

# Create a timing format file for reuse
# Create a timing format file for reuse.
# FIX: labels are now right-aligned to one column (the original template had
# inconsistent indentation). The quoted 'EOF' keeps %{...} literal for curl -w.
cat > /tmp/curl-timing.txt << 'EOF'
   DNS Lookup:  %{time_namelookup}s
  TCP Connect:  %{time_connect}s
TLS Handshake:  %{time_appconnect}s
         TTFB:  %{time_starttransfer}s
   Total Time:  %{time_total}s
Download Size:  %{size_download} bytes
  HTTP Status:  %{http_code}
    Remote IP:  %{remote_ip}
EOF

# Use it (-w @file reads the output template from a file)
curl -o /dev/null -s -w "@/tmp/curl-timing.txt" https://api.example.com/health

# Quick one-liner for TTFB (time to first byte of the response)
curl -o /dev/null -s -w "TTFB: %{time_starttransfer}s\n" https://api.example.com/

# Compare TTFB across multiple endpoints
# (includes DNS + TCP + TLS time; run from the vantage point you care about)
for url in https://api.example.com/ https://api.eu.example.com/ https://api.ap.example.com/; do
  TTFB=$(curl -o /dev/null -s -w "%{time_starttransfer}" "$url")
  printf "%-40s TTFB: %ss\n" "$url" "$TTFB"
done

# Output:
# https://api.example.com/                 TTFB: 0.125s
# https://api.eu.example.com/              TTFB: 0.340s
# https://api.ap.example.com/              TTFB: 0.510s

# Repeated measurement for latency variance
# BUG FIX: the original printed a[1]/a[NR] — the FIRST and LAST samples —
# labeled as min/max. Track a running min/max instead, and guard NR==0 so
# an unreachable host doesn't trigger a divide-by-zero in the END block.
for i in {1..10}; do
  curl -o /dev/null -s -w "%{time_total}\n" --connect-timeout 2 https://api.example.com/health
done | awk 'NR == 1 { min = $1; max = $1 }
  { sum += $1; if ($1 < min) min = $1; if ($1 > max) max = $1 }
  END { if (NR) printf "avg: %.3f  min: %.3f  max: %.3f\n", sum/NR, min, max }'

Sending Webhooks

# Slack webhook (the URL path IS the secret — keep it out of logs and repos)
curl -s -X POST https://hooks.slack.com/services/T00/B00/xxx \
  -H "Content-Type: application/json" \
  -d '{"text": "Deploy v2.3 complete on prod"}'

# PagerDuty event
curl -s -X POST https://events.pagerduty.com/v2/enqueue \
  -H "Content-Type: application/json" \
  -d '{
    "routing_key": "service-key-here",
    "event_action": "trigger",
    "payload": {
      "summary": "CPU > 95% on web-01",
      "severity": "critical",
      "source": "monitoring"
    }
  }'

# Generic webhook with retry
# NOTE(review): interpolating $VERSION/$USER into the JSON body breaks (or
# allows injection) if a value contains quotes — prefer building the payload
# with `jq -n --arg` when inputs aren't fully controlled.
curl --retry 3 --retry-delay 2 --retry-all-errors \
  -s -X POST https://webhook.example.com/deploy \
  -H "Content-Type: application/json" \
  -H "X-Webhook-Secret: $WEBHOOK_SECRET" \
  -d "{\"version\": \"$VERSION\", \"env\": \"prod\", \"deployer\": \"$USER\"}"

Testing Load Balancers

# Send requests with specific Host header (test without DNS)
curl -H "Host: api.example.com" http://10.0.1.100/health

# Hit each backend directly to compare responses
# (brace range expands to 10.0.1.10 through 10.0.1.14 — five backends)
for ip in 10.0.1.{10..14}; do
  echo "=== $ip ==="
  curl -s -H "Host: api.example.com" "http://${ip}/health" | jq .
done

# Check which backend is serving (look for X-Backend header)
# (20 HEAD requests, tallied per backend — a rough view of the LB spread)
for i in $(seq 1 20); do
  curl -sI https://api.example.com/health 2>/dev/null | grep -i "x-backend\|x-served-by"
done | sort | uniq -c

# Output:
#       7 X-Backend: web-01
#       6 X-Backend: web-02
#       7 X-Backend: web-03

# Test sticky sessions
# (-c saves cookies, -b sends them; the first call has an empty jar, the
#  second should replay any Set-Cookie from the first)
curl -c cookies.txt -b cookies.txt -sI https://api.example.com/ | grep -i "set-cookie\|x-backend"
curl -c cookies.txt -b cookies.txt -sI https://api.example.com/ | grep -i "x-backend"
# Should hit the same backend if sticky sessions are configured

JWT Token Workflow

Gotcha: JWT exp claims are Unix timestamps in seconds, not milliseconds. If you compare exp against date +%s%3N (milliseconds), every token looks expired. Always use date +%s (seconds) for JWT expiry checks.

# Login and capture token
# NOTE(review): $PASS is interpolated into the -d argument, so it is visible
# in `ps` output while curl runs (and breaks if it contains quotes) — prefer
# `-d @file` or stdin (`-d @-`) for sensitive credentials, and avoid set -x.
TOKEN=$(curl -s -X POST https://auth.example.com/login \
  -H "Content-Type: application/json" \
  -d '{"username": "admin", "password": "'"$PASS"'"}' | jq -r '.token')

# Use the token on subsequent requests
curl -s -H "Authorization: Bearer $TOKEN" \
  https://api.example.com/protected/resource | jq .

# Decode JWT payload (middle segment).
# BUG FIX: JWT segments are *unpadded base64url* — plain `base64 -d` rejects
# the '-'/'_' alphabet and missing '=' padding for most real tokens (the
# original hid that failure with 2>/dev/null and printed nothing).
# Translate the alphabet and restore padding before decoding.
jwt_payload() {
  local seg
  seg=$(printf '%s' "$1" | cut -d. -f2 | tr '_-' '/+')
  while [ $(( ${#seg} % 4 )) -ne 0 ]; do seg="${seg}="; done
  printf '%s' "$seg" | base64 -d 2>/dev/null
}

jwt_payload "$TOKEN" | jq .

# Output:
# {
#   "sub": "admin",
#   "exp": 1710892800,
#   "iat": 1710806400,
#   "roles": ["admin"]
# }

# Check token expiration (exp is Unix *seconds* — compare against date +%s)
EXP=$(jwt_payload "$TOKEN" | jq -r '.exp // empty' 2>/dev/null)
NOW=$(date +%s)
if [ -z "$EXP" ]; then
  # Robustness: don't feed an empty EXP into the numeric comparison below
  echo "Could not read exp claim from token" >&2
elif [ "$NOW" -gt "$EXP" ]; then
  echo "Token expired $(( (NOW - EXP) / 60 )) minutes ago"
else
  echo "Token valid for $(( (EXP - NOW) / 60 )) more minutes"
fi

Bulk API Operations

# Create multiple resources from a file (one JSON object per line)
while IFS= read -r line; do
  curl -s -X POST https://api.example.com/items \
    -H "Content-Type: application/json" \
    -H "Authorization: Bearer $TOKEN" \
    -d "$line"
  echo  # newline after each response
done < items.jsonl

# Parallel API calls with xargs (-P 5 = five concurrent requests)
# FIX: read the id list via redirection instead of `cat file | xargs` (UUOC)
xargs -P 5 -I{} \
  curl -s "https://api.example.com/items/{}" \
    -H "Authorization: Bearer $TOKEN" \
    -o "responses/{}.json" < item_ids.txt

# Delete multiple resources (3 parallel workers; -w logs one line per delete)
xargs -P 3 -I{} \
  curl -s -X DELETE "https://api.example.com/items/{}" \
    -H "Authorization: Bearer $TOKEN" \
    -w "DELETE {} → %{http_code}\n" -o /dev/null < ids_to_delete.txt

# Paginate through all results
PAGE=1
while true; do
  RESPONSE=$(curl -s "https://api.example.com/items?page=$PAGE&limit=100" \
    -H "Authorization: Bearer $TOKEN")
  COUNT=$(echo "$RESPONSE" | jq '.items | length')
  echo "$RESPONSE" | jq -r '.items[] | [.id, .name] | @tsv'
  [ "$COUNT" -lt 100 ] && break
  PAGE=$((PAGE + 1))
done

Mirroring a Website with wget

# Full mirror for offline browsing
wget --mirror \
  --convert-links \
  --adjust-extension \
  --page-requisites \
  --no-parent \
  --wait=1 \
  --random-wait \
  https://docs.example.com/

# Flags explained:
# --mirror          = -r -N -l inf --no-remove-listing
# --convert-links   = fix internal links for local viewing
# --adjust-extension = save .html extension
# --page-requisites = download CSS, JS, images
# --no-parent       = stay within the directory
# --wait/random-wait = be polite, do not hammer the server

# Mirror with bandwidth limit (k = kilobytes/second)
wget --mirror --convert-links --page-requisites \
  --limit-rate=500k \
  https://docs.example.com/

# Download only specific file types (-A = comma-separated accept patterns)
wget -r -A "*.pdf,*.doc" --no-parent \
  https://docs.example.com/reports/

# Resume an interrupted mirror
# NOTE(review): --mirror's timestamping (-N) interacts with -c on files that
# changed server-side — confirm behavior for your wget version.
wget -c --mirror --convert-links https://docs.example.com/

curl in Docker HEALTHCHECK

# Simple health check (--start-period gives the app warm-up grace before
# failures count against --retries)
HEALTHCHECK --interval=30s --timeout=5s --start-period=10s --retries=3 \
  CMD curl -sf http://localhost:8000/health || exit 1

# Health check for a sidecar proxy setup
# NOTE(review): port 15021 /healthz/ready looks like an Istio sidecar
# readiness endpoint — confirm against your mesh configuration.
HEALTHCHECK --interval=15s --timeout=3s --retries=5 \
  CMD curl -sf http://localhost:15021/healthz/ready || exit 1

# Health check that verifies response content
# (the pipe works because CMD here is shell form, run under /bin/sh -c)
HEALTHCHECK --interval=30s --timeout=5s --retries=3 \
  CMD curl -sf http://localhost:8000/health | grep -q '"status":"ok"' || exit 1

# Note: install curl in the image if using distroless/minimal
# Alpine: apk add --no-cache curl
# Debian: apt-get install -y --no-install-recommends curl
# Or use wget (often pre-installed on Alpine):
HEALTHCHECK CMD wget -qO- http://localhost:8000/health || exit 1

Power One-Liners

Get your external IP

# Ask an external echo service which source IP your traffic egresses from.
# NOTE(review): ifconfig.me is a third-party service — don't script critical
# paths against it; have a fallback (e.g. checkip.amazonaws.com) in mind.
curl -4s ifconfig.me       # IPv4
curl -6s ifconfig.me       # IPv6
curl -s ifconfig.me/all    # Full details

[!TIP] When to use: Verifying NAT, VPN routing, or cloud metadata vs actual egress IP.

Instant HTTP server from any directory

# WARNING: http.server binds to all interfaces by default — anyone on the
# network can fetch the served directory. Add --bind 127.0.0.1 for local-only.
python3 -m http.server 8080                          # serve CWD on :8080
python3 -m http.server 8080 --directory /var/log     # serve specific dir

[!TIP] When to use: Sharing files across machines, quick log access, serving static assets during debugging.

Mirror an entire website

# NOTE(review): -e robots=off deliberately ignores robots.txt — only use on
# sites you own or have permission to archive; --random-wait keeps it polite.
wget --mirror --convert-links --adjust-extension --page-requisites --no-parent --random-wait -e robots=off https://example.com

Breakdown: --mirror = recursive + timestamps + infinite depth. --convert-links rewrites for local browsing. --page-requisites grabs CSS/JS/images. --random-wait adds polite delays. -e robots=off ignores robots.txt.

[!TIP] When to use: Archiving documentation sites, creating offline copies, backing up content.

Download and extract in one shot (no temp file)

# NOTE: streaming straight into tar means no checksum/signature verification
# and nothing left to retry from — prefer download-then-verify for production.
curl -sL https://example.com/release.tar.gz | tar xzf - -C /opt/

Breakdown: curl -sL follows redirects silently. Piped directly to tar which reads from stdin (-). -C extracts to target directory. Zero disk write for the archive itself.

[!TIP] When to use: Installing binaries, deploying artifacts, CI/CD pipelines.