Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
36 changes: 36 additions & 0 deletions .github/workflows/benchmark.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
# CodSpeed continuous-benchmarking workflow: runs the pytest-codspeed
# benchmarks in tests/test_benchmark.py on every push/PR to main.
name: CodSpeed

on:
  push:
    branches: ["main"]
  pull_request:
    branches: ["main"]

# id-token is needed for CodSpeed's OIDC-based upload; everything else read-only.
permissions:
  id-token: write
  contents: read

jobs:
  benchmarks:
    name: Run benchmarks
    # CodSpeed "macro" runners give the stable wall-clock timing environment
    # required by walltime mode below.
    runs-on: codspeed-macro
    steps:
      # Actions are pinned to full commit SHAs (with version comments) so a
      # moved tag cannot silently change what runs.
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
        with:
          persist-credentials: false

      - name: Install uv
        uses: astral-sh/setup-uv@08807647e7069bb48b6ef5acd8ec9567f424441b # v8.1.0
        with:
          python-version: "3.13"
          enable-cache: true

      - name: Install dependencies
        run: scripts/install
        shell: bash

      - name: Run the benchmarks
        uses: CodSpeedHQ/action@3194d9a39c4d46684cb44bf7207fc56626aad8fd # v4
        with:
          # walltime mode measures real elapsed time (vs. instrumented CPU
          # simulation), which suits these I/O- and allocation-heavy benchmarks.
          mode: walltime
          run: uv run pytest tests/test_benchmark.py --codspeed
4 changes: 3 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ dev = [
"coverage[toml]==7.10.6",
"cryptography==46.0.7",
"pytest>=9.0.3",
"pytest-codspeed>=4.1.1",
"pytest-httpbin==2.0.0",
"pytest-trio==0.8.0",
"trio==0.31.0",
Expand Down Expand Up @@ -78,8 +79,9 @@ filterwarnings = [
markers = [
"copied_from(source, changes=None): mark test as copied from somewhere else, along with a description of changes made to accommodate e.g. our test setup",
"network: marks tests which require network connection. Used in 3rd-party build environments that have network disabled.",
"benchmark: marks CodSpeed benchmark tests under tests/test_benchmark.py.",
]

[tool.coverage.run]
source_pkgs = ["httpx2", "httpcore2", "tests"]
omit = ["src/httpcore2/httpcore2/_sync/*"]
omit = ["src/httpcore2/httpcore2/_sync/*", "tests/test_benchmark.py"]
128 changes: 128 additions & 0 deletions tests/test_benchmark.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,128 @@
from __future__ import annotations

import gzip
import io
import json
import socket
import threading

import pytest

import httpx2
from httpcore2._backends.sync import SyncStream

# Every test in this module is a CodSpeed benchmark (marker declared in
# pyproject.toml; excluded from coverage there as well).
pytestmark = pytest.mark.benchmark

# A URL exercising every component: scheme, host, non-default port, path,
# query string, and fragment.
TYPICAL_URL = "https://www.example.org:8443/path/to/resource?key=value&other=1#frag"

# A realistic header set: a few standard headers plus 16 custom ones, so
# header normalization is benchmarked over a non-trivial list.
HEADERS: list[tuple[str, str]] = [
    ("host", "example.org"),
    ("user-agent", "httpx2-bench/1.0"),
    ("accept", "*/*"),
    ("accept-encoding", "gzip, deflate, br"),
    *[(f"x-custom-{i}", f"value-{i}") for i in range(16)],
]

# Small JSON payload (tens of KB serialized) for per-request overhead tests.
SMALL_JSON: dict[str, object] = {
    "id": 12345,
    "items": [{"sku": f"SKU-{i}", "qty": i, "price": i * 1.5} for i in range(50)],
}
# Large JSON payload (hundreds of KB serialized) for encode/decode throughput.
LARGE_JSON: dict[str, object] = {
    "records": [
        {"id": i, "name": f"record-{i}", "tags": [f"t{j}" for j in range(8)], "active": bool(i % 2)}
        for i in range(2048)
    ],
}
# Pre-serialized / pre-compressed bodies, built once at import time so the
# benchmarks measure httpx2, not json/gzip setup.
SMALL_JSON_BODY = json.dumps(SMALL_JSON).encode()
LARGE_JSON_BODY = json.dumps(LARGE_JSON).encode()
GZIPPED_LARGE_JSON_BODY = gzip.compress(LARGE_JSON_BODY)


def test_bench_url_join() -> None:
    """Benchmark joining a relative reference against a fully-featured base URL."""
    base_url = httpx2.URL(TYPICAL_URL)
    relative_reference = "/path/to/resource?key=value"
    for _iteration in range(1024):
        base_url.join(relative_reference)


def test_bench_request_json_post() -> None:
    """Benchmark constructing POST requests with a JSON body and many headers."""
    iterations = 256
    for _round in range(iterations):
        httpx2.Request("POST", TYPICAL_URL, headers=HEADERS, json=SMALL_JSON)


def test_bench_request_multipart() -> None:
    """Benchmark multipart/form-data encoding with mixed form fields and files."""
    form_fields = {
        "name": "value",
        "other": "field",
        "description": "a longer text field",
    }
    for _round in range(64):
        # File parts are rebuilt each iteration so the BytesIO is fresh.
        file_parts = {
            "small": ("hello.txt", b"x" * 4096, "text/plain"),
            "large": ("payload.bin", io.BytesIO(b"y" * 65536), "application/octet-stream"),
        }
        request = httpx2.Request(
            "POST",
            "https://example.org/upload",
            data=form_fields,
            files=file_parts,
        )
        # Force the multipart body to actually be encoded.
        request.read()


def test_bench_response_gzip_decode_large() -> None:
    """Benchmark transparent gzip decoding of a large response body."""
    response_headers = [
        ("content-type", "application/json"),
        ("content-encoding", "gzip"),
    ]
    for _round in range(64):
        response = httpx2.Response(
            200,
            headers=response_headers,
            content=GZIPPED_LARGE_JSON_BODY,
        )
        # read() triggers the content-encoding decode path.
        response.read()


def _large_json_handler(request: httpx2.Request) -> httpx2.Response:
    """MockTransport handler: always reply 200 with the large JSON payload."""
    headers = [("content-type", "application/json")]
    return httpx2.Response(200, content=LARGE_JSON_BODY, headers=headers)


def _stream_handler(request: httpx2.Request) -> httpx2.Response:
    """MockTransport handler: always reply 200 with a 1 MiB opaque body."""
    body = b"x" * (1024 * 1024)
    return httpx2.Response(200, content=body)


def test_bench_client_post_large_json() -> None:
    """Benchmark a full client round-trip: encode large JSON out, decode large JSON back."""
    transport = httpx2.MockTransport(_large_json_handler)
    with httpx2.Client(transport=transport) as client:
        for _round in range(16):
            response = client.post(TYPICAL_URL, json=LARGE_JSON)
            response.json()


def test_bench_client_stream_download() -> None:
    """Benchmark streaming a 1 MiB response body in 8 KiB chunks."""
    transport = httpx2.MockTransport(_stream_handler)
    with httpx2.Client(transport=transport) as client:
        for _round in range(16):
            with client.stream("GET", TYPICAL_URL) as response:
                for _chunk in response.iter_bytes(chunk_size=8192):
                    pass


def test_bench_sync_stream_write_large() -> None:
    """Benchmark SyncStream.write with a payload far larger than the socket buffers.

    A reader thread drains the peer socket so write() never deadlocks, and
    records the total byte count exactly once at EOF so the final assertion
    can verify the whole payload arrived intact.
    """
    payload = b"x" * 4 * 1024 * 1024  # 4 MB
    reader_sock, writer_sock = socket.socketpair()
    try:
        # Small kernel buffers + small reader chunks force many partial sends on Linux,
        # which is what exercises the buffer-slicing loop inside SyncStream.write.
        writer_sock.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 8192)
        reader_sock.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 8192)

        drained: list[int] = []

        def drain() -> None:
            # Accumulate until the writer closes, then append the grand total
            # exactly once. Appending per-chunk would make `drained` a list of
            # running totals and the equality assertion below could never hold.
            total = 0
            while True:
                chunk = reader_sock.recv(8192)
                if not chunk:
                    break
                total += len(chunk)
            drained.append(total)

        thread = threading.Thread(target=drain)
        thread.start()

        stream = SyncStream(writer_sock)
        try:
            stream.write(payload)
        finally:
            # Always close so the drain thread sees EOF and thread.join()
            # cannot hang, even if write() raises.
            stream.close()
        thread.join()

        assert drained == [len(payload)]
    finally:
        reader_sock.close()
        # socket.close() is idempotent, so this is safe even after
        # stream.close() has already closed the underlying socket.
        writer_sock.close()
33 changes: 33 additions & 0 deletions uv.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.