Merge pull request #3 from ericsnowcurrently/pyperformance
Use pyperformance to run the benchmarks.
kmod authored Jan 20, 2022
2 parents 96730b0 + 766642a commit 9dc9557
Showing 115 changed files with 1,580 additions and 694 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -129,3 +129,4 @@ dmypy.json
.pyre/

results
benchmarks/bm_pytorch_alexnet_inference/data/dog.jpg
19 changes: 19 additions & 0 deletions README.md
@@ -1,2 +1,21 @@
# python-macrobenchmarks
A collection of macro benchmarks for the Python programming language


## usage

```shell
# Run the default benchmarks:
python3 -m pyperformance run --manifest $PWD/benchmarks/MANIFEST
```

The benchmarks can still be run without pyperformance; doing so produces
results in the old format.

```shell
# Run the benchmarks:
sh ./run_all.sh

# Run the mypy benchmark using mypyc:
sh ./run_mypy.sh
```
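
A single benchmark can also be selected with pyperformance's `--benchmarks`
filter (a sketch; `aiohttp` is one of the names declared in
`benchmarks/MANIFEST`):

```shell
# Run only the aiohttp benchmark:
python3 -m pyperformance run --manifest $PWD/benchmarks/MANIFEST --benchmarks aiohttp
```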
22 changes: 22 additions & 0 deletions benchmarks/.libs/legacyutils.py
@@ -0,0 +1,22 @@
import json
import sys


def maybe_handle_legacy(bench_func, *args, loopsarg='loops', legacyarg=None):
    """Run bench_func directly and exit if --legacy was passed.

    Without --legacy this is a no-op and the caller falls through to
    the normal pyperf runner.  Remaining positional arguments are
    interpreted as [loops [output-file]].
    """
    if '--legacy' not in sys.argv:
        return
    argv = list(sys.argv[1:])
    argv.remove('--legacy')

    kwargs = {}
    if legacyarg:
        kwargs[legacyarg] = True
    if argv:
        assert loopsarg
        kwargs[loopsarg] = int(argv[0])

    _, times = bench_func(*args, **kwargs)
    if len(argv) > 1:
        # Dump the per-iteration timestamps as JSON (the old results format).
        with open(argv[1], 'w') as outfile:
            json.dump(times, outfile)

    sys.exit(0)
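
A minimal sketch of how a benchmark script is meant to call this helper (the
`_bench_foo` function here is hypothetical; the real pattern appears in
`benchmarks/bm_aiohttp/run_benchmark.py` below):

```python
# Hypothetical benchmark script using maybe_handle_legacy().
import pyperf
from legacyutils import maybe_handle_legacy

def _bench_foo(loops=100, legacy=False):
    elapsed = 0.0
    times = []
    for _ in range(loops):
        t0 = pyperf.perf_counter()
        ...  # workload under measurement
        elapsed += pyperf.perf_counter() - t0
        times.append(t0)
    return elapsed, times

if __name__ == "__main__":
    # With --legacy, this runs _bench_foo directly and exits;
    # otherwise control falls through to the pyperf runner.
    maybe_handle_legacy(_bench_foo, legacyarg='legacy')
    runner = pyperf.Runner()
    runner.bench_time_func("foo", lambda loops: _bench_foo(loops)[0])
```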
88 changes: 88 additions & 0 deletions benchmarks/.libs/netutils.py
@@ -0,0 +1,88 @@
import contextlib
import ipaddress
import os.path
import socket
import subprocess
import time


@contextlib.contextmanager
def serving(argv, sitedir, addr, *,
pause=None,
kill=False,
quiet=True,
):
    # If `addr` names an existing filesystem path, treat it as a Unix
    # domain socket and remove any stale socket file; otherwise it is
    # a host:port TCP address.
    if os.path.exists(addr):
        sock = addr
        addr = None
        try:
            os.remove(sock)
        except FileNotFoundError:
            pass
    else:
        sock = None

p = subprocess.Popen(
argv,
cwd=sitedir,
stdout=subprocess.DEVNULL if quiet else None,
stderr=subprocess.STDOUT if quiet else None,
)
try:
if pause:
time.sleep(pause)
if not sock:
try:
waitUntilUp(addr)
except NotImplementedError:
sock = addr
addr = None
if sock:
while not os.path.exists(sock):
time.sleep(0.001)
assert p.poll() is None, p.poll()
yield
assert p.poll() is None, p.poll()
finally:
p.terminate()
if kill:
p.kill()
p.wait()


def waitUntilUp(addr, timeout=10.0):
    # Poll until the server accepts a TCP connection, retrying on
    # ConnectionRefusedError until the timeout expires.
    end = time.time() + timeout
    addr = parse_socket_addr(addr)
    started = False
    current = time.time()
    while not started or current <= end:
        try:
            with socket.create_connection(addr) as sock:
                return
        except ConnectionRefusedError:
            time.sleep(0.001)
            started = True
            current = time.time()
    raise Exception('Timeout reached when trying to connect')


def parse_socket_addr(addr, *, resolve=True):
if not isinstance(addr, str):
raise NotImplementedError(addr)
host, _, port = addr.partition(':')

if not host:
raise NotImplementedError(addr)
try:
host = ipaddress.ip_address(host)
except ValueError:
raise NotImplementedError(addr)
host = str(host)

if not port:
raise NotImplementedError(addr)
if not port.isdigit():
raise NotImplementedError(addr)
port = int(port)

return (host, port)
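
A minimal usage sketch (the `serve.py` command and site directory are
placeholders; the real call site is in `benchmarks/bm_aiohttp/run_benchmark.py`
below):

```python
# Hypothetical driver: start a server, wait for it, then tear it down.
import sys
import netutils

with netutils.serving([sys.executable, "serve.py"], "path/to/site",
                      "127.0.0.1:8080"):
    # serving() has already waited (via waitUntilUp) for the port to
    # accept connections, so requests can be issued immediately here.
    ...
# On exit, the server process has been terminated and waited on.
```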
19 changes: 19 additions & 0 deletions benchmarks/MANIFEST
@@ -0,0 +1,19 @@
[benchmarks]

name metafile
aiohttp <local>
djangocms <local>
flaskblogging <local>
gevent_hub <local>
gunicorn <local>
json <local>
kinto <local>
mypy <local>
mypyc <local:mypy>
pycparser <local>
pylint <local>
pytorch_alexnet_inference <local>
thrift <local>

[group default]
-mypyc
41 changes: 0 additions & 41 deletions benchmarks/aiohttp.py

This file was deleted.

1 change: 1 addition & 0 deletions benchmarks/base.toml
File renamed without changes.
1 change: 1 addition & 0 deletions benchmarks/bm_aiohttp/legacyutils.py
1 change: 1 addition & 0 deletions benchmarks/bm_aiohttp/netutils.py
12 changes: 12 additions & 0 deletions benchmarks/bm_aiohttp/pyproject.toml
@@ -0,0 +1,12 @@
[project]
name = "bm_aiohttp"
dependencies = [
"aiohttp",
"gunicorn",
"requests",
"uvloop",
]
dynamic = ["version"]

[tool.pyperformance]
inherits = ".."
File renamed without changes.
70 changes: 70 additions & 0 deletions benchmarks/bm_aiohttp/run_benchmark.py
@@ -0,0 +1,70 @@
import os.path
import requests
import sys

import pyperf
import netutils


DATADIR = os.path.join(
os.path.dirname(__file__),
"data",
)
ARGV = [sys.executable, "serve.py"]


#############################
# benchmarks

def bench_aiohttp_requests(loops=3000):
elapsed, _ = _bench_aiohttp_requests(loops)
return elapsed


def _bench_aiohttp_requests(loops=3000, legacy=False):
"""Measure N HTTP requests to a local server.
Note that the server is freshly started here.
Only the time for requests is measured here. The following are not:
* preparing the site the server will serve
* starting the server
* stopping the server
Hence this should be used with bench_time_func()
instead of bench_func().
"""
start = pyperf.perf_counter()
elapsed = 0
times = []
with netutils.serving(ARGV, DATADIR, "127.0.0.1:8080"):
requests_get = requests.get
for i in range(loops):
# This is a macro benchmark for a Python implementation
# so "elapsed" covers more than just how long a request takes.
t0 = pyperf.perf_counter()
requests_get("http://localhost:8080/blog/").text
t1 = pyperf.perf_counter()

elapsed += t1 - t0
times.append(t0)
if legacy and (i % 100 == 0):
print(i, t0 - start)
times.append(pyperf.perf_counter())
if legacy:
total = times[-1] - start
print("%.2fs (%.3freq/s)" % (total, loops / total))
return elapsed, times


#############################
# the script

if __name__ == "__main__":
from legacyutils import maybe_handle_legacy
maybe_handle_legacy(_bench_aiohttp_requests, legacyarg='legacy')

runner = pyperf.Runner()
runner.metadata['description'] = "Test the performance of aiohttp"
runner.bench_time_func("aiohttp", bench_aiohttp_requests)
1 change: 1 addition & 0 deletions benchmarks/bm_djangocms/legacyutils.py
1 change: 1 addition & 0 deletions benchmarks/bm_djangocms/netutils.py
18 changes: 18 additions & 0 deletions benchmarks/bm_djangocms/pyproject.toml
@@ -0,0 +1,18 @@
[project]
name = "bm_djangocms"
dependencies = [
"Django",
"django-cms",
"djangocms-bootstrap4",
"djangocms-file",
"djangocms-googlemap",
"djangocms-installer",
"djangocms-snippet",
"djangocms-style",
"djangocms-video",
"requests",
]
dynamic = ["version"]

[tool.pyperformance]
inherits = ".."
File renamed without changes.
