Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 4 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -189,4 +189,7 @@ cache
*.iml

# MacOS Finder
**/.DS_Store
**/.DS_Store

#VS Code files
.vscode/
2 changes: 1 addition & 1 deletion benchmarks/000.microbenchmarks/010.sleep/config.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"timeout": 120,
"memory": 128,
"languages": ["python", "nodejs"],
"languages": ["python", "nodejs", "pypy"],
"modules": []
}
9 changes: 9 additions & 0 deletions benchmarks/000.microbenchmarks/010.sleep/pypy/function.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@

from time import sleep

def handler(event):
    """Sleep for the requested number of seconds and echo it back.

    :param event: dict with optional key 'sleep' — duration in seconds.
    :return: dict with 'result' set to the slept duration.
    """
    # Default to 0 so a missing 'sleep' key is a no-op instead of
    # sleep(None) raising TypeError.
    sleep_time = event.get('sleep', 0)
    sleep(sleep_time)
    return { 'result': sleep_time }
2 changes: 1 addition & 1 deletion benchmarks/100.webapps/110.dynamic-html/config.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"timeout": 10,
"memory": 128,
"languages": ["python", "nodejs"],
"languages": ["python", "nodejs", "pypy"],
"modules": []
}
22 changes: 22 additions & 0 deletions benchmarks/100.webapps/110.dynamic-html/pypy/function.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
from datetime import datetime
from random import sample
from os import path
from time import time
import os

from jinja2 import Template

SCRIPT_DIR = path.abspath(path.join(path.dirname(__file__)))

def handler(event):
    """Render the HTML template with a username and random numbers.

    :param event: dict with 'username' (str) and 'random_len' (int, how many
        random numbers to generate).
    :return: dict with 'result' holding the rendered HTML page.
    """
    # start timing
    name = event.get('username')
    size = event.get('random_len')
    cur_time = datetime.now()
    random_numbers = sample(range(0, 1000000), size)
    # Use a context manager so the template file handle is closed instead of
    # leaking until garbage collection.
    with open(path.join(SCRIPT_DIR, 'templates', 'template.html'), 'r') as template_file:
        template = Template(template_file.read())
    html = template.render(username = name, cur_time = cur_time, random_numbers = random_numbers)
    # end timing
    # dump stats
    return {'result': html}
10 changes: 10 additions & 0 deletions benchmarks/100.webapps/110.dynamic-html/pypy/init.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
#!/bin/bash

# Copy the static Jinja templates next to the deployed benchmark code.
#   $1 - target directory of the deployed function
#   $2 - "true" to enable verbose logging
DIR=$1
VERBOSE=$2
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
path="${SCRIPT_DIR}/templates/"
if [ "$VERBOSE" = true ]; then
	echo "Update ${DIR} with static templates ${path}"
fi
# Quote both paths so directories containing spaces do not break the copy.
cp -r "${SCRIPT_DIR}/templates" "${DIR}"
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
jinja2>=2.10.3
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
<!DOCTYPE html>
<html>
<head>
<title>Randomly generated data.</title>
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<link href="http://netdna.bootstrapcdn.com/bootstrap/3.0.0/css/bootstrap.min.css" rel="stylesheet" media="screen">
<style type="text/css">
.container {
max-width: 500px;
padding-top: 100px;
}
</style>
</head>
<body>
<div class="container">
<p>Welcome {{username}}!</p>
<p>Data generated at: {{cur_time}}!</p>
<p>Requested random numbers:</p>
<ul>
{% for n in random_numbers %}
<li>{{n}}</li>
{% endfor %}
</ul>
</div>
</body>
</html>
12 changes: 9 additions & 3 deletions benchmarks/100.webapps/120.uploader/config.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,12 @@
{
"timeout": 30,
"memory": 128,
"languages": ["python", "nodejs"],
"modules": ["storage"]
}
"languages": [
"python",
"nodejs",
"pypy"
],
"modules": [
"storage"
]
}
48 changes: 48 additions & 0 deletions benchmarks/100.webapps/120.uploader/pypy/function.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@

import datetime
import os

import urllib.request

from . import storage
# Shared storage client abstracting the deployment's object store.
client = storage.storage.get_instance()

# Identifies SeBS benchmark traffic to the servers files are fetched from.
SEBS_USER_AGENT = "SeBS/1.2 (https://github.com/spcl/serverless-benchmarks) SeBS Benchmark Suite/1.2"

def handler(event):
    """Download a file from a URL and upload it to the benchmark bucket.

    :param event: dict with 'bucket' {'bucket', 'output'} and 'object' {'url'}.
    :return: dict with the upload result and timing measurements in microseconds.
    """
    bucket_cfg = event.get('bucket')
    bucket = bucket_cfg.get('bucket')
    output_prefix = bucket_cfg.get('output')
    url = event.get('object').get('url')
    name = os.path.basename(url)
    download_path = '/tmp/{}'.format(name)

    # Fetch the remote file; the custom User-Agent marks SeBS traffic.
    process_begin = datetime.datetime.now()
    request = urllib.request.Request(url)
    request.add_header('User-Agent', SEBS_USER_AGENT)
    with open(download_path, 'wb') as local_file:
        with urllib.request.urlopen(request) as response:
            local_file.write(response.read())
    size = os.path.getsize(download_path)
    process_end = datetime.datetime.now()

    upload_begin = datetime.datetime.now()
    key_name = client.upload(bucket, os.path.join(output_prefix, name), download_path)
    upload_end = datetime.datetime.now()

    microsecond = datetime.timedelta(microseconds=1)
    process_time = (process_end - process_begin) / microsecond
    upload_time = (upload_end - upload_begin) / microsecond
    return {
        'result': {
            'bucket': bucket,
            'url': url,
            'key': key_name
        },
        'measurement': {
            # Download metrics are folded into compute time in this benchmark,
            # hence the zeros here.
            'download_time': 0,
            'download_size': 0,
            'upload_time': upload_time,
            'upload_size': size,
            'compute_time': process_time
        }
    }
Empty file.
12 changes: 9 additions & 3 deletions benchmarks/200.multimedia/210.thumbnailer/config.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,12 @@
{
"timeout": 60,
"memory": 256,
"languages": ["python", "nodejs"],
"modules": ["storage"]
}
"languages": [
"python",
"nodejs",
"pypy"
],
"modules": [
"storage"
]
}
12 changes: 12 additions & 0 deletions benchmarks/200.multimedia/210.thumbnailer/pypy/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
# Image Processing

A simple pipeline performing basic image operations with Pillow.

[Inspired by AWS Lambda tutorial code.](https://docs.aws.amazon.com/lambda/latest/dg/with-s3-example-deployment-pkg.html)

### Instructions

1. Deploy Docker container with function code and input data.

2. Example of JSON payload: `{ "dir": "input_data", "id": "1" }`.

70 changes: 70 additions & 0 deletions benchmarks/200.multimedia/210.thumbnailer/pypy/function.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,70 @@
import datetime
import io
import os
import sys
import uuid
from urllib.parse import unquote_plus
from PIL import Image

from . import storage
# Shared storage client abstracting the deployment's object store.
client = storage.storage.get_instance()

# Disk-based solution
#def resize_image(image_path, resized_path, w, h):
# with Image.open(image_path) as image:
# image.thumbnail((w,h))
# image.save(resized_path)

# Memory-based solution
def resize_image(image_bytes, w, h):
    """Thumbnail raw image data to fit within (w, h), returning a JPEG buffer.

    :param image_bytes: raw encoded image data.
    :param w: maximum thumbnail width in pixels.
    :param h: maximum thumbnail height in pixels.
    :return: io.BytesIO positioned at offset 0 with the JPEG-encoded thumbnail.
    """
    buffer = io.BytesIO()
    with Image.open(io.BytesIO(image_bytes)) as image:
        image.thumbnail((w, h))
        image.save(buffer, format='jpeg')
    # Rewind so the consumer reads from the start of the encoded data.
    buffer.seek(0)
    return buffer

def handler(event):
    """Create a thumbnail for an object stored in the benchmark bucket.

    :param event: dict with 'bucket' {'bucket', 'input', 'output'} and
        'object' {'key', 'width', 'height'}.
    :return: dict with the uploaded key and timing measurements in microseconds.
    """
    bucket_cfg = event.get('bucket')
    bucket = bucket_cfg.get('bucket')
    input_prefix = bucket_cfg.get('input')
    output_prefix = bucket_cfg.get('output')
    object_cfg = event.get('object')
    # Object keys arrive URL-encoded (e.g. '+' for spaces).
    key = unquote_plus(object_cfg.get('key'))
    width = object_cfg.get('width')
    height = object_cfg.get('height')

    download_begin = datetime.datetime.now()
    image_bytes = client.download_stream(bucket, os.path.join(input_prefix, key))
    download_end = datetime.datetime.now()

    process_begin = datetime.datetime.now()
    thumbnail = resize_image(image_bytes, width, height)
    thumbnail_size = thumbnail.getbuffer().nbytes
    process_end = datetime.datetime.now()

    upload_begin = datetime.datetime.now()
    key_name = client.upload_stream(bucket, os.path.join(output_prefix, key), thumbnail)
    upload_end = datetime.datetime.now()

    microsecond = datetime.timedelta(microseconds=1)
    download_time = (download_end - download_begin) / microsecond
    upload_time = (upload_end - upload_begin) / microsecond
    process_time = (process_end - process_begin) / microsecond
    return {
        'result': {
            'bucket': bucket,
            'key': key_name
        },
        'measurement': {
            'download_time': download_time,
            'download_size': len(image_bytes),
            'upload_time': upload_time,
            'upload_size': thumbnail_size,
            'compute_time': process_time
        }
    }
Empty file.
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
pillow==10.3.0
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
pillow==10.3.0
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
pillow==10.3.0
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Pillow==7.0.0
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Pillow==8.0.0
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Pillow==9.0.0
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Pillow==9.0.0
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Pillow==10.0.0
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Pillow==10.0.0
14 changes: 10 additions & 4 deletions benchmarks/300.utilities/311.compression/config.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,12 @@
{
"timeout": 60,
"memory": 256,
"languages": ["python", "nodejs"],
"modules": ["storage"]
}
"memory": 512,
"languages": [
"python",
"nodejs",
"pypy"
],
"modules": [
"storage"
]
}
12 changes: 12 additions & 0 deletions benchmarks/300.utilities/311.compression/pypy/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
# Compression

A simple utility benchmark that downloads a directory from storage, compresses it into a zip archive, and uploads the result.

[Inspired by AWS Lambda tutorial code.](https://docs.aws.amazon.com/lambda/latest/dg/with-s3-example-deployment-pkg.html)

### Instructions

1. Deploy Docker container with function code and input data.

2. Example of JSON payload: `{ "dir": "input_data", "id": "1" }`.

59 changes: 59 additions & 0 deletions benchmarks/300.utilities/311.compression/pypy/function.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@
import datetime
import io
import os
import shutil
import uuid
import zlib

from . import storage
# Shared storage client abstracting the deployment's object store.
client = storage.storage.get_instance()

def parse_directory(directory):
    """Return the total size in bytes of all files under *directory*.

    :param directory: root path to walk recursively.
    :return: cumulative size of every regular file found.
    """
    return sum(
        os.path.getsize(os.path.join(root, name))
        for root, _dirs, files in os.walk(directory)
        for name in files
    )

def handler(event):
    """Compress a bucket directory into a zip archive and upload it.

    :param event: dict with 'bucket' {'bucket', 'input', 'output'} and
        'object' {'key'} naming the directory to compress.
    :return: dict with the uploaded key and timing measurements in microseconds.
    """
    bucket_cfg = event.get('bucket')
    bucket = bucket_cfg.get('bucket')
    input_prefix = bucket_cfg.get('input')
    output_prefix = bucket_cfg.get('output')
    key = event.get('object').get('key')
    # Unique path so concurrent invocations do not collide in /tmp.
    download_path = '/tmp/{}-{}'.format(key, uuid.uuid4())
    os.makedirs(download_path)

    s3_download_begin = datetime.datetime.now()
    client.download_directory(bucket, os.path.join(input_prefix, key), download_path)
    s3_download_stop = datetime.datetime.now()
    size = parse_directory(download_path)

    compress_begin = datetime.datetime.now()
    shutil.make_archive(os.path.join(download_path, key), 'zip', root_dir=download_path)
    compress_end = datetime.datetime.now()

    s3_upload_begin = datetime.datetime.now()
    archive_name = '{}.zip'.format(key)
    archive_path = os.path.join(download_path, archive_name)
    archive_size = os.path.getsize(archive_path)
    key_name = client.upload(bucket, os.path.join(output_prefix, archive_name), archive_path)
    s3_upload_stop = datetime.datetime.now()

    microsecond = datetime.timedelta(microseconds=1)
    download_time = (s3_download_stop - s3_download_begin) / microsecond
    upload_time = (s3_upload_stop - s3_upload_begin) / microsecond
    process_time = (compress_end - compress_begin) / microsecond
    return {
        'result': {
            'bucket': bucket,
            'key': key_name
        },
        'measurement': {
            'download_time': download_time,
            'download_size': size,
            'upload_time': upload_time,
            'upload_size': archive_size,
            'compute_time': process_time
        }
    }

Empty file.
11 changes: 11 additions & 0 deletions benchmarks/300.utilities/312.converter/config.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
{
"timeout": 60,
"memory": 2048,
"languages": [
"python",
"pypy"
],
"modules": [
"storage"
]
}
Loading