Skip to content

Commit 8c03e59

Browse files
committed
[add] updated dockerfile. [add] added circleci config
1 parent 2d64ad2 commit 8c03e59

15 files changed

+118
-15
lines changed

.circleci/config.yml

Lines changed: 29 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,29 @@
1+
# Python CircleCI 2.0 configuration file
2+
#
3+
# Check https://circleci.com/docs/2.0/language-python/ for more details
4+
#
5+
version: 2.1
6+
jobs:
7+
build-multiarch-docker:
8+
machine:
9+
enabled: true
10+
steps:
11+
- checkout
12+
- run: |
13+
echo "$DOCKER_REDISBENCH_PWD" | base64 --decode | docker login --username $DOCKER_REDISBENCH_USER --password-stdin
14+
- run:
15+
name: Build
16+
command: |
17+
make -C benchmark/redisgraph docker-release
18+
no_output_timeout: 20m
19+
20+
workflows:
21+
version: 2
22+
commit:
23+
jobs:
24+
- build-multiarch-docker:
25+
filters:
26+
tags:
27+
only: /.*/
28+
branches:
29+
only: master

.gitignore

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -30,6 +30,7 @@ benchmark/redisgraph/result_redisgraph/**
3030
# Gradle
3131
.idea/**/gradle.xml
3232
.idea/**/libraries
33+
.idea/**
3334

3435
# Gradle and Maven with auto-import
3536
# When using Gradle or Maven with auto-import, you should exclude module files,
@@ -207,4 +208,7 @@ venv.bak/
207208
dmypy.json
208209

209210
# Pyre type checker
210-
.pyre/
211+
.pyre/
212+
213+
# Mac
214+
*.DS_Store

README.md

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,6 @@ This repo contains code for benchmarking Graph databases.
88
- each vendor's benchmark is under
99
/benchmark/vendor_name/
1010
- start with README under each folder
11-
- all test can be reproducible on EC2 or similar enviroment.
1211

1312
Contact: benchmark@tigergraph.com
1413

benchmark/redisgraph/Dockerfile

Lines changed: 16 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,21 @@
11
FROM python:2.7.10
2-
COPY . ./
32

4-
RUN ./scripts/retrieve_seed_and_unique_node_data_from_s3.sh
3+
COPY scripts ./scripts
4+
COPY bulk_insert.py ./
5+
COPY config.py ./
6+
COPY generate_graph500_inputs.py ./
7+
COPY generate_twitter_inputs.py ./
8+
COPY graph_query.py ./
9+
COPY kn.py ./
10+
COPY query_runner.py ./
11+
COPY requirements.txt ./
12+
COPY docker_entrypoint.sh ./
13+
14+
#RUN ./scripts/retrieve_seed_and_unique_node_data_from_s3.sh
515

616
RUN python -m pip install -r requirements.txt
717

8-
ENTRYPOINT ["./kn.py"]
18+
19+
RUN chmod -R 751 scripts
20+
RUN chmod 751 docker_entrypoint.sh
21+
ENTRYPOINT ["./docker_entrypoint.sh"]

benchmark/redisgraph/Makefile

Lines changed: 46 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,46 @@
1+
# Go parameters
2+
# DOCKER
3+
DOCKER_APP_NAME=graph-database-benchmark-redisgraph
4+
DOCKER_ORG=redisbench
5+
DOCKER_REPO:=${DOCKER_ORG}/${DOCKER_APP_NAME}
6+
#DOCKER_TAG:=$(shell git log -1 --pretty=format:"%h")
7+
DOCKER_TAG=edge
8+
DOCKER_IMG:="$(DOCKER_REPO):$(DOCKER_TAG)"
9+
DOCKER_LATEST:="${DOCKER_REPO}:latest"
10+
11+
# DOCKER TASKS
12+
# Build the container
13+
docker-build:
14+
docker build -t $(DOCKER_APP_NAME):latest -f Dockerfile .
15+
16+
# Build the container without caching
17+
docker-build-nc:
18+
docker build --no-cache -t $(DOCKER_APP_NAME):latest -f Dockerfile .
19+
20+
# Make a release by building and publishing the `{version}` and `latest` tagged containers to DockerHub
21+
docker-release: docker-build-nc docker-publish
22+
23+
# Docker publish
24+
docker-publish: docker-publish-latest docker-publish-version ## Publish the `{version}` and `latest` tagged containers to DockerHub
25+
26+
docker-repo-login: ## login to DockerHub with credentials found in env
27+
docker login -u ${DOCKER_USERNAME} -p ${DOCKER_PASSWORD}
28+
29+
docker-publish-latest: docker-tag-latest ## Publish the `latest` tagged container to DockerHub
30+
@echo 'publish latest to $(DOCKER_REPO)'
31+
docker push $(DOCKER_LATEST)
32+
33+
docker-publish-version: docker-tag-version ## Publish the `{version}` tagged container to DockerHub
34+
@echo 'publish $(DOCKER_IMG) to $(DOCKER_REPO)'
35+
docker push $(DOCKER_IMG)
36+
37+
# Docker tagging
38+
docker-tag: docker-tag-latest docker-tag-version ## Generate container tags for the `{version}` and `latest` tags
39+
40+
docker-tag-latest: ## Generate container `latest` tag
41+
@echo 'create tag latest'
42+
docker tag $(DOCKER_APP_NAME) $(DOCKER_LATEST)
43+
44+
docker-tag-version: ## Generate container `{version}` tag
45+
@echo 'create tag $(DOCKER_APP_NAME) $(DOCKER_REPO):$(DOCKER_IMG)'
46+
docker tag $(DOCKER_APP_NAME) $(DOCKER_IMG)

benchmark/redisgraph/README.md

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ The easiest way to get and install the benchmark code is to use:
1919
```bash
2020
git clone https://github.com/RedisGraph/graph-database-benchmark.git
2121
cd graph-database-benchmark/benchmark/redisgraph
22-
sudo pip install -r requirements.txt
22+
pip install --user -r requirements.txt
2323
```
2424

2525
#### Data retrieval
@@ -32,7 +32,7 @@ Variables:
3232
1. `EDGE_FILE` (default: `dependent on the use case`)
3333
1. `NODE_FILE` (default: `dependent on the use case`)
3434

35-
The easiest way to get the datasets required for the benchmark is to use one of the helper scrips:
35+
The easiest way to get the datasets required for the benchmark is to use one of the helper scripts:
3636
1. `./scripts/get_graph500_dataset.sh`
3737
or
3838
1. `./scripts/get_twitter_dataset.sh`
Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,14 @@
1+
#!/bin/sh
2+
echo "--------------------------------------------------"
3+
echo "graph database benchmark RedisGraph - Docker Image"
4+
echo "--------------------------------------------------"
5+
echo "Checking if request binary $1 exists"
6+
if [ -f ./$1 ]; then
7+
./"$@"
8+
echo
9+
echo "...done."
10+
exit 0
11+
else
12+
echo "$1 binary does not exist."
13+
exit 1
14+
fi

benchmark/redisgraph/graph500_22_seed

Lines changed: 0 additions & 1 deletion
This file was deleted.

benchmark/redisgraph/scripts/get_twitter_dataset.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@ if [ ! -f ${DATA_EDGE_FILE_NAME} ]; then
3030
curl -O ${EDGE_FILE} | tar zxf - > ${DATA_EDGE_FILE_NAME}
3131
else
3232
echo "Dataset found locally at ${DATA_EDGE_FILE_NAME}. No need to retrieve again."
33-
fi./
33+
fi
3434

3535
echo ""
3636
echo "---------------------------------------------------------------------------------"

benchmark/redisgraph/scripts/redisgraph_load_graph500.sh

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,8 +5,8 @@ DATASET_DIR=${DATASET_DIR:-"graph500_22"}
55
EXE_DIR=${EXE_DIR:-$(dirname $0)}
66
source ${EXE_DIR}/common.sh
77

8-
# Prepare input files in RedisGraph bulk import format
9-
python generate_graph500_inputs.py --inputdir ${DATASET} || exit 1
8+
## Prepare input files in RedisGraph bulk import format
9+
#python generate_graph500_inputs.py --inputdir ${DATASET} || exit 1
1010

1111
# Run RedisGraph bulk import script
1212
python bulk_insert.py graph500_22 -n data/graph500_22_unique_node -r data/graph500_22 \

0 commit comments

Comments
 (0)