Skip to content

Commit

Permalink
setup development on GPU
Browse files Browse the repository at this point in the history
  • Loading branch information
LeoVS09 committed Sep 6, 2020
1 parent d26ea9e commit 9163ae1
Show file tree
Hide file tree
Showing 10 changed files with 41 additions and 26 deletions.
4 changes: 4 additions & 0 deletions .dvc/config
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
[core]
remote = gdrive
['remote "gdrive"']
url = gdrive://1TkUf745P5c4aHe1Xt2WFEDLBsUEyiPuA
3 changes: 1 addition & 2 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
FROM tensorflow/tensorflow:latest-py3 as base
FROM tensorflow/tensorflow:2.2.0-gpu as base

RUN apt update && \
apt install -y make git
Expand All @@ -13,7 +13,6 @@ RUN python -m spacy download en_core_web_sm && \
FROM base as second

RUN pip install \
tensorflow==2.2 \
tensorflow_datasets \
dvc pydrive2

Expand Down
2 changes: 1 addition & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,6 @@ docker-build:

# Will start the in-Docker development environment
docker-console:
docker run -it --rm -v ${PWD}:/work -w /work -p 8888:8888 $(DOCKER_IMAGE_TAG) bash
docker run --gpus all -it --rm -v ${PWD}:/work -w /work -p 8888:8888 $(DOCKER_IMAGE_TAG) bash


6 changes: 6 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,12 @@ Can be run in a Docker environment, or, if you are on Linux:

All Python libraries and models described in Dockerfile

### Hardware

* NVidia videocard with DLSS version >= 10 — the GPU is actually optional, and learning can be run on the CPU,
but the models and environment are configured to run on a GPU;
in the base case TensorFlow can fall back to the CPU, so nothing needs to be changed to start development

## Development

If you are on Windows, build and run the in-Docker development environment
Expand Down
2 changes: 1 addition & 1 deletion data.dvc
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
outs:
- md5: a9204af33b6043bc2e0f4752452db9cc.dir
- md5: 5e1e1637ef6e7039a40a0ee22dd6f7e8.dir
path: data
18 changes: 9 additions & 9 deletions dvc.lock
Original file line number Diff line number Diff line change
Expand Up @@ -4,27 +4,27 @@ train:
- path: ./src/model.py
md5: f2d2fe914061e31ab9a9b126273b0032
- path: ./src/normalize.py
md5: 1e468f96edf82078bafa4fd6afe105b7
md5: 941426ea99ad2a81dee04ab8dba4b994
- path: ./src/train.py
md5: d93c994f261cf04723f5a0b373e5166b
- path: data
md5: a9204af33b6043bc2e0f4752452db9cc.dir
md5: 5e1e1637ef6e7039a40a0ee22dd6f7e8.dir
outs:
- path: ./metrics/training.csv
md5: af1be0b754ba0cb8a6f1a97ddb9bfa70
md5: f93e951f0d028511427dca798c6460ce
- path: ./saved_models/main.h5
md5: 04263a7f31aababf4949702b3f5ceb1a
md5: b1aee890497513c89cc7a736cb43d6e3
test:
cmd: python -m src.test
deps:
- path: data
md5: a9204af33b6043bc2e0f4752452db9cc.dir
md5: 5e1e1637ef6e7039a40a0ee22dd6f7e8.dir
- path: saved_models/main.h5
md5: 04263a7f31aababf4949702b3f5ceb1a
md5: b1aee890497513c89cc7a736cb43d6e3
- path: src/normalize.py
md5: 1e468f96edf82078bafa4fd6afe105b7
md5: 941426ea99ad2a81dee04ab8dba4b994
- path: src/test.py
md5: 1b04bc50552b3b16345ffe4e2dffe6db
md5: 2fd1d3c454d60d2cc0d5e1979667505d
outs:
- path: metrics/test.json
md5: c02593eb341008ae5f3d27e1190f0d98
md5: d4233d255efe51f0e8235b78f7072299
4 changes: 2 additions & 2 deletions metrics/test.json
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
{
"loss": 0.5036352872848511,
"accuracy": 0.7742400169372559
"loss": 0.5321113467216492,
"accuracy": 0.7470800280570984
}
20 changes: 10 additions & 10 deletions metrics/training.csv
Original file line number Diff line number Diff line change
@@ -1,11 +1,11 @@
epoch,accuracy,loss,val_accuracy,val_loss
0,0.6493499875068665,0.6274378299713135,0.7731999754905701,0.5212063789367676
1,0.7305999994277954,0.548753559589386,0.7833999991416931,0.49326327443122864
2,0.7640500068664551,0.5078993439674377,0.6394000053405762,0.6567526459693909
3,0.6689500212669373,0.6107459664344788,0.7024000287055969,0.5796387195587158
4,0.7591500282287598,0.5226651430130005,0.8190000057220459,0.44037047028541565
5,0.7748500108718872,0.4964852035045624,0.8181999921798706,0.43456318974494934
6,0.8187500238418579,0.43161866068840027,0.7376000285148621,0.492388516664505
7,0.8090000152587891,0.43665245175361633,0.8040000200271606,0.41991737484931946
8,0.7987499833106995,0.44822627305984497,0.5127999782562256,0.8335568308830261
9,0.6500499844551086,0.6251434683799744,0.7833999991416931,0.4926334619522095
0,0.6239500045776367,0.6509515047073364,0.7278000116348267,0.5713796615600586
1,0.7107999920845032,0.5822036862373352,0.7093999981880188,0.5925928950309753
2,0.7537000179290771,0.521277666091919,0.8041999936103821,0.44496265053749084
3,0.786050021648407,0.48704293370246887,0.8172000050544739,0.4280240833759308
4,0.7549999952316284,0.5206849575042725,0.5929999947547913,0.6776124238967896
5,0.6494500041007996,0.6242504119873047,0.7720000147819519,0.4978633224964142
6,0.7963500022888184,0.4657124876976013,0.758400022983551,0.5457758903503418
7,0.8179500102996826,0.4302661120891571,0.8230000138282776,0.4051404595375061
8,0.8256499767303467,0.4140107333660126,0.8015999794006348,0.44847530126571655
9,0.8198999762535095,0.41230010986328125,0.7512000203132629,0.5304372310638428
2 changes: 1 addition & 1 deletion src/normalize.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@

VECTOR_SIZE = 300

nlp = spacy.load("en_core_web_md")
nlp = spacy.load("en_core_web_lg")

def extract_sentences(text):
doc = nlp(text)
Expand Down
6 changes: 6 additions & 0 deletions src/test.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,12 @@

print("Tensorflow:", tf.__version__)

# fix issue with "cannot find dnn implementation"
# https://github.com/tensorflow/tensorflow/issues/36508
physical_devices = tf.config.list_physical_devices('GPU')
tf.config.experimental.set_memory_growth(physical_devices[0], enable=True)
print("Enabled experimental memory growth for", physical_devices[0])

metrics_file='metrics/test.json'

# Load normalised datasets
Expand Down

0 comments on commit 9163ae1

Please sign in to comment.