# ----------------------------------
# GCP Variables and Commands
# ----------------------------------
# path of the file to upload to GCP (absolute, or relative to the directory where make is run)
LOCAL_PATH=
# project id
PROJECT_ID=wagon-bootcamp-337804
# bucket name
BUCKET_NAME=02-crypto_prediction
# bucket directory in which to store the uploaded file (named "data" by convention)
BUCKET_FOLDER=data
# name for the uploaded file inside the bucket folder (here we keep the original file name)
# BUCKET_FILE_NAME=another_file_name_if_I_so_desire.csv
BUCKET_FILE_NAME=$(shell basename ${LOCAL_PATH})
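# For illustration only (hypothetical file name): LOCAL_PATH=raw_data/BTC-USD.csv would make
# BUCKET_FILE_NAME expand to BTC-USD.csv, since basename strips the directory part.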
# REGION=europe-west1
# REGION_UR=eu.gcr.io
REGION=asia-northeast3
REGION_UR=asia.gcr.io
set_project:
	-@gcloud config set project ${PROJECT_ID}

create_bucket:
	-@gsutil mb -l ${REGION} -p ${PROJECT_ID} gs://${BUCKET_NAME}

upload_data:
	# -@gsutil cp train_1k.csv gs://wagon-ml-my-bucket-name/data/train_1k.csv
	-@gsutil cp ${LOCAL_PATH} gs://${BUCKET_NAME}/${BUCKET_FOLDER}/${BUCKET_FILE_NAME}
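
# A typical upload flow, as a sketch (the CSV path below is hypothetical; any local file works):
#   make set_project
#   make create_bucket
#   make upload_data LOCAL_PATH=raw_data/BTC-USD.csv
# which copies the file to gs://02-crypto_prediction/data/BTC-USD.csv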

### GCP configuration - - - - - - - - - - - - - - - - - - -
# /!\ fill these in according to your own account
### GCP Project - - - - - - - - - - - - - - - - - - - - - -
# not required here
### GCP Storage - - - - - - - - - - - - - - - - - - - - - -
# BUCKET_NAME=XXX
##### Data - - - - - - - - - - - - - - - - - - - - - - - -
# not required here
##### Training - - - - - - - - - - - - - - - - - - - - - -
# bucket folder in which the training packages uploaded to GCP are stored
BUCKET_TRAINING_FOLDER=trainings
##### Model - - - - - - - - - - - - - - - - - - - - - - - -
# not required here
### GCP AI Platform - - - - - - - - - - - - - - - - - - - -
##### Machine configuration - - - - - - - - - - - - - - - -
# REGION=europe-west1
PYTHON_VERSION=3.7
FRAMEWORK=scikit-learn
RUNTIME_VERSION=1.15
##### Package params - - - - - - - - - - - - - - - - - - -
PACKAGE_NAME=crypto_backend
FILENAME=trainer
##### Job - - - - - - - - - - - - - - - - - - - - - - - - -
JOB_NAME=crypto_prediction_$(shell date +'%Y%m%d_%H%M%S')
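# The date suffix keeps each job name unique; an illustrative (not real) expansion:
# JOB_NAME=crypto_prediction_20220314_153045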
#### GCS config - - - - - - - - - - - - - - - - - - - - - -
DOCKER_IMAGE_NAME=crypto_predict
## TJ's GCP credentials, kept as a fallback in case mine stop working
GOOGLE_APPLICATION_CREDENTIALS=/home/tjp1992/code/tjp1992/gcp/wagon-bootcamp-337804-fcbb82b2e82e.json
# Lam's GCP credentials
# GOOGLE_APPLICATION_CREDENTIALS=/home/lamtrinh259/.config/gcloud/application_default_credentials.json
run_locally:
	@python -m ${PACKAGE_NAME}.${FILENAME}
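# With the variables above, this expands to `python -m crypto_backend.trainer`,
# i.e. it runs the trainer module locally before anything is submitted to GCP.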
gcp_submit_training:
	gcloud ai-platform jobs submit training ${JOB_NAME} \
		--job-dir gs://${BUCKET_NAME}/${BUCKET_TRAINING_FOLDER} \
		--package-path ${PACKAGE_NAME} \
		--module-name ${PACKAGE_NAME}.${FILENAME} \
		--python-version=${PYTHON_VERSION} \
		--runtime-version=${RUNTIME_VERSION} \
		--region ${REGION} \
		--stream-logs
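
# Submission sketch (assumes gcloud is authenticated and the bucket already exists):
#   make gcp_submit_training
# This packages ./crypto_backend, stages it under gs://${BUCKET_NAME}/trainings, runs
# crypto_backend.trainer on AI Platform runtime 1.15, and streams the job logs.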
docker_build:
	docker build -t ${REGION_UR}/${PROJECT_ID}/${DOCKER_IMAGE_NAME} .

docker_local:
	docker run -e PORT=8000 -p 8000:8000 ${REGION_UR}/${PROJECT_ID}/${DOCKER_IMAGE_NAME}
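
# Local smoke test of the container (the root endpoint is an assumption about api.fast):
#   make docker_build
#   make docker_local
#   curl http://localhost:8000/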
gcloud_push:
	docker push ${REGION_UR}/${PROJECT_ID}/${DOCKER_IMAGE_NAME}

gcloud_run:
	gcloud run deploy \
		--image ${REGION_UR}/${PROJECT_ID}/${DOCKER_IMAGE_NAME} \
		--memory 4Gi \
		--timeout 600 \
		--platform managed \
		--region ${REGION}
gcloud_deploy:
	gcloud run deploy \
		--image ${REGION_UR}/${PROJECT_ID}/${DOCKER_IMAGE_NAME} \
		--memory 4Gi \
		--platform managed \
		--region ${REGION} \
		--set-env-vars "GOOGLE_APPLICATION_CREDENTIALS=/credentials.json"
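
# Note: the env var above assumes the service-account key was baked into the image at
# /credentials.json, e.g. via a Dockerfile line such as (hypothetical):
#   COPY wagon-bootcamp-337804-fcbb82b2e82e.json /credentials.json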
# ----------------------------------
# INSTALL & TEST
# ----------------------------------
install_requirements:
	@pip install -r requirements.txt

check_code:
	@flake8 scripts/* crypto_backend/*.py

black:
	@black scripts/* crypto_backend/*.py
test:
	@coverage run -m pytest tests/*test.py
	@coverage report -i -m --omit="${VIRTUAL_ENV}/lib/python*" --omit="/tmp/*"
ftest:
	@echo "ftest: TODO - not implemented yet"
clean:
	@rm -f */version.txt
	@rm -f .coverage
	@rm -fr */__pycache__ */*.pyc __pycache__
	@rm -fr build dist
	@rm -fr crypto_backend-*.dist-info
	@rm -fr crypto_backend.egg-info

install:
	@pip install . -U

all: clean install test black check_code
count_lines:
	@find ./ -name '*.py' -exec wc -l {} \; | sort -n | awk \
		'{printf "%4s %s\n", $$1, $$2}{s+=$$1}END{print s}'
	@echo ''
	@find ./scripts -name '*-*' -exec wc -l {} \; | sort -n | awk \
		'{printf "%4s %s\n", $$1, $$2}{s+=$$1}END{print s}'
	@echo ''
	@find ./tests -name '*.py' -exec wc -l {} \; | sort -n | awk \
		'{printf "%4s %s\n", $$1, $$2}{s+=$$1}END{print s}'
	@echo ''
# ----------------------------------
# UPLOAD PACKAGE TO PYPI
# ----------------------------------
PYPI_USERNAME=<AUTHOR>
build:
	@python setup.py sdist bdist_wheel

pypi_test:
	@twine upload -r testpypi dist/* -u $(PYPI_USERNAME)

pypi:
	@twine upload dist/* -u $(PYPI_USERNAME)
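
# Release sketch (PYPI_USERNAME must be filled in above; twine prompts for the password):
#   make build
#   make pypi_test   # dry run against test.pypi.org
#   make pypi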
# ----------------------------------
# uvicorn commands
# ----------------------------------
run_api:
	@pip install -r requirements.txt
	@uvicorn api.fast:app --reload
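
# Once uvicorn is up, the interactive docs should be at http://localhost:8000/docs
# (standard FastAPI behavior, assuming api/fast.py defines a FastAPI app named `app`).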