This repository has been archived by the owner on May 22, 2024. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 28
/
Makefile
215 lines (169 loc) · 6.94 KB
/
Makefile
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
# Pipeline identity and pinned tool versions. These knobs are referenced by
# the install/docker/test targets below and exported to scripts/docker-build.sh.
PIPELINE_FAMILY := sec-filings
PIPELINE_PACKAGE := sec_filings
PACKAGE_NAME := prepline_${PIPELINE_PACKAGE}
PIP_VERSION := 23.1.2

# help: prints every "## target: description" line in this Makefile
# ($< is the first prerequisite, i.e. the Makefile itself).
.PHONY: help
help: Makefile
	@sed -n 's/^\(## \)\([a-zA-Z]\)/\2/p' $<
###########
# Install #
###########

## install-base: installs minimum requirements to run the API
.PHONY: install-base
install-base: install-base-pip-packages install-nltk-models

## install: installs all test and dev requirements
.PHONY: install
install: install-base install-test install-dev

.PHONY: install-base-pip-packages
install-base-pip-packages:
	python3 -m pip install pip==${PIP_VERSION}
	pip install -r requirements/base.txt

# NLTK data required at runtime for sentence tokenization and POS tagging.
# Uses python3 explicitly, consistent with install-base-pip-packages.
.PHONY: install-nltk-models
install-nltk-models:
	python3 -c "import nltk; nltk.download('punkt')"
	python3 -c "import nltk; nltk.download('averaged_perceptron_tagger')"

.PHONY: install-test
install-test:
	pip install -r requirements/test.txt

.PHONY: install-dev
install-dev:
	pip install -r requirements/dev.txt

# Registers an IPython kernel inside the current environment (used in CI
# so the pipeline notebooks can be executed headlessly).
.PHONY: install-ipython-kernel
install-ipython-kernel:
	ipython kernel install --name "python3" --sys-prefix

.PHONY: install-ci
install-ci: install-base install-test install-ipython-kernel

## pip-compile: compiles all base/dev/test requirements
.PHONY: pip-compile
pip-compile:
	pip-compile --upgrade requirements/base.in
	pip-compile --upgrade requirements/dev.in
	pip-compile --upgrade requirements/test.in
#########
# Build #
#########

## generate-api: generates the FastAPI python APIs from notebooks
# Converts every notebook in ./pipeline-notebooks into a FastAPI module
# under ./${PACKAGE_NAME}/api via unstructured_api_tools.
.PHONY: generate-api
generate-api:
	PYTHONPATH=. unstructured_api_tools convert-pipeline-notebooks \
		--input-directory ./pipeline-notebooks \
		--output-directory ./${PACKAGE_NAME}/api
##########
# Docker #
##########

# Docker targets are provided for convenience only and are not required in a
# standard development environment.
# Note that the image has notebooks baked in, however the current working
# directory is mounted under /home/notebook-user/local/ when the image is
# started with docker-start-api or docker-start-jupyter.
.PHONY: docker-build
docker-build:
	PIP_VERSION=${PIP_VERSION} PIPELINE_FAMILY=${PIPELINE_FAMILY} PIPELINE_PACKAGE=${PIPELINE_PACKAGE} ./scripts/docker-build.sh

# Serves the FastAPI app from the dev image on host port 8000, with the
# repo bind-mounted so local edits are visible inside the container.
.PHONY: docker-start-api
docker-start-api:
	docker run -p 8000:8000 --mount type=bind,source=$(realpath .),target=/home/notebook-user/local -t --rm pipeline-family-${PIPELINE_FAMILY}-dev:latest uvicorn ${PACKAGE_NAME}.api.app:app --log-config logger_config.yaml --host 0.0.0.0 --port 8000

# Runs Jupyter from the dev image on host port 8888 with auth disabled
# (token/password intentionally empty — local development only).
.PHONY: docker-start-jupyter
docker-start-jupyter:
	docker run -p 8888:8888 --mount type=bind,source=$(realpath .),target=/home/notebook-user/local -t --rm pipeline-family-${PIPELINE_FAMILY}-dev:latest jupyter-notebook --port 8888 --ip 0.0.0.0 --no-browser --NotebookApp.token='' --NotebookApp.password=''
#########
# Local #
#########

## run-jupyter: starts jupyter notebook
# PYTHONPATH/JUPYTER_PATH point at the repo root so notebooks can import
# the pipeline package without installation; auth is disabled (local only).
.PHONY: run-jupyter
run-jupyter:
	PYTHONPATH=$(realpath .) JUPYTER_PATH=$(realpath .) jupyter-notebook --NotebookApp.token='' --NotebookApp.password=''

## run-web-app: runs the FastAPI api with hot reloading
.PHONY: run-web-app
run-web-app:
	PYTHONPATH=. uvicorn ${PACKAGE_NAME}.api.app:app --log-config logger_config.yaml --reload
#################
# Test and Lint #
#################

## test: runs core tests
.PHONY: test
test:
	PYTHONPATH=. pytest test_${PIPELINE_PACKAGE} --cov=${PACKAGE_NAME} --cov-report term-missing

# Fails the build when total coverage drops below 93%.
.PHONY: check-coverage
check-coverage:
	coverage report --fail-under=93

## test-integration: runs integration tests
.PHONY: test-integration
test-integration:
	PYTHONPATH=. pytest test_${PIPELINE_PACKAGE}_integration

## test-sample-docs: runs the pipeline on a set of sample SEC documents
# Depends on verify-artifacts so stale/corrupt fixtures fail fast.
.PHONY: test-sample-docs
test-sample-docs: verify-artifacts
	PYTHONPATH=. pytest test_real_docs

## api-check: verifies auto-generated pipeline APIs match the existing ones
.PHONY: api-check
api-check:
	PYTHONPATH=. PACKAGE_NAME=${PACKAGE_NAME} ./scripts/test-doc-pipeline-apis-consistent.sh

## dl-test-artifacts: downloads external artifacts used for testing
# NOTE(review): -r/-nH are redundant with -O for a single direct URL, but are
# kept to preserve existing behavior. Checksums are verified after unpacking.
.PHONY: dl-test-artifacts
dl-test-artifacts:
	wget -r -nH -O sample-docs/sample-sec-docs.tar.gz https://utic-dev-tech-fixtures.s3.us-east-2.amazonaws.com/sample-sec-docs/sample-sec-docs.tar.gz
	tar -xf sample-docs/sample-sec-docs.tar.gz -C sample-docs/ && rm sample-docs/sample-sec-docs.tar.gz
	$(MAKE) verify-artifacts

# Verifies the unpacked sample docs against the committed SHA-256 manifest.
.PHONY: verify-artifacts
verify-artifacts:
	sha256sum --check --status sample-docs/sample-sec-docs.sha256

.PHONY: dl-test-artifacts-source
dl-test-artifacts-source:
	# Downloads directly from SEC website. Not normally needed, see script.
	PYTHONPATH=. python3 test_utils/get_sec_docs_from_edgar.py
## check: runs linters (includes tests)
.PHONY: check
check: check-src check-tests check-version

## check-src: runs linters (source only, no tests)
# The generated API package (${PACKAGE_NAME}/api) is excluded from black
# because it is produced by generate-api, not hand-written.
.PHONY: check-src
check-src:
	black --line-length 100 ${PACKAGE_NAME} --check --exclude ${PACKAGE_NAME}/api
	flake8 ${PACKAGE_NAME}
	mypy ${PACKAGE_NAME} --ignore-missing-imports --implicit-optional --install-types --non-interactive

.PHONY: check-tests
check-tests:
	black --line-length 100 test_${PIPELINE_PACKAGE} --check
	flake8 test_${PIPELINE_PACKAGE}
	black --line-length 100 test_${PIPELINE_PACKAGE}_integration --check
	flake8 test_${PIPELINE_PACKAGE}_integration
	black --line-length 100 test_real_docs --check
	flake8 test_real_docs
	black --line-length 100 test_utils --check
	flake8 test_utils

## check-scripts: run shellcheck
.PHONY: check-scripts
check-scripts:
	# Fail if any of these files have warnings
	scripts/shellcheck.sh

## check-version: run check to ensure version in CHANGELOG.md matches references in files
.PHONY: check-version
check-version:
	# Fail if syncing version would produce changes
	scripts/version-sync.sh -c \
		-s CHANGELOG.md \
		-f README.md api-release \
		-f preprocessing-pipeline-family.yaml release \
		-f exploration-notebooks/exploration-10q-amended.ipynb api-release

## check-notebooks: check that executing and cleaning notebooks doesn't produce changes
.PHONY: check-notebooks
check-notebooks:
	scripts/check-and-format-notebooks.py --check
## tidy: run black
# Formats in place (no --check); mirrors the directory list in check-src/check-tests.
.PHONY: tidy
tidy:
	black --line-length 100 ${PACKAGE_NAME}
	black --line-length 100 test_${PIPELINE_PACKAGE}
	black --line-length 100 test_${PIPELINE_PACKAGE}_integration
	black --line-length 100 test_real_docs
	black --line-length 100 test_utils

## tidy-notebooks: execute notebooks and remove metadata
.PHONY: tidy-notebooks
tidy-notebooks:
	scripts/check-and-format-notebooks.py

## version-sync: update references to version with most recent version from CHANGELOG.md
# Same file list as check-version, but writes changes instead of checking (-c).
.PHONY: version-sync
version-sync:
	scripts/version-sync.sh \
		-s CHANGELOG.md \
		-f README.md api-release \
		-f preprocessing-pipeline-family.yaml release \
		-f exploration-notebooks/exploration-10q-amended.ipynb api-release