-
Notifications
You must be signed in to change notification settings - Fork 9
/
Copy pathbuild_with_packages.py
104 lines (87 loc) · 3.62 KB
/
build_with_packages.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
import os
import glob
import yaml
from distutils.dir_util import copy_tree
import shutil
import argparse
import boto3
import logging
# Configure root logging once at import time so INFO-level messages are emitted.
logging.basicConfig(level=logging.INFO)
# Module-level logger, named after this module per stdlib convention.
logger = logging.getLogger(__name__)
def create_argparser():
    """Build the command-line parser for the lambda packaging script.

    Returns:
        argparse.ArgumentParser: parser exposing --lambda-path (required
        source folder of the lambda) and --s3-upload (boolean flag to
        upload the built zip to S3).
    """
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter
    )
    parser.add_argument('--lambda-path',
                        required=True,
                        help='path of src folder of lambda to build '
                             'e.g. src/hello_world',
                        )
    # store_true makes this a real boolean flag. The previous form
    # (default=False, no action) stored whatever string was supplied,
    # so even "--s3-upload false" was truthy downstream.
    parser.add_argument('--s3-upload',
                        action='store_true',
                        default=False,
                        help='upload lambda to s3 based on config'
                        )
    return parser
def build_zip_with_libs(*, lambda_src):
    """Package a lambda's source plus its required libraries into a zip.

    Reads <lambda_src>/config.yml for the runtime and library list, stages
    the lambda source and each library (resolved from the
    'aws-python-lambdas' conda env's site-packages) into a tmp directory,
    then archives it as dist/<lambda_name>.zip.

    Args:
        lambda_src: path of the lambda source folder, e.g. 'src/hello_world'.

    Returns:
        tuple: (zip_file_path, s3_bucket, s3_key) — bucket and key come
        straight from the lambda's config.yml.
    """
    # boto3/botocore are already provided by the AWS Lambda runtime,
    # so never bundle them.
    libs_to_exclude = ['boto3', 'botocore']
    config_path = os.path.join(lambda_src, 'config.yml')
    logger.info('Reading config from %s', config_path)
    with open(config_path, 'r') as stream:
        # safe_load: yaml.load without an explicit Loader is deprecated and
        # can construct arbitrary Python objects from the file.
        lambda_cfg = yaml.safe_load(stream)

    conda_env_name = 'aws-python-lambdas'
    conda_path = os.path.join(os.environ['HOME'], 'miniconda3')
    conda_env_path = os.path.join(conda_path, 'envs', conda_env_name)
    site_packages = os.path.join(conda_env_path, 'lib',
                                 lambda_cfg['runtime'], 'site-packages')

    # Stage everything in a tmp dir; exist_ok so a leftover dir from a
    # previously failed build does not crash this one.
    tmp_dir = os.path.join('tmp', lambda_src.replace('src/', ''))
    os.makedirs(tmp_dir, exist_ok=True)
    # Copy the lambda function content into the staging dir.
    copy_tree(lambda_src, tmp_dir)

    # Resolve each configured lib to either a package directory or a
    # single-module <lib>.py file inside site-packages.
    libs_paths = []
    libs = [l for l in lambda_cfg['libs'] if l not in libs_to_exclude]
    for l in libs:
        lib_files = glob.glob(os.path.join(site_packages, l))
        if not lib_files:
            logger.debug('trying to add single file')
            lib_files = glob.glob(os.path.join(site_packages, l) + '.py')
        logger.info('Found {}'.format(lib_files))
        libs_paths.extend(lib_files)

    # Copy all resolved libs into the staging dir.
    for p in libs_paths:
        if '.py' in p:
            logger.info('Copying file {} to {}'.format(p, tmp_dir))
            shutil.copy2(p, tmp_dir)
        else:
            dst = os.path.join(tmp_dir, p.split('/')[-1])
            logger.info('Copying tree path {} to {}'.format(p, dst))
            copy_tree(p, dst)

    # make_archive writes dist/<name>.zip — ensure dist/ exists first.
    os.makedirs('dist', exist_ok=True)
    zip_file_dst = os.path.join('dist', lambda_src.split('/')[-1])
    shutil.make_archive(zip_file_dst, 'zip', tmp_dir)
    shutil.rmtree(tmp_dir)
    zip_file = zip_file_dst + '.zip'
    return zip_file, lambda_cfg['s3_bucket'], lambda_cfg['s3_key']
def upload_to_s3(*, lambda_zip, bucket, key, profile_name='nicor88-aws-dev'):
    """Upload a built lambda zip to S3.

    Args:
        lambda_zip: local path of the zip file to upload.
        bucket: destination S3 bucket name.
        key: destination S3 object key.
        profile_name: AWS credentials profile to use. Defaults to the
            previously hard-coded profile for backward compatibility;
            callers can now override it instead of editing this file.

    Returns:
        The result of boto3's upload_file (None on success); botocore
        exceptions propagate on failure.
    """
    session = boto3.Session(profile_name=profile_name)
    s3 = session.client('s3')
    return s3.upload_file(lambda_zip, bucket, key)
if __name__ == "__main__":
    # Entry point: parse CLI options, build the package, optionally upload.
    cli = create_argparser().parse_args()
    zip_file, bucket, key = build_zip_with_libs(lambda_src=cli.lambda_path)
    if cli.s3_upload:
        logger.info('Uploading to {}/{} ....'.format(bucket, key))
        logger.info(upload_to_s3(lambda_zip=zip_file, bucket=bucket, key=key))