Add string representation for config and config override #390

Merged: 9 commits, Jul 8, 2021
66 changes: 64 additions & 2 deletions tilequeue/command.py
@@ -2468,6 +2468,27 @@ def command_fn(cfg, args):
help='Tile coordinate as "z/x/y".')
subparser.add_argument('--run_id', required=False,
help='optional run_id used for logging')
subparser.add_argument('--postgresql_hosts', required=False,
help='optional string of a list of db hosts e.g. '
'`["aws.rds.url", "localhost"]`')
subparser.add_argument('--postgresql_dbnames', required=False,
help='optional string of a list of db names e.g. '
'`["gis"]`')
subparser.add_argument('--postgresql_user', required=False,
help='optional string of db user e.g. `gisuser`')
subparser.add_argument('--postgresql_password', required=False,
help='optional string of db password e.g. '
'`VHcDuAS0SYx2tlgTvtbuCXwlvO4pAtiGCuScJFjq7wersdfqwer`')
subparser.add_argument('--store_name', required=False,
help='optional string of a list of tile store '
'names e.g. `["my-meta-tiles-us-east-1"]`')
subparser.add_argument('--store_date_prefix', required=False,
Member:
I think we've been trying to change "date_prefix" to "build_id" elsewhere because they don't have to be date-related.

Contributor Author:
> I think we've been trying to change "date_prefix" to "build_id" elsewhere because they don't have to be date-related.

we can probably change that to "build_id" in a separate PR since the renaming may require more changes.

help='optional string of store bucket date prefix '
'e.g. `20210426`')
subparser.add_argument('--batch_check_metafile_exists', required=False,
help='optional string of a boolean indicating '
'whether to check metafile exists or not '
'e.g. `false`')
subparser.set_defaults(func=tilequeue_meta_tile)
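
Note: the new override flags all arrive as raw strings on the argparse namespace; list- and boolean-valued overrides are written in YAML/JSON syntax and are only turned into typed values later, in make_config_from_argparse. A minimal sketch of that behaviour with a throwaway parser (the flag names match the diff; everything else here is illustrative):

```python
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--postgresql_hosts', required=False)
parser.add_argument('--batch_check_metafile_exists', required=False)

args = parser.parse_args([
    '--postgresql_hosts', '["aws.rds.url", "localhost"]',
    '--batch_check_metafile_exists', 'false',
])
print(args.postgresql_hosts)             # '["aws.rds.url", "localhost"]' -- still a string
print(args.batch_check_metafile_exists)  # 'false' -- still a string
```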

subparser = subparsers.add_parser('meta-tile-low-zoom')
@@ -2477,6 +2498,20 @@ def command_fn(cfg, args):
help='Tile coordinate as "z/x/y".')
subparser.add_argument('--run_id', required=False,
help='optional run_id used for logging')
subparser.add_argument('--postgresql_hosts', required=False,
help='optional string of a list of db hosts e.g. `["aws.rds.url", "localhost"]`')
subparser.add_argument('--postgresql_dbnames', required=False,
help='optional string of a list of db names e.g. `["gis"]`')
subparser.add_argument('--postgresql_user', required=False,
help='optional string of db user e.g. `gisuser`')
subparser.add_argument('--postgresql_password', required=False,
help='optional string of db password e.g. `VHcDuAS0SYx2tlgTvtbuCXwlvO4pAtiGCuScJFjq7wersdfqwer`')
subparser.add_argument('--store_name', required=False,
help='optional string of a list of tile store names e.g. `["my-meta-tiles-us-east-1"]`')
subparser.add_argument('--store_date_prefix', required=False,
help='optional string of store bucket date prefix e.g. `20210426`')
subparser.add_argument('--batch_check_metafile_exists', required=False,
help='optional string of a boolean indicating whether to check metafile exists or not e.g. `false`')
subparser.set_defaults(func=tilequeue_meta_tile_low_zoom)

subparser = subparsers.add_parser('rawr-tile')
@@ -2486,6 +2521,27 @@ def command_fn(cfg, args):
help='Tile coordinate as "z/x/y".')
subparser.add_argument('--run_id', required=False,
help='optional run_id used for logging')
subparser.add_argument('--postgresql_hosts', required=False,
help='optional string of a list of db hosts e.g. '
'`["aws.rds.url", "localhost"]`')
subparser.add_argument('--postgresql_dbnames', required=False,
help='optional string of a list of db names e.g. '
'`["gis"]`')
subparser.add_argument('--postgresql_user', required=False,
help='optional string of db user e.g. `gisuser`')
subparser.add_argument('--postgresql_password', required=False,
help='optional string of db password e.g. '
'`VHcDuAS0SYx2tlgTvtbuCXwlvO4pAtiGCuScJFjq7wersdfqwer`')
subparser.add_argument('--store_name', required=False,
help='optional string of a list of tile store '
'names e.g. `["my-meta-tiles-us-east-1"]`')
subparser.add_argument('--store_date_prefix', required=False,
help='optional string of store bucket date prefix '
'e.g. `20210426`')
subparser.add_argument('--batch_check_metafile_exists', required=False,
help='optional string of a boolean indicating '
'whether to check metafile exists or not '
'e.g. `false`')
subparser.set_defaults(func=tilequeue_rawr_tile)

subparser = subparsers.add_parser('batch-enqueue')
@@ -2503,6 +2559,12 @@ def command_fn(cfg, args):
assert os.path.exists(args.config), \
'Config file {} does not exist!'.format(args.config)
with open(args.config) as fh:
cfg = make_config_from_argparse(fh)

cfg = make_config_from_argparse(fh,
postgresql_hosts=args.postgresql_hosts,
postgresql_dbnames=args.postgresql_dbnames,
postgresql_user=args.postgresql_user,
postgresql_password=args.postgresql_password,
store_name=args.store_name,
store_date_prefix=args.store_date_prefix,
batch_check_metafile_exists=args.batch_check_metafile_exists)
args.func(cfg, args)
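
The values forwarded here are still plain strings; a quick illustration of what they become once the override logic in config.py parses them (assuming PyYAML's loader, which is what the `load(...)` calls in the diff appear to be):

```python
import yaml

yaml.safe_load('["aws.rds.url", "localhost"]')  # -> ['aws.rds.url', 'localhost']
yaml.safe_load('gisuser')                       # -> 'gisuser'
yaml.safe_load('false')                         # -> False
yaml.safe_load('20210426')                      # -> 20210426, an int (quote the value
                                                #    if a string is required)
```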
183 changes: 182 additions & 1 deletion tilequeue/config.py
@@ -137,6 +137,133 @@ def __init__(self, yml):
self.tile_sizes = [256 * (1 << z) for z in
reversed(xrange(0, self.metatile_zoom + 1))]

def __repr__(self):
return 'yml: {yml},\n' \
'aws_access_key_id: {aws_access_key_id},\n' \
'aws_secret_access_key: {aws_secret_access_key},\n' \
'queue_cfg: {queue_cfg},\n' \
'store_type: {store_type},\n' \
's3_bucket: {s3_bucket},\n' \
's3_reduced_redundancy: {s3_reduced_redundancy},\n' \
's3_path: {s3_path},\n' \
's3_date_prefix: {s3_date_prefix},\n' \
's3_delete_retry_interval: {s3_delete_retry_interval},\n' \
'seed_all_zoom_start: {seed_all_zoom_start},\n' \
'seed_all_zoom_until: {seed_all_zoom_until},\n' \
'seed_n_threads: {seed_n_threads},\n' \
'seed_metro_extract_url: {seed_metro_extract_url},\n' \
'seed_metro_extract_zoom_start: {seed_metro_extract_zoom_start},\n' \
'seed_metro_extract_zoom_until: {seed_metro_extract_zoom_until},\n' \
'seed_metro_extract_cities: {seed_metro_extract_cities},\n' \
'seed_top_tiles_url: {seed_top_tiles_url},\n' \
'seed_top_tiles_zoom_start: {seed_top_tiles_zoom_start},\n' \
'seed_top_tiles_zoom_until: {seed_top_tiles_zoom_until},\n' \
'toi_store_type: {toi_store_type},\n' \
'toi_store_s3_bucket: {toi_store_s3_bucket},\n' \
'toi_store_s3_key: {toi_store_s3_key},\n' \
'toi_store_file_name: {toi_store_file_name},\n' \
'seed_custom_zoom_start: {seed_custom_zoom_start},\n' \
'seed_should_add_to_tiles_of_interest: {seed_should_add_to_tiles_of_interest},\n' \
'seed_custom_zoom_until: {seed_custom_zoom_until},\n' \
'seed_unique: {seed_unique},\n' \
'intersect_expired_tiles_location: {intersect_expired_tiles_location},\n' \
'intersect_zoom_until: {intersect_zoom_until},\n' \
'logconfig: {logconfig},\n' \
'redis_type: {redis_type},\n' \
'redis_host: {redis_host},\n' \
'redis_port: {redis_port},\n' \
'redis_db: {redis_db},\n' \
'redis_cache_set_key: {redis_cache_set_key},\n' \
'statsd_host: {statsd_host},\n' \
'statsd_port: {statsd_port},\n' \
'statsd_prefix: {statsd_prefix},\n' \
'n_simultaneous_query_sets: {n_simultaneous_query_sets},\n' \
'n_simultaneous_s3_storage: {n_simultaneous_s3_storage},\n' \
'log_queue_sizes: {log_queue_sizes},\n' \
'log_queue_sizes_interval_seconds: {log_queue_sizes_interval_seconds},\n' \
'query_cfg: {query_cfg},\n' \
'template_path: {template_path},\n' \
'reload_templates: {reload_templates},\n' \
'output_formats: {output_formats},\n' \
'buffer_cfg: {buffer_cfg},\n' \
'process_yaml_cfg: {process_yaml_cfg},\n' \
'postgresql_conn_info: {postgresql_conn_info},\n' \
'metatile_size: {metatile_size},\n' \
'metatile_zoom: {metatile_zoom},\n' \
'metatile_start_zoom: {metatile_start_zoom},\n' \
'max_zoom_with_changes: {max_zoom_with_changes},\n' \
'max_zoom: {max_zoom},\n' \
'sql_queue_buffer_size: {sql_queue_buffer_size},\n' \
'proc_queue_buffer_size: {proc_queue_buffer_size},\n' \
's3_queue_buffer_size: {s3_queue_buffer_size},\n' \
'tile_traffic_log_path: {tile_traffic_log_path},\n' \
'group_by_zoom: {group_by_zoom},\n' \
'tile_sizes: {tile_sizes}\n'.format(
yml=self.yml,
aws_access_key_id=self.aws_access_key_id,
aws_secret_access_key=self.aws_secret_access_key,
queue_cfg=self.queue_cfg,
store_type=self.store_type,
s3_bucket=self.s3_bucket,
s3_reduced_redundancy=self.s3_reduced_redundancy,
s3_path=self.s3_path,
s3_date_prefix=self.s3_date_prefix,
s3_delete_retry_interval=self.s3_delete_retry_interval,
seed_all_zoom_start=self.seed_all_zoom_start,
seed_all_zoom_until=self.seed_all_zoom_until,
seed_n_threads=self.seed_n_threads,
seed_metro_extract_url=self.seed_metro_extract_url,
seed_metro_extract_zoom_start=self.seed_metro_extract_zoom_start,
seed_metro_extract_zoom_until=self.seed_metro_extract_zoom_until,
seed_metro_extract_cities=self.seed_metro_extract_cities,
seed_top_tiles_url=self.seed_top_tiles_url if self.seed_top_tiles_url is not None else 'None',
seed_top_tiles_zoom_start=self.seed_top_tiles_zoom_start if self.seed_top_tiles_zoom_start is not None else 'None',
seed_top_tiles_zoom_until=self.seed_top_tiles_zoom_until if self.seed_top_tiles_zoom_until is not None else 'None',
toi_store_type=self.toi_store_type if self.toi_store_type is not None else 'None',
toi_store_s3_bucket=self.toi_store_s3_bucket if hasattr(self, 'toi_store_s3_bucket') and self.toi_store_s3_bucket is not None else 'None',
toi_store_s3_key=self.toi_store_s3_key if hasattr(self, 'toi_store_s3_key') and self.toi_store_s3_key is not None else 'None',
toi_store_file_name=self.toi_store_file_name if hasattr(self, 'toi_store_file_name') and self.toi_store_file_name is not None else 'None',
seed_custom_zoom_start=self.seed_custom_zoom_start if hasattr(self, 'seed_custom_zoom_start') and self.seed_custom_zoom_start is not None else 'None',
seed_should_add_to_tiles_of_interest=self.seed_should_add_to_tiles_of_interest if hasattr(self, 'seed_should_add_to_tiles_of_interest') and self.seed_should_add_to_tiles_of_interest is not None else 'None',
seed_custom_zoom_until=self.seed_custom_zoom_until if hasattr(self, 'seed_custom_zoom_until') and self.seed_custom_zoom_until is not None else 'None',
seed_unique=self.seed_unique if hasattr(self, 'seed_unique') and self.seed_unique is not None else 'None',
intersect_expired_tiles_location=self.intersect_expired_tiles_location if hasattr(self, 'intersect_expired_tiles_location') and self.intersect_expired_tiles_location is not None else 'None',
intersect_zoom_until=self.intersect_zoom_until if hasattr(self, 'intersect_zoom_until') and self.intersect_zoom_until is not None else 'None',
logconfig=self.logconfig if hasattr(self, 'logconfig') and self.logconfig is not None else 'None',
redis_type=self.redis_type if hasattr(self, 'redis_type') and self.redis_type is not None else 'None',
redis_host=self.redis_host if hasattr(self, 'redis_host') and self.redis_host is not None else 'None',
redis_port=self.redis_port if hasattr(self, 'redis_port') and self.redis_port is not None else 'None',
redis_db=self.redis_db if hasattr(self, 'redis_db') and self.redis_db is not None else 'None',
redis_cache_set_key=self.redis_cache_set_key if hasattr(self, 'redis_cache_set_key') and self.redis_cache_set_key is not None else 'None',
statsd_host=self.statsd_host if hasattr(self, 'statsd_host') and self.statsd_host is not None else 'None',
statsd_port=self.statsd_port if hasattr(self, 'statsd_port') and self.statsd_port is not None else 'None',
statsd_prefix=self.statsd_prefix if hasattr(self, 'statsd_prefix') and self.statsd_prefix is not None else 'None',
n_simultaneous_query_sets=self.n_simultaneous_query_sets if hasattr(self, 'n_simultaneous_query_sets') and self.n_simultaneous_query_sets is not None else 'None',
n_simultaneous_s3_storage=self.n_simultaneous_s3_storage if hasattr(self, 'n_simultaneous_s3_storage') and self.n_simultaneous_s3_storage is not None else 'None',
log_queue_sizes=self.log_queue_sizes if hasattr(self, 'log_queue_sizes') and self.log_queue_sizes is not None else 'None',
log_queue_sizes_interval_seconds=self.log_queue_sizes_interval_seconds if hasattr(self, 'log_queue_sizes_interval_seconds') and self.log_queue_sizes_interval_seconds is not None else 'None',
query_cfg=self.query_cfg if hasattr(self, 'query_cfg') and self.query_cfg is not None else 'None',
template_path=self.template_path if hasattr(self, 'template_path') and self.template_path is not None else 'None',
reload_templates=self.reload_templates if hasattr(self, 'reload_templates') and self.reload_templates is not None else 'None',
output_formats=self.output_formats if hasattr(self, 'output_formats') and self.output_formats is not None else 'None',
buffer_cfg=self.buffer_cfg if hasattr(self, 'buffer_cfg') and self.buffer_cfg is not None else 'None',
process_yaml_cfg=self.process_yaml_cfg if hasattr(self, 'process_yaml_cfg') and self.process_yaml_cfg is not None else 'None',
postgresql_conn_info=self.postgresql_conn_info if hasattr(self, 'postgresql_conn_info') and self.postgresql_conn_info is not None else 'None',
metatile_size=self.metatile_size if hasattr(self, 'metatile_size') and self.metatile_size is not None else 'None',
metatile_zoom=self.metatile_zoom if hasattr(self, 'metatile_zoom') and self.metatile_zoom is not None else 'None',
metatile_start_zoom=self.metatile_start_zoom if hasattr(self, 'metatile_start_zoom') and self.metatile_start_zoom is not None else 'None',
max_zoom_with_changes=self.max_zoom_with_changes if hasattr(self, 'max_zoom_with_changes') and self.max_zoom_with_changes is not None else 'None',
max_zoom=self.max_zoom if hasattr(self, 'max_zoom') and self.max_zoom is not None else 'None',
sql_queue_buffer_size=self.sql_queue_buffer_size if hasattr(self, 'sql_queue_buffer_size') and self.sql_queue_buffer_size is not None else 'None',
proc_queue_buffer_size=self.proc_queue_buffer_size if hasattr(self, 'proc_queue_buffer_size') and self.proc_queue_buffer_size is not None else 'None',
s3_queue_buffer_size=self.s3_queue_buffer_size if hasattr(self, 's3_queue_buffer_size') and self.s3_queue_buffer_size is not None else 'None',
tile_traffic_log_path=self.tile_traffic_log_path if hasattr(self, 'tile_traffic_log_path') and self.tile_traffic_log_path is not None else 'None',
group_by_zoom=self.group_by_zoom if hasattr(self, 'group_by_zoom') and self.group_by_zoom is not None else 'None',
tile_sizes=self.tile_sizes if hasattr(self, 'tile_sizes') and self.tile_sizes is not None else 'None')
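
With __repr__ in place the effective configuration can be printed or logged directly. A minimal usage sketch (the config file path and override value are placeholders):

```python
from tilequeue.config import make_config_from_argparse

with open('config.yaml') as fh:
    cfg = make_config_from_argparse(fh, postgresql_user='gisuser')

# Emits one "key: value" line per setting; optional settings that were never
# configured are shown as the string 'None'.
print(cfg)
```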



def _cfg(self, yamlkeys_str):
yamlkeys = yamlkeys_str.split()
yamlval = self.yml
@@ -311,7 +438,25 @@ def _make_yaml_key(s):
return s.lower().replace("_", "-")


def make_config_from_argparse(config_file_handle, default_yml=None):
def make_config_from_argparse(config_file_handle, default_yml=None,
postgresql_hosts=None,
postgresql_dbnames=None,
postgresql_user=None,
postgresql_password=None,
store_name=None,
store_date_prefix=None,
batch_check_metafile_exists=None,
):
""" Generate config from various sources. The configurations chain
includes these in order:
1. a hardcoded default_yml_config
2. a passed-in config file
3. environment variables with prefix `TILEQUEUE__`
4. explicit override arguments such as postgresql_hosts

the configuration values at the end of the chain override the values
of those at the beginning of the chain
"""
if default_yml is None:
default_yml = default_yml_config()

@@ -331,6 +476,42 @@ def make_config_from_argparse(config_file_handle, default_yml=None):
value = load(os.environ[k])
_override_cfg(cfg, keys, value)

# override config values with explicit arguments if set
if postgresql_hosts is not None:
keys = ['postgresql', 'host'] # note: the config key is the singular `host`
value = load(postgresql_hosts)
_override_cfg(cfg, keys, value)

if postgresql_dbnames is not None:
keys = ['postgresql', 'dbnames']
value = load(postgresql_dbnames)
_override_cfg(cfg, keys, value)

if postgresql_user is not None:
keys = ['postgresql', 'user']
value = load(postgresql_user)
_override_cfg(cfg, keys, value)

if postgresql_password is not None:
keys = ['postgresql', 'password']
value = load(postgresql_password)
_override_cfg(cfg, keys, value)

if store_name is not None:
keys = ['store', 'name']
value = load(store_name)
_override_cfg(cfg, keys, value)

if store_date_prefix is not None:
keys = ['store', 'date-prefix']
value = load(store_date_prefix)
_override_cfg(cfg, keys, value)

if batch_check_metafile_exists is not None:
keys = ['batch', 'check-metatile-exists']
value = load(batch_check_metafile_exists)
_override_cfg(cfg, keys, value)

return Configuration(cfg)
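
A small sketch of the precedence chain described in the docstring: the explicit keyword argument is the last link, so it beats both the config file and any environment variable. All names and values below are hypothetical, and the `TILEQUEUE__POSTGRESQL__USER` spelling is an assumption based on the `TILEQUEUE__` prefix mentioned above:

```python
import os

from tilequeue.config import make_config_from_argparse

os.environ['TILEQUEUE__POSTGRESQL__USER'] = 'envuser'  # assumed env var spelling

with open('config.yaml') as fh:  # the file may also set a postgresql user
    cfg = make_config_from_argparse(fh, postgresql_user='gisuser')

# Later links win: the explicit argument overrides the env var, which in turn
# overrides whatever config.yaml set.
print(cfg.postgresql_conn_info)
```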

