diff --git a/datadog_checks_dev/README.md b/datadog_checks_dev/README.md
index bb1316d4c6ee6e..78dc0eae2c0b59 100644
--- a/datadog_checks_dev/README.md
+++ b/datadog_checks_dev/README.md
@@ -20,7 +20,7 @@ Using a virtual environment is recommended.
 ## Installation
 
 `datadog-checks-dev` is distributed on [PyPI][6] as a universal wheel
-and is available on Linux, macOS, and Windows, and supports Python 2.7/3.7+ and PyPy.
+and is available on Linux, macOS, and Windows, and supports Python 3.7+ and PyPy.
 
 ```console
 $ pip install "datadog-checks-dev[cli]"
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/cli.py b/datadog_checks_dev/datadog_checks/dev/tooling/cli.py
index 777fa05b198dc6..8c6a9523c43a92 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/cli.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/cli.py
@@ -5,7 +5,6 @@
 
 import click
 
-from ..compat import PermissionError
 from ..utils import dir_exists
 from .commands import ALL_COMMANDS
 from .commands.console import CONTEXT_SETTINGS, echo_success, echo_waiting, echo_warning, set_color, set_debug
@@ -33,9 +32,7 @@ def ddev(ctx, core, extras, agent, here, color, quiet, debug):
         # TODO: Remove IOError (and noqa: B014) when Python 2 is removed
         # In Python 3, IOError has been merged into OSError
         except (IOError, OSError, PermissionError):  # noqa: B014
-            echo_warning(
-                'Unable to create config file located at `{}`. ' 'Please check your permissions.'.format(CONFIG_FILE)
-            )
+            echo_warning(f'Unable to create config file located at `{CONFIG_FILE}`. Please check your permissions.')
 
     # Load and store configuration for sub-commands.
     config = load_config()
@@ -52,8 +49,8 @@ def ddev(ctx, core, extras, agent, here, color, quiet, debug):
     root = os.path.expanduser(config.get(repo_choice, ''))
     if here or not dir_exists(root):
         if not here and not quiet:
-            repo = 'datadog-agent' if repo_choice == 'agent' else 'integrations-{}'.format(repo_choice)
-            echo_warning('`{}` directory `{}` does not exist, defaulting to the current location.'.format(repo, root))
+            repo = 'datadog-agent' if repo_choice == 'agent' else f'integrations-{repo_choice}'
+            echo_warning(f'`{repo}` directory `{root}` does not exist, defaulting to the current location.')
 
         root = os.getcwd()
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/agent/changelog.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/agent/changelog.py
index cfe0175ad3ae71..856e823506bf5b 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/agent/changelog.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/agent/changelog.py
@@ -3,10 +3,9 @@
 # Licensed under a 3-clause BSD style license (see LICENSE)
 import json
 import os
-from collections import OrderedDict
+from io import StringIO
 
 import click
-from six import StringIO, iteritems
 
 from ....utils import read_file, write_file
 from ...constants import get_agent_changelog, get_agent_release_requirements, get_root
@@ -41,7 +40,7 @@ def changelog(since, to, write, force):
     agent_tags = get_agent_tags(since, to)
 
     # store the changes in a mapping {agent_version --> {check_name --> current_version}}
-    changes_per_agent = OrderedDict()
+    changes_per_agent = {}
 
     # to keep indexing easy, we run the loop off-by-one
     for i in range(1, len(agent_tags)):
@@ -54,9 +53,9 @@ def changelog(since, to, write, force):
         file_contents = git_show_file(req_file_name, agent_tags[i])
         catalog_prev = parse_agent_req_file(file_contents)
 
-        changes_per_agent[current_tag] = OrderedDict()
+        changes_per_agent[current_tag] = {}
 
-        for name, ver in iteritems(catalog_now):
+        for name, ver in catalog_now.items():
             # at some point in the git history, the requirements file erroneously
             # contained the folder name instead of the package name for each check,
             # let's be resilient
@@ -86,27 +85,25 @@ def changelog(since, to, write, force):
     check_changelog_url = 'https://github.com/DataDog/integrations-core/blob/master/{}/CHANGELOG.md'
 
     # go through all the agent releases
-    for agent, version_changes in iteritems(changes_per_agent):
+    for agent, version_changes in changes_per_agent.items():
         url = agent_changelog_url.format(agent.replace('.', ''))  # Github removes dots from the anchor
-        changelog_contents.write('## Datadog Agent version [{}]({})\n\n'.format(agent, url))
+        changelog_contents.write(f'## Datadog Agent version [{agent}]({url})\n\n')
 
         if not version_changes:
             changelog_contents.write('* There were no integration updates for this version of the Agent.\n\n')
         else:
-            for name, ver in iteritems(version_changes):
+            for name, ver in version_changes.items():
                 # get the "display name" for the check
                 manifest_file = os.path.join(get_root(), name, 'manifest.json')
                 if os.path.exists(manifest_file):
-                    decoded = json.loads(read_file(manifest_file).strip(), object_pairs_hook=OrderedDict)
+                    decoded = json.loads(read_file(manifest_file).strip())
                     display_name = decoded.get('display_name')
                 else:
                     display_name = name
 
                 breaking_notice = " **BREAKING CHANGE**" if ver[1] else ""
                 changelog_url = check_changelog_url.format(name)
-                changelog_contents.write(
-                    '* {} [{}]({}){}\n'.format(display_name, ver[0], changelog_url, breaking_notice)
-                )
+                changelog_contents.write(f'* {display_name} [{ver[0]}]({changelog_url}){breaking_notice}\n')
 
         # add an extra line to separate the release block
         changelog_contents.write('\n')
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/agent/integrations.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/agent/integrations.py
index a0509405687256..ca28e54adf4297 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/agent/integrations.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/agent/integrations.py
@@ -2,9 +2,9 @@
 # All rights reserved
 # Licensed under a 3-clause BSD style license (see LICENSE)
 import os
+from io import StringIO
 
 import click
-from six import StringIO, iteritems
 
 from ....utils import write_file
 from ...constants import get_agent_integrations_file, get_agent_release_requirements
@@ -39,11 +39,11 @@ def integrations(since, to, write, force):
     integrations_contents = StringIO()
     for tag in agent_tags:
-        integrations_contents.write('## Datadog Agent version {}\n\n'.format(tag))
+        integrations_contents.write(f'## Datadog Agent version {tag}\n\n')
 
         # Requirements for current tag
         file_contents = git_show_file(req_file_name, tag)
-        for name, ver in iteritems(parse_agent_req_file(file_contents)):
-            integrations_contents.write('* {}: {}\n'.format(name, ver))
+        for name, ver in parse_agent_req_file(file_contents).items():
+            integrations_contents.write(f'* {name}: {ver}\n')
 
         integrations_contents.write('\n')
 
     # save the changelog on disk if --write was passed
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/agent/requirements.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/agent/requirements.py
index af760b60bac8c3..d0d3cfb4f95e81 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/agent/requirements.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/agent/requirements.py
@@ -29,18 +29,18 @@ def requirements(ctx):
     entries = []
     for check in checks:
         if check in AGENT_V5_ONLY:
-            echo_info('Check `{}` is only shipped with Agent 5, skipping'.format(check))
+            echo_info(f'Check `{check}` is only shipped with Agent 5, skipping')
             continue
 
         try:
             version = get_version_string(check)
-            entries.append('{}\n'.format(get_agent_requirement_line(check, version)))
+            entries.append(f'{get_agent_requirement_line(check, version)}\n')
         except Exception as e:
-            echo_failure('Error generating line: {}'.format(e))
+            echo_failure(f'Error generating line: {e}')
             continue
 
     lines = sorted(entries)
 
     req_file = get_agent_release_requirements()
     write_file_lines(req_file, lines)
-    echo_success('Successfully wrote to `{}`!'.format(req_file))
+    echo_success(f'Successfully wrote to `{req_file}`!')
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/clean.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/clean.py
index 6fcb99048d13a9..365b3e1c94077b 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/clean.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/clean.py
@@ -14,7 +14,7 @@
 @click.command(context_settings=CONTEXT_SETTINGS, short_help="Remove a project's build artifacts")
 @click.argument('check', required=False)
 @click.option(
-    '--compiled-only', '-c', is_flag=True, help='Remove compiled files only ({}).'.format(', '.join(DELETE_EVERYWHERE)),
+    '--compiled-only', '-c', is_flag=True, help=f"Remove compiled files only ({', '.join(DELETE_EVERYWHERE)}).",
 )
 @click.option(
     '--all',
@@ -54,7 +54,7 @@ def clean(ctx, check, compiled_only, all_matches, force, verbose):
         path = os.getcwd()
 
     if compiled_only:
-        echo_waiting('Cleaning compiled artifacts in `{}`...'.format(path))
+        echo_waiting(f'Cleaning compiled artifacts in `{path}`...')
         removed_paths = remove_compiled_scripts(path, detect_project=not all_matches)
     else:
         force_clean_root = False
@@ -78,14 +78,14 @@ def clean(ctx, check, compiled_only, all_matches, force, verbose):
         else:
             target_description = 'artifacts (excluding those listed above)'
 
-        echo_waiting('Cleaning {} in `{}`...'.format(target_description, path))
+        echo_waiting(f'Cleaning {target_description} in `{path}`...')
         removed_paths = clean_package(path, detect_project=not all_matches, force_clean_root=force_clean_root)
 
     if verbose:
         if removed_paths:
             echo_success('Removed paths:')
             for p in removed_paths:
-                echo_info(' {}'.format(p))
+                echo_info(f' {p}')
 
     if removed_paths:
         echo_success('Cleaned!')
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/config.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/config.py
index 6add5c87b993b2..ee195990cf4ffb 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/config.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/config.py
@@ -37,7 +37,7 @@ def explore():
 def find():
     """Show the location of the config file."""
     if ' ' in CONFIG_FILE:
-        echo_info('"{}"'.format(CONFIG_FILE))
+        echo_info(f'"{CONFIG_FILE}"')
     else:
         echo_info(CONFIG_FILE)
 
@@ -86,7 +86,7 @@ def set_value(ctx, key, value):
     scrubbing = False
     if value is None:
         scrubbing = any(fnmatch(key, pattern) for pattern in SECRET_KEYS)
-        value = click.prompt('Value for `{}`'.format(key), hide_input=scrubbing)
+        value = click.prompt(f'Value for `{key}`', hide_input=scrubbing)
 
     if key in ('core', 'extras', 'agent') and not value.startswith('~'):
         value = os.path.abspath(value)
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/console.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/console.py
index 54492cdc65ba65..158da72e35ed93 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/console.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/console.py
@@ -71,11 +71,11 @@ def echo_debug(text, nl=True, cr=False, err=False, indent=None):
     if not DEBUG_OUTPUT:
         return
 
-    text = 'DEBUG: %s' % text
+    text = f'DEBUG: {text}'
     if indent:
         text = indent_text(text, indent)
     if cr:
-        text = '\n%s' % text
+        text = f'\n{text}'
 
     click.secho(text, bold=True, nl=nl, err=err, color=DISPLAY_COLOR)
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/create.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/create.py
index 265782f7cf2e24..54bd34ef934125 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/create.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/create.py
@@ -24,15 +24,13 @@ def tree():
 
 def construct_output_info(path, depth, last, is_dir=False):
     if depth == 0:
-        return u'', path, is_dir
+        return '', path, is_dir
     else:
         if depth == 1:
-            return (u'{}{} '.format(PIPE_END if last else PIPE_MIDDLE, HYPHEN), path, is_dir)
+            return (f'{PIPE_END if last else PIPE_MIDDLE}{HYPHEN} ', path, is_dir)
         else:
             return (
-                u'{} {}{}'.format(
-                    PIPE, u' ' * 4 * (depth - 2), u'{}{} '.format(PIPE_END if last or is_dir else PIPE_MIDDLE, HYPHEN)
-                ),
+                f"{PIPE} {' ' * 4 * (depth - 2)}{PIPE_END if last or is_dir else PIPE_MIDDLE}{HYPHEN} ",
                 path,
                 is_dir,
             )
@@ -104,7 +102,7 @@ def create(ctx, name, integration_type, location, non_interactive, quiet, dry_ru
     integration_dir = os.path.join(root, normalize_package_name(name))
     if os.path.exists(integration_dir):
-        abort('Path `{}` already exists!'.format(integration_dir))
+        abort(f'Path `{integration_dir}` already exists!')
 
     template_fields = {}
     if repo_choice != 'core' and not non_interactive and not dry_run:
@@ -116,7 +114,7 @@ def create(ctx, name, integration_type, location, non_interactive, quiet, dry_ru
     config = construct_template_fields(name, repo_choice, **template_fields)
 
     files = create_template_files(integration_type, root, config, read=not dry_run)
-    file_paths = [file.file_path.replace('{}{}'.format(root, path_sep), '', 1) for file in files]
+    file_paths = [file.file_path.replace(f'{root}{path_sep}', '', 1) for file in files]
 
     path_tree = tree()
     for file_path in file_paths:
@@ -127,9 +125,9 @@ def create(ctx, name, integration_type, location, non_interactive, quiet, dry_ru
 
     if dry_run:
         if quiet:
-            echo_info('Will create `{}`'.format(integration_dir))
+            echo_info(f'Will create `{integration_dir}`')
         else:
-            echo_info('Will create in `{}`:'.format(root))
+            echo_info(f'Will create in `{root}`:')
             display_path_tree(path_tree)
         return
@@ -137,7 +135,7 @@ def create(ctx, name, integration_type, location, non_interactive, quiet, dry_ru
         file.write()
 
     if quiet:
-        echo_info('Created `{}`'.format(integration_dir))
+        echo_info(f'Created `{integration_dir}`')
     else:
-        echo_info('Created in `{}`:'.format(root))
+        echo_info(f'Created in `{root}`:')
         display_path_tree(path_tree)
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/dep.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/dep.py
index 0b32add385161a..7cba2922a05f9d 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/dep.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/dep.py
@@ -4,7 +4,6 @@
 import os
 
 import click
-from six import itervalues
 
 from ...utils import write_file_lines
 from ..constants import REQUIREMENTS_IN, get_agent_requirements, get_root
@@ -30,22 +29,22 @@ def display_package_changes(pre_packages, post_packages, indent=''):
             changed.append((pre_package_names[package_name], post_package_names[package_name]))
 
     if not (added or removed or changed):
-        echo_info('{}No changes'.format(indent))
+        echo_info(f'{indent}No changes')
 
     if added:
-        echo_success('{}Added packages:'.format(indent))
+        echo_success(f'{indent}Added packages:')
         for package_name in sorted(added):
-            echo_info('{} {}'.format(indent, post_package_names[package_name]))
+            echo_info(f'{indent} {post_package_names[package_name]}')
 
     if removed:
-        echo_failure('{}Removed packages:'.format(indent))
+        echo_failure(f'{indent}Removed packages:')
         for package_name in sorted(removed):
-            echo_info('{} {}'.format(indent, pre_package_names[package_name]))
+            echo_info(f'{indent} {pre_package_names[package_name]}')
 
     if changed:
-        echo_warning('{}Changed packages:'.format(indent))
+        echo_warning(f'{indent}Changed packages:')
         for pre, post in changed:
-            echo_info('{} {} -> {}'.format(indent, pre, post))
+            echo_info(f'{indent} {pre} -> {post}')
 
@@ -71,7 +70,7 @@ def resolve(checks, lazy, quiet):
 
         if os.path.isfile(pinned_reqs_file):
             if not quiet:
-                echo_info('Check `{}`:'.format(check_name))
+                echo_info(f'Check `{check_name}`:')
 
             if not quiet:
                 echo_waiting(' Resolving dependencies...')
@@ -118,18 +117,18 @@ def pin(package, version, checks, marker, resolving, lazy, quiet):
         if resolving:
             pre_packages = list(read_packages(resolved_reqs_file))
         else:
-            pre_packages = list(itervalues(pinned_packages))
+            pre_packages = list(pinned_packages.values())
 
         if not quiet:
-            echo_info('Check `{}`:'.format(check_name))
+            echo_info(f'Check `{check_name}`:')
 
         if version == 'none':
             del pinned_packages[package_name]
         else:
             pinned_packages[package_name] = Package(package_name, version, marker)
 
-        package_list = sorted(itervalues(pinned_packages))
-        write_file_lines(pinned_reqs_file, ('{}\n'.format(package) for package in package_list))
+        package_list = sorted(pinned_packages.values())
+        write_file_lines(pinned_reqs_file, (f'{package}\n' for package in package_list))
 
         if not quiet:
             echo_waiting(' Resolving dependencies...')
@@ -158,7 +157,7 @@ def freeze():
 
     static_file = get_agent_requirements()
 
-    echo_info('Static file: {}'.format(static_file))
+    echo_info(f'Static file: {static_file}')
 
     pre_packages = list(read_packages(static_file))
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/env/check.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/env/check.py
index 0901445d434ab8..fa815c7ad80118 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/env/check.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/env/check.py
@@ -39,20 +39,20 @@ def check_run(check, env, rate, times, pause, delay, log_level, as_json, break_p
     """Run an Agent check."""
     envs = get_configured_envs(check)
     if not envs:
-        echo_failure('No active environments found for `{}`.'.format(check))
-        echo_info('See what is available to start via `ddev env ls {}`.'.format(check))
+        echo_failure(f'No active environments found for `{check}`.')
+        echo_info(f'See what is available to start via `ddev env ls {check}`.')
         abort()
 
     if not env:
         if len(envs) > 1:
-            echo_failure('Multiple active environments found for `{}`, please specify one.'.format(check))
+            echo_failure(f'Multiple active environments found for `{check}`, please specify one.')
             echo_info('See what is active via `ddev env ls`.')
             abort()
 
         env = envs[0]
 
     if env not in envs:
-        echo_failure('`{}` is not an active environment.'.format(env))
+        echo_failure(f'`{env}` is not an active environment.')
         echo_info('See what is active via `ddev env ls`.')
         abort()
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/env/ls.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/env/ls.py
index 7cea6aab4a3b7c..10a90a889fa399 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/env/ls.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/env/ls.py
@@ -20,7 +20,7 @@ def ls(checks):
         envs = get_available_tox_envs(check, e2e_only=True)
 
         if envs:
-            echo_success('{}:'.format(check))
+            echo_success(f'{check}:')
             for env in envs:
                 echo_info(env, indent=True)
 
@@ -29,6 +29,6 @@ def ls(checks):
         envs = get_configured_envs(check)
 
         if envs:
-            echo_success('{}:'.format(check))
+            echo_success(f'{check}:')
             for env in envs:
                 echo_info(env, indent=True)
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/env/prune.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/env/prune.py
index 280bcb0731469a..4468ecad38dfc6 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/env/prune.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/env/prune.py
@@ -24,9 +24,9 @@ def prune(force):
         envs = get_configured_envs(check)
 
         if envs:
-            echo_info('{}:'.format(check))
+            echo_info(f'{check}:')
 
             for env in envs:
-                echo_waiting('Removing `{}`... '.format(env), nl=False, indent=True)
+                echo_waiting(f'Removing `{env}`... ', nl=False, indent=True)
                 remove_env_data(check, env)
                 echo_success('success!')
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/env/reload.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/env/reload.py
index b45c02cdbc3947..0ea1c3de556050 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/env/reload.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/env/reload.py
@@ -14,20 +14,20 @@ def reload_env(check, env):
     """Restart an Agent to detect environment changes."""
     envs = get_configured_envs(check)
     if not envs:
-        echo_failure('No active environments found for `{}`.'.format(check))
-        echo_info('See what is available to start via `ddev env ls {}`.'.format(check))
+        echo_failure(f'No active environments found for `{check}`.')
+        echo_info(f'See what is available to start via `ddev env ls {check}`.')
         abort()
 
     if not env:
         if len(envs) > 1:
-            echo_failure('Multiple active environments found for `{}`, please specify one.'.format(check))
+            echo_failure(f'Multiple active environments found for `{check}`, please specify one.')
             echo_info('See what is active via `ddev env ls`.')
             abort()
 
         env = envs[0]
 
     if env not in envs:
-        echo_failure('`{}` is not an active environment.'.format(env))
+        echo_failure(f'`{env}` is not an active environment.')
         echo_info('See what is active via `ddev env ls`.')
         abort()
@@ -38,4 +38,4 @@ def reload_env(check, env):
     if result.code:
         abort(result.stdout + result.stderr, code=result.code)
     else:
-        echo_success('Successfully reloaded environment `{}`!'.format(env))
+        echo_success(f'Successfully reloaded environment `{env}`!')
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/env/start.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/env/start.py
index d7a26ac13a4962..8292229ad79655 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/env/start.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/env/start.py
@@ -33,7 +33,7 @@
     '--python',
     '-py',
     type=click.INT,
-    help='The version of Python to use. Defaults to {} if no tox Python is specified.'.format(DEFAULT_PYTHON_VERSION),
+    help=f'The version of Python to use. Defaults to {DEFAULT_PYTHON_VERSION} if no tox Python is specified.',
 )
 @click.option('--dev/--prod', help='Whether to use the latest version of a check or what is shipped')
 @click.option('--base', is_flag=True, help='Whether to use the latest version of the base check or what is shipped')
@@ -51,7 +51,7 @@ def start(ctx, check, env, agent, python, dev, base, env_vars, profile_memory):
     """Start an environment."""
     if not file_exists(get_tox_file(check)):
-        abort('`{}` is not a testable check.'.format(check))
+        abort(f'`{check}` is not a testable check.')
 
     on_ci = running_on_ci()
 
@@ -60,7 +60,7 @@ def start(ctx, check, env, agent, python, dev, base, env_vars, profile_memory):
         core_dir = os.path.expanduser(ctx.obj.get('core', ''))
         if not dir_exists(core_dir):
             if core_dir:
-                abort('`{}` directory does not exist.'.format(core_dir))
+                abort(f'`{core_dir}` directory does not exist.')
             else:
                 abort('`core` config setting does not exist.')
 
@@ -71,9 +71,9 @@ def start(ctx, check, env, agent, python, dev, base, env_vars, profile_memory):
     envs = get_available_tox_envs(check, e2e_only=True)
 
     if env not in envs:
-        echo_failure('`{}` is not an available environment.'.format(env))
+        echo_failure(f'`{env}` is not an available environment.')
         echo_info('Available environments for {}:\n {}'.format(check, '\n '.join(envs)))
-        echo_info('You can also use `ddev env ls {}` to see available environments.'.format(check))
+        echo_info(f'You can also use `ddev env ls {check}` to see available environments.')
         abort()
 
     env_python_version = get_tox_env_python_version(env)
@@ -102,7 +102,7 @@ def start(ctx, check, env, agent, python, dev, base, env_vars, profile_memory):
             profile_memory = False
             echo_warning('No API key is set; collecting metrics about memory usage will be disabled.')
 
-    echo_waiting('Setting up environment `{}`... '.format(env), nl=False)
+    echo_waiting(f'Setting up environment `{env}`... ', nl=False)
     config, metadata, error = start_environment(check, env)
 
     if error:
@@ -119,13 +119,13 @@ def start(ctx, check, env, agent, python, dev, base, env_vars, profile_memory):
     env_type = metadata['env_type']
 
     agent_ver = agent or os.getenv('DDEV_E2E_AGENT', '6')
-    agent_build = ctx.obj.get('agent{}'.format(agent_ver), agent_ver)
+    agent_build = ctx.obj.get(f'agent{agent_ver}', agent_ver)
     if isinstance(agent_build, dict):
         agent_build = agent_build.get(env_type, env_type)
 
     interface = derive_interface(env_type)
     if interface is None:
-        echo_failure('`{}` is an unsupported environment type.'.format(env_type))
+        echo_failure(f'`{env_type}` is an unsupported environment type.')
         echo_waiting('Stopping the environment...')
         stop_environment(check, env, metadata=metadata)
         abort()
@@ -146,7 +146,7 @@ def start(ctx, check, env, agent, python, dev, base, env_vars, profile_memory):
             branch = get_current_branch()
         except Exception:
             branch = 'unknown'
-            echo_warning('Unable to detect the current Git branch, defaulting to `{}`.'.format(branch))
+            echo_warning(f'Unable to detect the current Git branch, defaulting to `{branch}`.')
 
         env_vars['DD_TRACEMALLOC_DEBUG'] = '1'
         env_vars['DD_TRACEMALLOC_WHITELIST'] = check
@@ -161,9 +161,9 @@ def start(ctx, check, env, agent, python, dev, base, env_vars, profile_memory):
 
         for instance in instances:
             instance['__memory_profiling_tags'] = [
-                'platform:{}'.format(plat),
-                'env:{}'.format(env),
-                'branch:{}'.format(branch),
+                f'platform:{plat}',
+                f'env:{env}',
+                f'branch:{branch}',
             ]
 
         if on_ci:
@@ -175,15 +175,15 @@ def start(ctx, check, env, agent, python, dev, base, env_vars, profile_memory):
         check, env, base_package, config, env_vars, metadata, agent_build, api_key, python, not bool(agent)
     )
 
-    echo_waiting('Updating `{}`... '.format(agent_build), nl=False)
+    echo_waiting(f'Updating `{agent_build}`... ', nl=False)
     environment.update_agent()
     echo_success('success!')
 
     echo_waiting('Detecting the major version... ', nl=False)
     environment.detect_agent_version()
-    echo_info('Agent {} detected'.format(environment.agent_version))
+    echo_info(f'Agent {environment.agent_version} detected')
 
-    echo_waiting('Writing configuration for `{}`... '.format(env), nl=False)
+    echo_waiting(f'Writing configuration for `{env}`... ', nl=False)
     environment.write_config()
     echo_success('success!')
 
@@ -239,7 +239,7 @@ def start(ctx, check, env, agent, python, dev, base, env_vars, profile_memory):
         echo_success('success!')
 
     if dev:
-        echo_waiting('Upgrading `{}` check to the development version... '.format(check), nl=False)
+        echo_waiting(f'Upgrading `{check}` check to the development version... ', nl=False)
         if environment.ENV_TYPE == 'local' and not click.confirm(editable_warning.format(environment.check)):
             echo_success('skipping')
         else:
@@ -283,10 +283,10 @@ def start(ctx, check, env, agent, python, dev, base, env_vars, profile_memory):
     echo_info(environment.config_file)
 
     echo_success('To run this check, do: ', nl=False)
-    echo_info('ddev env check {} {}'.format(check, env))
+    echo_info(f'ddev env check {check} {env}')
 
     echo_success('To stop this check, do: ', nl=False)
     if ctx.obj['repo_choice'] == 'extras' and not ctx.obj.get('repo') == 'extras':
-        echo_info('ddev -e env stop {} {}'.format(check, env))
+        echo_info(f'ddev -e env stop {check} {env}')
     else:
-        echo_info('ddev env stop {} {}'.format(check, env))
+        echo_info(f'ddev env stop {check} {env}')
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/env/stop.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/env/stop.py
index 73cd5818ab3fc1..8a8cefe75bc844 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/env/stop.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/env/stop.py
@@ -31,12 +31,12 @@ def stop(check, env):
         if all_checks:
             envs = get_configured_envs(check)
             if envs:
-                echo_success('{}:'.format(check))
+                echo_success(f'{check}:')
         else:
             envs = [env] if env else get_configured_envs(check)
 
         for env in envs:
-            echo_info('{}:'.format(env), indent=env_indent)
+            echo_info(f'{env}:', indent=env_indent)
             environment = create_interface(check, env)
 
             if on_ci and 'sampling_start_time' in environment.metadata:
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/env/test.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/env/test.py
index 273c2d861ce3d2..5707aba394fc14 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/env/test.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/env/test.py
@@ -28,7 +28,7 @@
     '--python',
     '-py',
     type=click.INT,
-    help='The version of Python to use. Defaults to {} if no tox Python is specified.'.format(DEFAULT_PYTHON_VERSION),
+    help=f'The version of Python to use. Defaults to {DEFAULT_PYTHON_VERSION} if no tox Python is specified.',
 )
 @click.option('--dev/--prod', default=None, help='Whether to use the latest version of a check or what is shipped')
 @click.option('--base', is_flag=True, help='Whether to use the latest version of the base check or what is shipped')
@@ -65,7 +65,7 @@ def test(ctx, checks, agent, python, dev, base, env_vars, new_env, profile_memor
     for check, envs in check_envs:
         if not envs:
-            echo_warning('No end-to-end environments found for `{}`'.format(check))
+            echo_warning(f'No end-to-end environments found for `{check}`')
             continue
 
         config_envs = get_configured_envs(check)
@@ -97,7 +97,7 @@ def test(ctx, checks, agent, python, dev, base, env_vars, new_env, profile_memor
             with EnvVars(persisted_env_vars):
                 ctx.invoke(
                     test_command,
-                    checks=['{}:{}'.format(check, env)],
+                    checks=[f'{check}:{env}'],
                     debug=DEBUG_OUTPUT,
                     e2e=True,
                     passenv=' '.join(persisted_env_vars) if persisted_env_vars else None,
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/meta/changes.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/meta/changes.py
index cbe76c68f8037f..d51c757ae06e7c 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/meta/changes.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/meta/changes.py
@@ -5,7 +5,6 @@
 from collections import defaultdict, deque
 
 import click
-from six import iteritems
 
 from ....subprocess import run_command
 from ....utils import chdir, write_file
@@ -50,7 +49,7 @@ def changes(since, out_file, eager):
             continue
 
         result = run_command(
-            'git show "--pretty=format:%an%n" -U0 {} */CHANGELOG.md'.format(commit_hash), capture=True, check=True
+            f'git show "--pretty=format:%an%n" -U0 {commit_hash} */CHANGELOG.md', capture=True, check=True
         )
 
         # Example:
@@ -95,7 +94,7 @@ def changes(since, out_file, eager):
                 line = line[1:]
                 # Demote releases to h3
                 if line.startswith('##'):
-                    line = '#{}'.format(line)
+                    line = f'#{line}'
                 additions.append(line)
             elif line.startswith('@@'):
                 break
@@ -115,12 +114,12 @@ def changes(since, out_file, eager):
             history_data[integration]['lines'].appendleft('')
             history_data[integration]['lines'].extendleft(additions)
 
-    output_lines = ['# Changes since {}'.format(since), '']
+    output_lines = [f'# Changes since {since}', '']
 
-    for integration, history in sorted(iteritems(history_data)):
+    for integration, history in sorted(history_data.items()):
         display_name = load_manifest(integration).get('display_name', integration)
-        output_lines.append('## {}'.format(display_name))
-        output_lines.append('released by: {}'.format(', '.join(sorted(history['releasers']))))
+        output_lines.append(f'## {display_name}')
+        output_lines.append(f"released by: {', '.join(sorted(history['releasers']))}")
         output_lines.append('')
 
         output_lines.extend(history['lines'])
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/meta/dashboard.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/meta/dashboard.py
index 7d0a37035a4fd6..5e3e9248467635 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/meta/dashboard.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/meta/dashboard.py
@@ -28,28 +28,28 @@ def dash():
 @click.pass_context
 def export(ctx, url, integration):
     if integration and integration not in get_valid_integrations():
-        abort('Unknown integration `{}`'.format(integration))
+        abort(f'Unknown integration `{integration}`')
 
     org = ctx.obj['org']
     if not org:
         abort('No `org` has been set')
 
     if org not in ctx.obj['orgs']:
-        abort('Selected org {} is not in `orgs`'.format(org))
+        abort(f'Selected org {org} is not in `orgs`')
 
     org = ctx.obj['orgs'][org]
 
     api_key = org.get('api_key')
     if not api_key:
-        abort('No `api_key` has been set for org `{}`'.format(org))
+        abort(f'No `api_key` has been set for org `{org}`')
 
     app_key = org.get('app_key')
     if not app_key:
-        abort('No `app_key` has been set for org `{}`'.format(org))
+        abort(f'No `app_key` has been set for org `{org}`')
 
     site = org.get('site')
     if not site:
-        abort('No `site` has been set for org `{}`'.format(org))
+        abort(f'No `site` has been set for org `{org}`')
 
     match = re.search(BOARD_ID_PATTERN.format(site=re.escape(site)), url)
     if match:
@@ -92,14 +92,14 @@ def export(ctx, url, integration):
         if new_file_name:
             file_name = new_file_name
 
-        file_name = '{}.json'.format(file_name.replace(' ', '_'))
+        file_name = f"{file_name.replace(' ', '_')}.json"
         location = path_join(get_root(), integration, 'assets', 'dashboards')
         ensure_dir_exists(location)
 
-        manifest['assets']['dashboards'][payload['board_title']] = 'assets/dashboards/{}'.format(file_name)
+        manifest['assets']['dashboards'][payload['board_title']] = f'assets/dashboards/{file_name}'
         write_manifest(manifest, integration)
     else:
-        file_name = '{}.json'.format(file_name.replace(' ', '_'))
+        file_name = f"{file_name.replace(' ', '_')}.json"
        location = os.getcwd()
 
     file_path = path_join(location, file_name)
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/meta/prometheus.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/meta/prometheus.py
index 9e96e11feeb64a..87148a73459b7e 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/meta/prometheus.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/meta/prometheus.py
@@ -8,10 +8,8 @@
 import click
 import pyperclip
 import requests
-from six import iteritems, itervalues
-from six.moves import input, range
 
-from ....utils import dir_exists, ensure_unicode, path_join, write_file_lines
+from ....utils import dir_exists, path_join, write_file_lines
 from ...constants import get_root
 from ..console import CONTEXT_SETTINGS, abort, echo_info, echo_success, echo_waiting, echo_warning
 
@@ -24,7 +22,7 @@ def sanitize_endpoint(endpoint):
     if not endpoint.startswith('http'):
-        endpoint = 'http://{}{}'.format('localhost' if endpoint.startswith(':') else '', endpoint)
+        endpoint = f"http://{'localhost' if endpoint.startswith(':') else ''}{endpoint}"
 
     return endpoint
@@ -78,7 +76,7 @@ def info(endpoint):
     num_counter = 0
     num_histogram = 0
 
-    for data in itervalues(metrics):
+    for data in metrics.values():
         metric_type = data.get('type')
 
         if metric_type == 'gauge':
@@ -89,19 +87,19 @@ def info(endpoint):
             num_histogram += 1
 
     if num_metrics:
-        echo_success('Number of metrics: {}'.format(num_metrics))
+        echo_success(f'Number of metrics: {num_metrics}')
     else:
         echo_warning('No metrics!')
         return
 
     if num_gauge:
-        echo_info('Type `gauge`: {}'.format(num_gauge))
+        echo_info(f'Type `gauge`: {num_gauge}')
 
     if num_counter:
-        echo_info('Type `counter`: {}'.format(num_counter))
+        echo_info(f'Type `counter`: {num_counter}')
 
     if num_histogram:
-        echo_info('Type `histogram`: {}'.format(num_histogram))
+        echo_info(f'Type `histogram`: {num_histogram}')
 
 
 @prom.command(
@@ -126,7 +124,7 @@ def parse(ctx, endpoint, check, here):
 
     endpoint = sanitize_endpoint(endpoint)
 
-    echo_waiting('Scraping `{}`...'.format(endpoint))
+    echo_waiting(f'Scraping `{endpoint}`...')
     metrics = parse_metrics(endpoint)
     num_metrics = len(metrics)
@@ -135,7 +133,7 @@ def parse(ctx, endpoint, check, here):
     echo_info(' s - Skip')
     echo_info(' q - Quit')
 
-    for i, (metric, data) in enumerate(sorted(iteritems(metrics)), 1):
+    for i, (metric, data) in enumerate(sorted(metrics.items()), 1):
         metric_parts = metric.split('_')
         metric_template = '{}'.join(metric_parts)
         num_separators = len(metric_parts) - 1
@@ -147,16 +145,16 @@ def parse(ctx, endpoint, check, here):
         num_options = len(metric_options)
 
         default_option = num_options
-        options_prompt = 'Choose an option (default {}, as-is): '.format(default_option)
+        options_prompt = f'Choose an option (default {default_option}, as-is): '
         options_text = get_options_text(metric_options)
 
         finished = False
         choice_error = ''
-        progress_status = '({} of {}) '.format(i, num_metrics)
+        progress_status = f'({i} of {num_metrics}) '
         indent = ' ' * len(progress_status)
 
         while not finished:
-            echo_success('\n{}{}'.format(progress_status, metric))
+            echo_success(f'\n{progress_status}{metric}')
 
             echo_success('Type: ', nl=False, indent=indent)
             echo_info(data.get('type', 'None'))
@@ -172,16 +170,12 @@ def parse(ctx, endpoint, check, here):
             echo_waiting(options_prompt, nl=False)
 
             if num_options >= 9:
-                choice = ensure_unicode(input())
+                choice = input()
             else:
                 # Terminals are odd and sometimes produce an erroneous null byte
                 choice = '\x00'
                 while choice == '\x00':
                     choice = click.getchar().strip()
-                try:
-                    choice = ensure_unicode(choice)
-                except UnicodeDecodeError:
-                    choice = repr(choice)
 
             if not choice:
                 choice = default_option
@@ -194,11 +188,11 @@ def parse(ctx, endpoint, check, here):
                 continue
             elif choice == 's':
                 echo_info('Skip')
-                echo_info('Skipped {}'.format(metric))
+                echo_info(f'Skipped {metric}')
                 break
             elif choice == 'q':
                 echo_info('Exit')
-                echo_warning('Exited at {}'.format(metric))
+                echo_warning(f'Exited at {metric}')
                 return
 
             try:
@@ -207,8 +201,8 @@ def parse(ctx, endpoint, check, here):
                 pass
 
             if choice not in range(1, num_options + 1):
-                echo_info(u'{}'.format(choice))
-                choice_error = u'`{}` is not a valid option.'.format(choice)
+                echo_info(f'{choice}')
+                choice_error = f'`{choice}` is not a valid option.'
                 continue
             else:
                 choice_error = ''
@@ -221,16 +215,16 @@ def parse(ctx, endpoint, check, here):
             finished = True
 
     metadata_file = path_join(output_dir, 'metadata.csv')
-    echo_waiting('\nWriting `{}`... '.format(metadata_file), nl=False)
+    echo_waiting(f'\nWriting `{metadata_file}`... ', nl=False)
 
-    metric_items = sorted(iteritems(metrics), key=lambda item: item[1]['dd_name'])
-    output_lines = ['{}\n'.format(METADATA_CSV_HEADER)]
+    metric_items = sorted(metrics.items(), key=lambda item: item[1]['dd_name'])
+    output_lines = [f'{METADATA_CSV_HEADER}\n']
     for _, data in metric_items:
         metric_name = data['dd_name']
         metric_type = TYPE_MAP.get(data.get('type'), '')
         metric_description = data.get('description', '')
         if ',' in metric_description:
-            metric_description = '"{}"'.format(metric_description)
+            metric_description = f'"{metric_description}"'
 
         output_lines.append(
             '{check}.{metric_name},{metric_type},,,,{metric_description},0,{check},\n'.format(
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/meta/scripts/metrics2md.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/meta/scripts/metrics2md.py
index 7787c148732090..2727e235d8b067 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/meta/scripts/metrics2md.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/meta/scripts/metrics2md.py
@@ -39,7 +39,7 @@ def metrics2md(check, fields):
     else:
         chosen_fields = set(fields)
         if chosen_fields - VALID_FIELDS:
-            abort('You must select only from the following fields: {}'.format(', '.join(VALID_FIELDS)))
+            abort(f"You must select only from the following fields: {', '.join(VALID_FIELDS)}")
 
         # Deduplicate and retain order
         old_fields = fields
@@ -69,4 +69,4 @@ def metrics2md(check, fields):
         md_table_rows.extend(rows)
 
     pyperclip.copy('\n'.join(md_table_rows))
-    echo_success('Successfully copied table with {} metric{}'.format(num_metrics, 's' if num_metrics > 1 else ''))
+    echo_success(f"Successfully copied table with {num_metrics} metric{'s' if num_metrics > 1 else ''}")
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/release/build.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/release/build.py
index 4b016dca6a9f45..80fcff4573ce3c 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/release/build.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/release/build.py
@@ -22,11 +22,11 @@ def build(check, sdist):
     else:
         check_dir = resolve_path(check)
         if not dir_exists(check_dir):
-            abort('`{}` is not an Agent-based Integration or Python package'.format(check))
+            abort(f'`{check}` is not an Agent-based Integration or Python package')
 
         check = basepath(check_dir)
 
-    echo_waiting('Building `{}`...'.format(check))
+    echo_waiting(f'Building `{check}`...')
 
     dist_dir = os.path.join(check_dir, 'dist')
     remove_path(dist_dir)
@@ -35,5 +35,5 @@ def build(check, sdist):
     if result.code != 0:
         abort(result.stdout, result.code)
 
-    echo_info('Build done, artifact(s) in: {}'.format(dist_dir))
+    echo_info(f'Build done, artifact(s) in: {dist_dir}')
     echo_success('Success!')
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/release/changelog.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/release/changelog.py
index 0f561449466d3a..e915c000d8a536 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/release/changelog.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/release/changelog.py
@@ -4,10 +4,10 @@
 import os
 from collections import namedtuple
 from datetime import datetime
+from io import StringIO
 
 import click
 from semver import parse_version_info
-from six import StringIO
 
 from ....utils import stream_file_lines, write_file
 from ...constants import CHANGELOG_TYPE_NONE, get_root
@@ -34,15 +34,15 @@ def changelog(ctx, check, version, old_version, initial, quiet, dry_run):
     This method is supposed to be used by other tasks and not directly.
     """
     if check and check not in get_valid_checks():
-        abort('Check `{}` is not an Agent-based Integration'.format(check))
+        abort(f'Check `{check}` is not an Agent-based Integration')
 
     # sanity check on the version provided
     cur_version = old_version or get_version_string(check)
     if parse_version_info(version.lstrip('v')) <= parse_version_info(cur_version.lstrip('v')):
-        abort('Current version is {}, cannot bump to {}'.format(cur_version, version))
+        abort(f'Current version is {cur_version}, cannot bump to {version}')
 
     if not quiet:
-        echo_info('Current version of check {}: {}, bumping to: {}'.format(check, cur_version, version))
+        echo_info(f'Current version of check {check}: {cur_version}, bumping to: {version}')
 
     # get the name of the current release tag
     target_tag = get_release_tag_string(check, cur_version)
@@ -53,7 +53,7 @@ def changelog(ctx, check, version, old_version, initial, quiet, dry_run):
     # for each PR get the title, we'll use it to populate the changelog
     pr_numbers = parse_pr_numbers(diff_lines)
     if not quiet:
-        echo_info('Found {} PRs merged since tag: {}'.format(len(pr_numbers), target_tag))
+        echo_info(f'Found {len(pr_numbers)} PRs merged since tag: {target_tag}')
 
     if initial:
         # Only use the first one
@@ -65,26 +65,26 @@ def changelog(ctx, check, version, old_version, initial, quiet, dry_run):
         try:
             payload = get_pr(pr_num, user_config)
         except Exception as e:
-            echo_failure('Unable to fetch info for PR #{}: {}'.format(pr_num, e))
+            echo_failure(f'Unable to fetch info for PR #{pr_num}: {e}')
             continue
 
         changelog_labels = get_changelog_types(payload)
 
         if not changelog_labels:
-            abort('No valid changelog labels found attached to PR #{}, please add one!'.format(pr_num))
+            abort(f'No valid changelog labels found attached to PR #{pr_num}, please add one!')
         elif len(changelog_labels) > 1:
-            abort('Multiple changelog labels found attached to PR #{}, please only use one!'.format(pr_num))
+            abort(f'Multiple changelog labels found attached to PR #{pr_num}, please only use one!')
 
         changelog_type = changelog_labels[0]
         if changelog_type == CHANGELOG_TYPE_NONE:
             if not quiet:
                 # No changelog entry for this PR
-                echo_info('Skipping PR #{} from changelog due to label'.format(pr_num))
+                echo_info(f'Skipping PR #{pr_num} from changelog due to label')
             continue
 
         author = payload.get('user', {}).get('login')
         author_url = payload.get('user', {}).get('html_url')
-        title = '[{}] {}'.format(changelog_type, payload.get('title'))
+        title = f"[{changelog_type}] {payload.get('title')}"
 
         entry = ChangelogEntry(pr_num, title, payload.get('html_url'), author, author_url, from_contributor(payload))
 
@@ -94,7 +94,7 @@ def changelog(ctx, check, version, old_version, initial, quiet, dry_run):
     new_entry = StringIO()
 
     # the header contains version and date
-    header = '## {} / {}\n'.format(version, datetime.utcnow().strftime('%Y-%m-%d'))
+    header = f"## {version} / {datetime.utcnow().strftime('%Y-%m-%d')}\n"
     new_entry.write(header)
 
     # one bullet point for each PR
@@ -102,8 +102,8 @@ def changelog(ctx, check, version, old_version, initial, quiet, dry_run):
     for entry in entries:
         thanks_note = ''
         if entry.from_contributor:
-            thanks_note = ' Thanks [{}]({}).'.format(entry.author, entry.author_url)
-        new_entry.write('* {}. See [#{}]({}).{}\n'.format(entry.title, entry.number, entry.url, thanks_note))
+            thanks_note = f' Thanks [{entry.author}]({entry.author_url}).'
+        new_entry.write(f'* {entry.title}. See [#{entry.number}]({entry.url}).{thanks_note}\n')
 
     new_entry.write('\n')
 
     # read the old contents
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/release/make.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/release/make.py
index bef87e199553b8..79e8a8a68ecb75 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/release/make.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/release/make.py
@@ -48,7 +48,7 @@ def make(ctx, checks, version, initial_release, skip_sign, sign_only):
     if not releasing_all:
         for check in checks:
             if check not in valid_checks:
-                abort('Check `{}` is not an Agent-based Integration'.format(check))
+                abort(f'Check `{check}` is not an Agent-based Integration')
 
     # don't run the task on the master branch
     if get_current_branch() == 'master':
@@ -75,7 +75,7 @@ def make(ctx, checks, version, initial_release, skip_sign, sign_only):
 
         # Initial releases will only bump if not already 1.0.0 so no need to always output
         if not initial_release:
-            echo_success('Check `{}`'.format(check))
+            echo_success(f'Check `{check}`')
 
         if version:
             # sanity check on the version provided
@@ -101,21 +101,21 @@ def make(ctx, checks, version, initial_release, skip_sign, sign_only):
                 if initial_release:
                     continue
                 else:
-                    abort('Current version is {}, cannot bump to {}'.format(cur_version, version))
+                    abort(f'Current version is {cur_version}, cannot bump to {version}')
         else:
             cur_version, changelog_types = ctx.invoke(changes, check=check, dry_run=True)
             if not changelog_types:
-                echo_warning('No changes for {}, skipping...'.format(check))
+                echo_warning(f'No changes for {check}, skipping...')
                 continue
             bump_function = get_bump_function(changelog_types)
             version = bump_function(cur_version)
 
        if initial_release:
-            echo_success('Check `{}`'.format(check))
+            echo_success(f'Check `{check}`')
 
         # update the version number
-        echo_info('Current version of check {}: {}'.format(check, cur_version))
-        echo_waiting('Bumping to {}... '.format(version), nl=False)
+        echo_info(f'Current version of check {check}: {cur_version}')
+        echo_waiting(f'Bumping to {version}... ', nl=False)
         update_version_module(check, cur_version, version)
         echo_success('success!')
 
@@ -147,7 +147,7 @@ def make(ctx, checks, version, initial_release, skip_sign, sign_only):
 
         # commit the changes.
         # do not use [ci skip] so releases get built https://docs.gitlab.com/ee/ci/yaml/#skipping-jobs
-        msg = '[Release] Bumped {} version to {}'.format(check, version)
+        msg = f'[Release] Bumped {check} version to {version}'
         git_commit(commit_targets, msg)
 
         if not initial_release:
@@ -163,7 +163,7 @@ def make(ctx, checks, version, initial_release, skip_sign, sign_only):
             commit_targets = update_link_metadata(updated_checks)
             git_commit(commit_targets, '[Release] Update metadata', force=True)
         except YubikeyException as e:
-            abort('A problem occurred while signing metadata: {}'.format(e))
+            abort(f'A problem occurred while signing metadata: {e}')
 
     # done
     echo_success('All done, remember to push to origin and open a PR to merge these changes on master')
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/release/show/changes.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/release/show/changes.py
index 900f4dfc91c71d..6d620f5536fc86 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/release/show/changes.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/release/show/changes.py
@@ -17,7 +17,7 @@ def changes(ctx, check, dry_run):
     """Show all the pending PRs for a given check."""
     if not dry_run and check and check not in get_valid_checks():
-        abort('Check `{}` is not an Agent-based Integration'.format(check))
+        abort(f'Check `{check}` is not an Agent-based Integration')
 
     # get the name of the current release tag
     cur_version = get_version_string(check)
@@ -29,7 +29,7 @@ def changes(ctx, check, dry_run):
     # for each PR get the title, we'll use it to populate the changelog
     pr_numbers = parse_pr_numbers(diff_lines)
     if not dry_run:
-        echo_info('Found {} PRs merged since tag: {}'.format(len(pr_numbers), target_tag))
+        echo_info(f'Found {len(pr_numbers)} PRs merged since tag: {target_tag}')
 
     user_config = ctx.obj
     if dry_run:
@@ -39,14 +39,14 @@ def changes(ctx, check, dry_run):
         try:
             payload = get_pr(pr_num, user_config)
         except Exception as e:
-            echo_failure('Unable to fetch info for PR #{}: {}'.format(pr_num, e))
+            echo_failure(f'Unable to fetch info for PR #{pr_num}: {e}')
             continue
 
         current_changelog_types = get_changelog_types(payload)
         if not current_changelog_types:
-            abort('No valid changelog labels found attached to PR #{}, please add one!'.format(pr_num))
+            abort(f'No valid changelog labels found attached to PR #{pr_num}, please add one!')
         elif len(current_changelog_types) > 1:
-            abort('Multiple changelog labels found attached to PR #{}, please only use one!'.format(pr_num))
+            abort(f'Multiple changelog labels found attached to PR #{pr_num}, please only use one!')
 
         current_changelog_type = current_changelog_types[0]
         if current_changelog_type != 'no-changelog':
@@ -58,18 +58,18 @@ def changes(ctx, check, dry_run):
         try:
             payload = get_pr(pr_num, user_config)
         except Exception as e:
-            echo_failure('Unable to fetch info for PR #{}: {}'.format(pr_num, e))
+            echo_failure(f'Unable to fetch info for PR #{pr_num}: {e}')
             continue
 
         changelog_types = get_changelog_types(payload)
 
         echo_success(payload.get('title'))
-        echo_info(' * Url: {}'.format(payload.get('html_url')))
+        echo_info(f" * Url: {payload.get('html_url')}")
 
         echo_info(' * Changelog status: ', nl=False)
         if not changelog_types:
             echo_warning('WARNING! No changelog labels attached.\n')
         elif len(changelog_types) > 1:
-            echo_warning('WARNING! Too many changelog labels attached: {}\n'.format(', '.join(changelog_types)))
+            echo_warning(f"WARNING! Too many changelog labels attached: {', '.join(changelog_types)}\n")
         else:
-            echo_success('{}\n'.format(changelog_types[0]))
+            echo_success(f'{changelog_types[0]}\n')
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/release/show/ready.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/release/show/ready.py
index 574a64cbd3ce80..66c749b1e92042 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/release/show/ready.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/release/show/ready.py
@@ -44,9 +44,7 @@ def ready(ctx, quiet):
                     cached_prs[pr_num] = changelog_labels
 
                     if not changelog_labels:
-                        echo_warning(
-                            'PR #{} has no changelog label attached, please add one! Skipping...'.format(pr_num)
-                        )
+                        echo_warning(f'PR #{pr_num} has no changelog label attached, please add one! Skipping...')
                         continue
 
                     if len(changelog_labels) > 1:
@@ -59,7 +57,7 @@ def ready(ctx, quiet):
                 if changelog_labels[0] != CHANGELOG_TYPE_NONE:
                     shippable_prs += 1
             except Exception as e:
-                echo_failure('Unable to fetch info for PR #{}: {}'.format(pr_num, e))
+                echo_failure(f'Unable to fetch info for PR #{pr_num}: {e}')
                 continue
 
     if shippable_prs:
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/release/tag.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/release/tag.py
index b98621593438ae..38b0d9dc521bb8 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/release/tag.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/release/tag.py
@@ -28,7 +28,7 @@ def tag(check, version, push, dry_run):
     valid_checks = get_valid_checks()
     if not tagging_all and check not in valid_checks:
-        abort('Check `{}` is not an Agent-based Integration'.format(check))
+        abort(f'Check `{check}` is not an Agent-based Integration')
 
     if tagging_all:
         if version:
@@ -41,7 +41,7 @@ def tag(check, version, push, dry_run):
     tagged = False
     for check in checks:
-        echo_info('{}:'.format(check))
+        echo_info(f'{check}:')
 
         # get the current version
         if not version:
@@ -49,7 +49,7 @@ def tag(check, version, push, dry_run):
 
         # get the tag name
         release_tag = get_release_tag_string(check, version)
-        echo_waiting('Tagging HEAD with {}... '.format(release_tag), indent=True, nl=False)
+        echo_waiting(f'Tagging HEAD with {release_tag}... ', indent=True, nl=False)
 
         if dry_run:
             version = None
@@ -61,7 +61,7 @@ def tag(check, version, push, dry_run):
         if result.code == 128 or 'already exists' in result.stderr:
             echo_warning('already exists')
         elif result.code != 0:
-            abort('\n{}{}'.format(result.stdout, result.stderr), code=result.code)
+            abort(f'\n{result.stdout}{result.stderr}', code=result.code)
         else:
             tagged = True
             echo_success('success!')
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/release/testable.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/release/testable.py
index 67812efefe72f7..123198a014efc9 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/release/testable.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/release/testable.py
@@ -2,14 +2,12 @@
 # All rights reserved
 # Licensed under a 3-clause BSD style license (see LICENSE)
 import time
-from collections import OrderedDict
 
 import click
 from semver import parse_version_info
-from six import iteritems
 
 from ....subprocess import run_command
-from ....utils import basepath, chdir, ensure_unicode, get_next
+from ....utils import basepath, chdir, get_next
 from ...constants import CHANGELOG_LABEL_PREFIX, CHANGELOG_TYPE_NONE, get_root
 from ...github import get_pr, get_pr_from_hash, get_pr_labels, get_pr_milestone, parse_pr_number
 from ...trello import TrelloClient
@@ -26,17 +24,17 @@ def validate_version(ctx, param, value):
         if len(parts) == 2:
             parts.append('0')
         version_info = parse_version_info('.'.join(parts))
-        return '{}.{}'.format(version_info.major, version_info.minor)
+        return f'{version_info.major}.{version_info.minor}'
     except ValueError:
         raise click.BadParameter('needs to be in semver format x.y[.z]')
 
 
 def create_trello_card(client, teams, pr_title, pr_url, pr_body, dry_run):
-    body = u'Pull request: {}\n\n{}'.format(pr_url, pr_body)
+    body = f'Pull request: {pr_url}\n\n{pr_body}'
 
     for team in teams:
         if dry_run:
-            echo_success('Will create a card for team {}: '.format(team), nl=False)
+            echo_success(f'Will create a card for team {team}: ', nl=False)
             echo_info(pr_title)
             continue
         creation_attempts = 3
@@ -51,7 +49,7 @@ def create_trello_card(client, teams, pr_title, pr_url, pr_body, dry_run):
                     time.sleep(wait_time)
                 elif error:
                     if attempt + 1 == creation_attempts:
-                        echo_failure('Error: {}'.format(error))
+                        echo_failure(f'Error: {error}')
                         break
 
                     wait_time = 2
@@ -61,7 +59,7 @@ def create_trello_card(client, teams, pr_title, pr_url, pr_body, dry_run):
                 )
                 time.sleep(wait_time)
             else:
-                echo_success('Created card for team {}: '.format(team), nl=False)
+                echo_success(f'Created card for team {team}: ', nl=False)
                 echo_info(response.json().get('url'))
                 break
@@ -95,7 +93,7 @@ def testable(ctx, start_id, agent_version, milestone, dry_run):
     root = get_root()
     repo = basepath(root)
     if repo not in ('integrations-core', 'datadog-agent'):
-        abort('Repo `{}` is unsupported.'.format(repo))
+        abort(f'Repo `{repo}` is unsupported.')
 
     if agent_version:
         current_agent_version = agent_version
@@ -104,9 +102,9 @@ def testable(ctx, start_id, agent_version, milestone, dry_run):
         current_agent_version = get_current_agent_version()
         echo_success(current_agent_version)
 
-    current_release_branch = '{}.x'.format(current_agent_version)
+    current_release_branch = f'{current_agent_version}.x'
     diff_target_branch = 'origin/master'
-    echo_info('Branch `{}` will be compared to `{}`.'.format(current_release_branch, diff_target_branch))
+    echo_info(f'Branch `{current_release_branch}` will be compared to `{diff_target_branch}`.')
 
     echo_waiting('Getting diff... ', nl=False)
     diff_command = 'git --no-pager log "--pretty=format:%H %s" {}..{}'
@@ -115,7 +113,7 @@ def testable(ctx, start_id, agent_version, milestone, dry_run):
         fetch_command = 'git fetch --dry'
         result = run_command(fetch_command, capture=True)
         if result.code:
-            abort('Unable to run {}.'.format(fetch_command))
+            abort(f'Unable to run {fetch_command}.')
 
         if current_release_branch in result.stderr or diff_target_branch in result.stderr:
             abort(
@@ -125,11 +123,11 @@ def testable(ctx, start_id, agent_version, milestone, dry_run):
             )
 
     # compare with the local tag first
-    reftag = '{}{}'.format('refs/tags/', current_release_branch)
+    reftag = f"{'refs/tags/'}{current_release_branch}"
     result = run_command(diff_command.format(reftag, diff_target_branch), capture=True)
     if result.code:
         # if it didn't work, compare with a branch.
-        origin_release_branch = 'origin/{}'.format(current_release_branch)
+        origin_release_branch = f'origin/{current_release_branch}'
         echo_failure('failed!')
         echo_waiting(
             'Local branch `{}` might not exist, trying `{}`... '.format(
@@ -151,26 +149,22 @@ def testable(ctx, start_id, agent_version, milestone, dry_run):
     num_changes = len(diff_data)
 
     if repo == 'integrations-core':
-        options = OrderedDict(
-            (('1', 'Integrations'), ('2', 'Containers'), ('3', 'Core'), ('4', 'Platform'), ('s', 'Skip'), ('q', 'Quit'))
-        )
+        options = {'1': 'Integrations', '2': 'Containers', '3': 'Core', '4': 'Platform', 's': 'Skip', 'q': 'Quit'}
     else:
-        options = OrderedDict(
-            (
-                ('1', 'Core'),
-                ('2', 'Containers'),
-                ('3', 'Logs'),
-                ('4', 'Platform'),
-                ('5', 'Process'),
-                ('6', 'Trace'),
-                ('7', 'Integrations'),
-                ('s', 'Skip'),
-                ('q', 'Quit'),
-            )
-        )
+        options = {
+            '1': 'Core',
+            '2': 'Containers',
+            '3': 'Logs',
+            '4': 'Platform',
+            '5': 'Process',
+            '6': 'Trace',
+            '7': 'Integrations',
+            's': 'Skip',
+            'q': 'Quit',
+        }
     default_option = get_next(options)
-    options_prompt = 'Choose an option (default {}): '.format(options[default_option])
-    options_text = '\n' + '\n'.join('{} - {}'.format(key, value) for key, value in iteritems(options))
+    options_prompt = f'Choose an option (default {options[default_option]}): '
+    options_text = '\n' + '\n'.join('{} - {}'.format(key, value) for key, value in options.items())
 
     commit_ids = set()
     user_config = ctx.obj
@@ -190,7 +184,7 @@ def testable(ctx, start_id, agent_version, milestone, dry_run):
                 )
                 continue
             elif api_response.status_code == 404:
-                echo_info('Skipping #{}, not a pull request...'.format(commit_id))
+                echo_info(f'Skipping #{commit_id}, not a pull request...')
                 continue
 
             api_response.raise_for_status()
@@ -214,12 +208,12 @@ def testable(ctx, start_id, agent_version, milestone, dry_run):
             except IndexError:
                 pr_data = {
                     'number': commit_hash,
-                    'html_url': 'https://github.com/DataDog/{}/commit/{}'.format(repo, commit_hash),
+                    'html_url': f'https://github.com/DataDog/{repo}/commit/{commit_hash}',
                 }
 
         commit_id = str(pr_data.get('number', ''))
         if commit_id and commit_id in commit_ids:
-            echo_info('Already seen PR #{}, skipping it.'.format(commit_id))
+            echo_info(f'Already seen PR #{commit_id}, skipping it.')
             continue
         commit_ids.add(commit_id)
@@ -227,9 +221,7 @@ def testable(ctx, start_id, agent_version, milestone, dry_run):
             if start_id == commit_id or start_id == commit_hash:
                 found_start_id = True
             else:
-                echo_info(
-                    'Looking for {}, skipping {}.'.format(format_commit_id(start_id), format_commit_id(commit_id))
-                )
+                echo_info(f'Looking for {format_commit_id(start_id)}, skipping {format_commit_id(commit_id)}.')
                 continue
 
         pr_labels = sorted(get_pr_labels(pr_data))
pr_labels = sorted(get_pr_labels(pr_data)) @@ -243,15 +235,15 @@ def testable(ctx, start_id, agent_version, milestone, dry_run): nochangelog_pr = False if documentation_pr and nochangelog_pr: - echo_info('Skipping documentation {}.'.format(format_commit_id(commit_id))) + echo_info(f'Skipping documentation {format_commit_id(commit_id)}.') continue pr_milestone = get_pr_milestone(pr_data) if milestone and pr_milestone != milestone: - echo_info('Looking for milestone {}, skipping {}.'.format(milestone, format_commit_id(commit_id))) + echo_info(f'Looking for milestone {milestone}, skipping {format_commit_id(commit_id)}.') continue - pr_url = pr_data.get('html_url', 'https://github.com/DataDog/{}/pull/{}'.format(repo, commit_id)) + pr_url = pr_data.get('html_url', f'https://github.com/DataDog/{repo}/pull/{commit_id}') pr_title = pr_data.get('title', commit_subject) pr_author = pr_data.get('user', {}).get('login', '') pr_body = pr_data.get('body', '') @@ -263,11 +255,11 @@ def testable(ctx, start_id, agent_version, milestone, dry_run): finished = False choice_error = '' - progress_status = '({} of {}) '.format(i, num_changes) + progress_status = f'({i} of {num_changes}) ' indent = ' ' * len(progress_status) while not finished: - echo_success('\n{}{}'.format(progress_status, pr_title)) + echo_success(f'\n{progress_status}{pr_title}') echo_success('Url: ', nl=False, indent=indent) echo_info(pr_url) @@ -296,17 +288,13 @@ def testable(ctx, start_id, agent_version, milestone, dry_run): choice = '\x00' while choice == '\x00': choice = click.getchar().strip() - try: - choice = ensure_unicode(choice) - except UnicodeDecodeError: - choice = repr(choice) if not choice: choice = default_option if choice not in options: echo_info(choice) - choice_error = u'`{}` is not a valid option.'.format(choice) + choice_error = f'`{choice}` is not a valid option.' continue else: choice_error = '' @@ -315,10 +303,10 @@ def testable(ctx, start_id, agent_version, milestone, dry_run): echo_info(value) if value == 'Skip': - echo_info('Skipped {}'.format(format_commit_id(commit_id))) + echo_info(f'Skipped {format_commit_id(commit_id)}') break elif value == 'Quit': - echo_warning('Exited at {}'.format(format_commit_id(commit_id))) + echo_warning(f'Exited at {format_commit_id(commit_id)}') return else: create_trello_card(trello, [value], pr_title, pr_url, pr_body, dry_run)
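The `testable.py` hunks above swap the `OrderedDict`-based option menus for plain dict literals. That is safe for `get_next(options)`, which supplies the default menu choice, only because dicts iterate in insertion order on the Python 3.7+ floor this package now targets. A minimal sketch of the idea; `first_key` is an illustrative stand-in, since `get_next`'s implementation is not part of this diff:

```python
# Illustrative stand-in for the get_next helper used above.
def first_key(mapping):
    return next(iter(mapping))

options = {'1': 'Integrations', '2': 'Containers', 's': 'Skip', 'q': 'Quit'}

# Dict literals preserve insertion order on Python 3.7+,
# so '1' is deterministically the default option.
assert first_key(options) == '1'
```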
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/release/upload.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/release/upload.py index 9add18f62965c1..2e05ffe0610d61 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/release/upload.py +++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/release/upload.py @@ -25,7 +25,7 @@ def upload(ctx, check, sdist, dry_run): else: check_dir = resolve_path(check) if not dir_exists(check_dir): - abort('`{}` is not an Agent-based Integration or Python package'.format(check)) + abort(f'`{check}` is not an Agent-based Integration or Python package') check = basepath(check_dir) @@ -37,7 +37,7 @@ def upload(ctx, check, sdist, dry_run): abort('This requires pypi.user and pypi.pass configuration. Please see `ddev config -h`.') auth_env_vars = {'TWINE_USERNAME': username, 'TWINE_PASSWORD': password} - echo_waiting('Building and publishing `{}` to PyPI...'.format(check)) + echo_waiting(f'Building and publishing `{check}` to PyPI...') with chdir(check_dir, env_vars=auth_env_vars): result = build_package(check_dir, sdist) @@ -45,7 +45,7 @@ def upload(ctx, check, sdist, dry_run): abort(result.stdout, result.code) echo_waiting('Uploading the package...') if not dry_run: - result = run_command('twine upload --skip-existing dist{}*'.format(os.path.sep)) + result = run_command(f'twine upload --skip-existing dist{os.path.sep}*') if result.code != 0: abort(code=result.code) diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/test.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/test.py index c99ce66d955c3e..78c57393891c81 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/test.py +++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/test.py @@ -16,9 +16,9 @@ def display_envs(check_envs): for check, envs in check_envs: - echo_success('`{}`:'.format(check)) + echo_success(f'`{check}`:') for e in envs: - echo_info(' {}'.format(e)) + echo_info(f' {e}') @click.command(context_settings=CONTEXT_SETTINGS, short_help='Run tests') @@ -106,16 +106,16 @@ def test( } if passenv: - test_env_vars['TOX_TESTENV_PASSENV'] += ' {}'.format(passenv) + test_env_vars['TOX_TESTENV_PASSENV'] += f' {passenv}' - test_env_vars['TOX_TESTENV_PASSENV'] += ' {}'.format(' '.join(get_ci_env_vars())) + test_env_vars['TOX_TESTENV_PASSENV'] += f" {' '.join(get_ci_env_vars())}" if color is not None: test_env_vars['PY_COLORS'] = '1' if color else '0' if e2e: test_env_vars[E2E_PARENT_PYTHON] = sys.executable - test_env_vars['TOX_TESTENV_PASSENV'] += ' {}'.format(E2E_PARENT_PYTHON) + test_env_vars['TOX_TESTENV_PASSENV'] += f' {E2E_PARENT_PYTHON}' check_envs = get_tox_envs(checks, style=style, format_style=format_style, benchmark=bench, changed_only=changed) tests_ran = False @@ -153,7 +153,7 @@ def test( test_env_vars['PYTEST_ADDOPTS'] = pytest_options if verbose: - echo_info('pytest options: `{}`'.format(test_env_vars['PYTEST_ADDOPTS'])) + echo_info(f"pytest options: `{test_env_vars['PYTEST_ADDOPTS']}`") with chdir(os.path.join(root, check), env_vars=test_env_vars): if format_style: @@ -167,7 +167,7 @@ def test( else: test_type_display = 'tests' - wait_text = '{}Running {} for `{}`'.format(output_separator, test_type_display, check) + wait_text = f'{output_separator}Running {test_type_display} for `{check}`' echo_waiting(wait_text) echo_waiting('-' * len(wait_text)) diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate/agent_reqs.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate/agent_reqs.py index f4199dc9d2d464..2ea569ce208bab 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate/agent_reqs.py +++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate/agent_reqs.py @@ -30,17 +30,17 @@ def agent_reqs(): pinned_version = agent_reqs_content.get(package_name) if package_name not in agent_reqs_content: unreleased_checks += 1 - echo_warning('{} has not yet been released'.format(check_name)) + echo_warning(f'{check_name} has not yet been released') elif check_version != pinned_version: failed_checks += 1 - echo_failure("{} has version {} but is pinned to {}".format(check_name, check_version, pinned_version)) + echo_failure(f"{check_name} has version {check_version} but is pinned to {pinned_version}") else:
ok_checks += 1 if ok_checks: - echo_success("{} correctly pinned checks".format(ok_checks)) + echo_success(f"{ok_checks} correctly pinned checks") if unreleased_checks: - echo_warning("{} unreleased checks".format(unreleased_checks)) + echo_warning(f"{unreleased_checks} unreleased checks") if failed_checks: - echo_failure("{} checks out of sync".format(failed_checks)) + echo_failure(f"{failed_checks} checks out of sync") abort() diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate/config.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate/config.py index 210c7da644bc60..a6bcbaf0cb918b 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate/config.py +++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate/config.py @@ -3,7 +3,6 @@ # Licensed under a 3-clause BSD style license (see LICENSE) import click import yaml -from six import PY2 from datadog_checks.dev.tooling.config_validator.validator import validate_config from datadog_checks.dev.tooling.config_validator.validator_errors import SEVERITY_ERROR, SEVERITY_WARNING @@ -55,12 +54,6 @@ def config(ctx, check, sync): display() continue - # Just use six to make it easier to search for occurrences of text we need to remove when we drop Python 2 - if PY2: - check_display_queue.append( - lambda **kwargs: echo_failure('Dictionary key order is only guaranteed in Python 3.7.0+', **kwargs) - ) - file_counter.append(None) # source is the default file name @@ -85,7 +78,7 @@ def config(ctx, check, sync): files_failed[spec_path] = True check_display_queue.append( lambda **kwargs: echo_failure( - 'Spec name `{}` should be `{}`'.format(spec.data['name'], display_name), **kwargs + f"Spec name `{spec.data['name']}` should be `{display_name}`", **kwargs ) ) @@ -106,12 +99,12 @@ def config(ctx, check, sync): files_failed[example_file_path] = True check_display_queue.append( lambda example_file=example_file, **kwargs: echo_failure( - 'File `{}` needs to be synced'.format(example_file), **kwargs + f'File `{example_file}` needs to be synced', **kwargs ) ) if check_display_queue: - echo_info('{}:'.format(check)) + echo_info(f'{check}:') for display in check_display_queue: display(indent=True) @@ -124,16 +117,16 @@ def config(ctx, check, sync): click.echo() if files_failed: - echo_failure('Files with errors: {}'.format(files_failed)) + echo_failure(f'Files with errors: {files_failed}') if files_warned: - echo_warning('Files with warnings: {}'.format(files_warned)) + echo_warning(f'Files with warnings: {files_warned}') if files_passed: if files_failed or files_warned: - echo_success('Files valid: {}'.format(files_passed)) + echo_success(f'Files valid: {files_passed}') else: - echo_success('All {} configuration files are valid!'.format(num_files)) + echo_success(f'All {num_files} configuration files are valid!') if files_failed: abort() @@ -154,7 +147,7 @@ def validate_config_legacy(check, check_display_queue, files_failed, files_warne # We must convert to text here to free Exception object before it goes out of scope error = str(e) - check_display_queue.append(lambda: echo_info('{}:'.format(file_name), indent=True)) + check_display_queue.append(lambda: echo_info(f'{file_name}:', indent=True)) check_display_queue.append(lambda: echo_failure('Invalid YAML -', indent=FILE_INDENT)) check_display_queue.append(lambda: echo_info(error, indent=FILE_INDENT * 2)) continue @@ -184,5 +177,5 @@ def validate_config_legacy(check, check_display_queue, files_failed, files_warne 
file_display_queue.append(lambda: echo_failure('No default instance', indent=FILE_INDENT)) if file_display_queue: - check_display_queue.append(lambda x=file_name: echo_info('{}:'.format(x), indent=True)) + check_display_queue.append(lambda x=file_name: echo_info(f'{x}:', indent=True)) check_display_queue.extend(file_display_queue)
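A recurring pattern in these validator changes, visible again in `dashboards.py` just below: `json.JSONDecodeError` has been a builtin member of the `json` module since Python 3.5 (replacing the `....compat` shim), and `object_pairs_hook=OrderedDict` is dropped because `json.loads` already returns dicts that preserve document key order on Python 3.7+. A standalone sketch of both points:

```python
import json

# Plain dicts preserve insertion order on Python 3.7+,
# so key order survives a round trip without OrderedDict.
decoded = json.loads('{"b": 1, "a": 2}')
assert list(decoded) == ['b', 'a']

# json.JSONDecodeError (a ValueError subclass) replaces the old compat import.
try:
    json.loads('not json')
except json.JSONDecodeError as e:
    print(f'invalid json: {e}')
```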
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate/dashboards.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate/dashboards.py index efe054cacf3f52..2138dbfb754640 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate/dashboards.py +++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate/dashboards.py @@ -3,11 +3,9 @@ # Licensed under a 3-clause BSD style license (see LICENSE) import json import os -from collections import OrderedDict import click -from ....compat import JSONDecodeError from ....utils import file_exists, read_file from ...constants import get_root from ...utils import get_valid_integrations, load_manifest @@ -34,19 +32,19 @@ def dashboards(): dashboard_file = os.path.join(root, check_name, *dashboard_relative_location.split('/')) if not file_exists(dashboard_file): - echo_info('{}... '.format(check_name), nl=False) + echo_info(f'{check_name}... ', nl=False) echo_info(' FAILED') - echo_failure(' {} does not exist'.format(dashboard_file)) + echo_failure(f' {dashboard_file} does not exist') failed_checks += 1 continue try: - decoded = json.loads(read_file(dashboard_file).strip(), object_pairs_hook=OrderedDict) - except JSONDecodeError as e: + decoded = json.loads(read_file(dashboard_file).strip()) + except json.JSONDecodeError as e: failed_checks += 1 - echo_info('{}... '.format(check_name), nl=False) + echo_info(f'{check_name}... ', nl=False) echo_failure(' FAILED') - echo_failure(' invalid json: {}'.format(e)) + echo_failure(f' invalid json: {e}') continue # Confirm the dashboard payload comes from the old API for now @@ -65,7 +63,7 @@ def dashboards(): if file_failed: failed_checks += 1 # Display detailed info if file is invalid - echo_info('{}... '.format(check_name), nl=False) + echo_info(f'{check_name}... ', nl=False) echo_failure(' FAILED'.format(check_name)) for display_func, message in display_queue: display_func(message) @@ -73,7 +71,7 @@ ok_checks += 1 if ok_checks: - echo_success("{} valid files".format(ok_checks)) + echo_success(f"{ok_checks} valid files") if failed_checks: - echo_failure("{} invalid files".format(failed_checks)) + echo_failure(f"{failed_checks} invalid files") abort() diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate/dep.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate/dep.py index c543ea0510c2de..17e92658fdd421 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate/dep.py +++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate/dep.py @@ -4,7 +4,6 @@ import os import click -from six import iteritems from ...constants import get_agent_requirements, get_root from ...requirements import make_catalog, read_packages @@ -13,11 +12,11 @@ def display_multiple_attributes(attributes, message): echo_failure(message) - for attribute, checks in sorted(iteritems(attributes)): + for attribute, checks in sorted(attributes.items()): if len(checks) == 1: - echo_info(' {}: {}'.format(attribute, checks[0])) + echo_info(f' {attribute}: {checks[0]}') elif len(checks) == 2: - echo_info(' {}: {} and {}'.format(attribute, checks[0], checks[1])) + echo_info(f' {attribute}: {checks[0]} and {checks[1]}') else: remaining = len(checks) - 2 echo_info( @@ -58,7 +57,7 @@ def dep(): failed = True have_multiple_versions.add(package.name) - display_multiple_attributes(versions, 'Multiple versions found for package `{}`:'.format(package.name)) + display_multiple_attributes(versions, f'Multiple versions found for package `{package.name}`:') markers = catalog.get_package_markers(package) if len(markers) > 1: @@ -68,7 +67,7 @@ def dep(): failed = True have_multiple_markers.add(package.name) - display_multiple_attributes(markers, 'Multiple markers found for package `{}`:'.format(package)) + display_multiple_attributes(markers, f'Multiple markers found for package `{package}`:') # Check embedded env compatibility agent_req_file = get_agent_requirements() @@ -89,8 +88,8 @@ def dep(): package.name, check_name ) ) - echo_info(' have: {}'.format(embedded_deps[package.name])) - echo_info(' want: {}'.format(package)) + echo_info(f' have: {embedded_deps[package.name]}') + echo_info(f' want: {package}') if failed: abort() diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate/manifest.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate/manifest.py index 71b1ebb689c665..e88b02e783e4f9 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate/manifest.py +++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate/manifest.py @@ -5,12 +5,9 @@ import os import re import uuid -from collections import OrderedDict import click -from six import string_types -from ....compat import JSONDecodeError from ....utils import file_exists, read_file, write_file from ...constants import get_root from ...utils import parse_version_parts @@ -76,44 +73,44 @@ def manifest(ctx, fix, include_extras): file_fixed = False try: - decoded = json.loads(read_file(manifest_file).strip(), object_pairs_hook=OrderedDict) - except JSONDecodeError as e: + decoded = json.loads(read_file(manifest_file).strip()) + except json.JSONDecodeError as e: failed_checks += 1 - echo_info("{}/manifest.json... ".format(check_name), nl=False) + echo_info(f"{check_name}/manifest.json... 
", nl=False) echo_failure("FAILED") - echo_failure(' invalid json: {}'.format(e)) + echo_failure(f' invalid json: {e}') continue # attributes are valid attrs = set(decoded) for attr in sorted(attrs - ALL_ATTRIBUTES): file_failures += 1 - display_queue.append((echo_failure, ' Attribute `{}` is invalid'.format(attr))) + display_queue.append((echo_failure, f' Attribute `{attr}` is invalid')) for attr in sorted(REQUIRED_ATTRIBUTES - attrs): file_failures += 1 - display_queue.append((echo_failure, ' Attribute `{}` is required'.format(attr))) + display_queue.append((echo_failure, f' Attribute `{attr}` is required')) for attr in sorted(REQUIRED_ASSET_ATTRIBUTES - set(decoded.get('assets', {}))): file_failures += 1 - display_queue.append((echo_failure, ' Attribute `{}` under `assets` is required'.format(attr))) + display_queue.append((echo_failure, f' Attribute `{attr}` under `assets` is required')) # guid guid = decoded.get('guid') if guid in all_guids: file_failures += 1 - output = ' duplicate `guid`: `{}` from `{}`'.format(guid, all_guids[guid]) + output = f' duplicate `guid`: `{guid}` from `{all_guids[guid]}`' if fix: new_guid = uuid.uuid4() all_guids[new_guid] = check_name decoded['guid'] = new_guid display_queue.append((echo_warning, output)) - display_queue.append((echo_success, ' new `guid`: {}'.format(new_guid))) + display_queue.append((echo_success, f' new `guid`: {new_guid}')) file_failures -= 1 file_fixed = True else: display_queue.append((echo_failure, output)) - elif not guid or not isinstance(guid, string_types): + elif not guid or not isinstance(guid, str): file_failures += 1 output = ' required non-null string: guid' if fix: @@ -122,7 +119,7 @@ def manifest(ctx, fix, include_extras): decoded['guid'] = new_guid display_queue.append((echo_warning, output)) - display_queue.append((echo_success, ' new `guid`: {}'.format(new_guid))) + display_queue.append((echo_success, f' new `guid`: {new_guid}')) file_failures -= 1 file_fixed = True @@ -141,16 +138,14 @@ def manifest(ctx, fix, include_extras): if not manifest_version: output = ' required non-null string: manifest_version' else: - output = ' invalid `manifest_version`: {}'.format(manifest_version) + output = f' invalid `manifest_version`: {manifest_version}' if fix: version_parts = parse_version_parts(correct_manifest_version) decoded['manifest_version'] = correct_manifest_version display_queue.append((echo_warning, output)) - display_queue.append( - (echo_success, ' new `manifest_version`: {}'.format(correct_manifest_version)) - ) + display_queue.append((echo_success, f' new `manifest_version`: {correct_manifest_version}')) file_failures -= 1 file_fixed = True @@ -179,15 +174,13 @@ def manifest(ctx, fix, include_extras): elif about_exists: file_failures += 1 - output = ' outdated `manifest_version`: {}'.format(manifest_version) + output = f' outdated `manifest_version`: {manifest_version}' if fix: decoded['manifest_version'] = correct_manifest_version display_queue.append((echo_warning, output)) - display_queue.append( - (echo_success, ' new `manifest_version`: {}'.format(correct_manifest_version)) - ) + display_queue.append((echo_success, f' new `manifest_version`: {correct_manifest_version}')) if 'version' in decoded: del decoded['version'] @@ -206,7 +199,7 @@ def manifest(ctx, fix, include_extras): if not version: display_queue.append((echo_failure, ' required non-null string: version')) else: - display_queue.append((echo_failure, ' invalid `version`: {}'.format(version))) + display_queue.append((echo_failure, f' invalid 
`version`: {version}')) # integration_id integration_id = decoded.get('integration_id') @@ -221,13 +214,13 @@ def manifest(ctx, fix, include_extras): maintainer = decoded.get('maintainer') if maintainer != correct_maintainer: file_failures += 1 - output = ' incorrect `maintainer`: {}'.format(maintainer) + output = f' incorrect `maintainer`: {maintainer}' if fix: decoded['maintainer'] = correct_maintainer display_queue.append((echo_warning, output)) - display_queue.append((echo_success, ' new `maintainer`: {}'.format(correct_maintainer))) + display_queue.append((echo_success, f' new `maintainer`: {correct_maintainer}')) file_failures -= 1 file_fixed = True @@ -237,15 +230,15 @@ def manifest(ctx, fix, include_extras): # name correct_name = check_name name = decoded.get('name') - if not isinstance(name, string_types) or name.lower() != correct_name.lower(): + if not isinstance(name, str) or name.lower() != correct_name.lower(): file_failures += 1 - output = ' incorrect `name`: {}'.format(name) + output = f' incorrect `name`: {name}' if fix: decoded['name'] = correct_name display_queue.append((echo_warning, output)) - display_queue.append((echo_success, ' new `name`: {}'.format(correct_name))) + display_queue.append((echo_success, f' new `name`: {correct_name}')) file_failures -= 1 file_fixed = True @@ -254,7 +247,7 @@ def manifest(ctx, fix, include_extras): # short_description short_description = decoded.get('short_description') - if not short_description or not isinstance(short_description, string_types): + if not short_description or not isinstance(short_description, str): file_failures += 1 display_queue.append((echo_failure, ' required non-null string: short_description')) if len(short_description) > 80: @@ -266,13 +259,13 @@ def manifest(ctx, fix, include_extras): support = decoded.get('support') if support != correct_support: file_failures += 1 - output = ' incorrect `support`: {}'.format(support) + output = f' incorrect `support`: {support}' if fix: decoded['support'] = correct_support display_queue.append((echo_warning, output)) - display_queue.append((echo_success, ' new `support`: {}'.format(correct_support))) + display_queue.append((echo_success, f' new `support`: {correct_support}')) file_failures -= 1 file_fixed = True @@ -290,13 +283,11 @@ def manifest(ctx, fix, include_extras): unknown_systems = sorted(set(supported_os) - known_systems) if unknown_systems: file_failures += 1 - display_queue.append( - (echo_failure, ' unknown `supported_os`: {}'.format(', '.join(unknown_systems))) - ) + display_queue.append((echo_failure, f" unknown `supported_os`: {', '.join(unknown_systems)}")) # public_title public_title = decoded.get('public_title') - if not public_title or not isinstance(public_title, string_types): + if not public_title or not isinstance(public_title, str): file_failures += 1 display_queue.append((echo_failure, ' required non-null string: public_title')) else: @@ -312,7 +303,7 @@ def manifest(ctx, fix, include_extras): if not (correct_start and correct_end and overlap_enough): file_failures += 1 - display_queue.append((echo_failure, ' invalid `public_title`: {}'.format(public_title))) + display_queue.append((echo_failure, f' invalid `public_title`: {public_title}')) # categories categories = decoded.get('categories') @@ -323,13 +314,13 @@ def manifest(ctx, fix, include_extras): # type correct_integration_types = ['check', 'crawler'] integration_type = decoded.get('type') - if not integration_type or not isinstance(integration_type, string_types): + if not integration_type 
or not isinstance(integration_type, str): file_failures += 1 output = ' required non-null string: type' display_queue.append((echo_failure, output)) elif integration_type not in correct_integration_types: file_failures += 1 - output = ' invalid `type`: {}'.format(integration_type) + output = f' invalid `type`: {integration_type}' display_queue.append((echo_failure, output)) # is_public @@ -343,7 +334,7 @@ def manifest(ctx, fix, include_extras): decoded['is_public'] = correct_is_public display_queue.append((echo_warning, output)) - display_queue.append((echo_success, ' new `is_public`: {}'.format(correct_is_public))) + display_queue.append((echo_success, f' new `is_public`: {correct_is_public}')) file_failures -= 1 file_fixed = True @@ -353,7 +344,7 @@ def manifest(ctx, fix, include_extras): if file_failures > 0: failed_checks += 1 # Display detailed info if file invalid - echo_info("{}/manifest.json... ".format(check_name), nl=False) + echo_info(f"{check_name}/manifest.json... ", nl=False) echo_failure("FAILED") for display_func, message in display_queue: display_func(message) @@ -361,20 +352,20 @@ def manifest(ctx, fix, include_extras): ok_checks += 1 if fix and file_fixed: - new_manifest = '{}\n'.format(json.dumps(decoded, indent=2, separators=(',', ': '))) + new_manifest = f"{json.dumps(decoded, indent=2, separators=(',', ': '))}\n" write_file(manifest_file, new_manifest) # Display detailed info if file has been completely fixed if file_failures == 0: fixed_checks += 1 - echo_info("{}/manifest.json... ".format(check_name), nl=False) + echo_info(f"{check_name}/manifest.json... ", nl=False) echo_success("FIXED") for display_func, message in display_queue: display_func(message) if ok_checks: - echo_success("{} valid files".format(ok_checks)) + echo_success(f"{ok_checks} valid files") if fixed_checks: - echo_info("{} fixed files".format(fixed_checks)) + echo_info(f"{fixed_checks} fixed files") if failed_checks: - echo_failure("{} invalid files".format(failed_checks)) + echo_failure(f"{failed_checks} invalid files") abort() diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate/metadata.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate/metadata.py index c48eb102d2679b..262d0cc8574004 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate/metadata.py +++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate/metadata.py @@ -7,7 +7,6 @@ from io import open import click -from six import PY2, iteritems, string_types from ...utils import get_metadata_file, get_metric_sources, load_manifest from ..console import CONTEXT_SETTINGS, abort, echo_failure, echo_warning @@ -178,7 +177,7 @@ def normalize_metric_name(metric_name): Function not exported as a util, as this is different than AgentCheck.normalize. This function just makes sure that whatever is in the metadata.csv is understandable by the backend. 
""" - if not isinstance(metric_name, string_types): + if not isinstance(metric_name, str): metric_name = str(metric_name) metric_name = METRIC_REPLACEMENT.sub("_", metric_name) return METRIC_DOTUNDERSCORE_CLEANUP.sub(".", metric_name).strip("_") @@ -196,7 +195,7 @@ def metadata(check): if check: if check not in metric_sources: - abort('Metadata file `{}` does not exist.'.format(get_metadata_file(check))) + abort(f'Metadata file `{get_metadata_file(check)}` does not exist.') metric_sources = [check] else: metric_sources = sorted(metric_sources) @@ -223,42 +222,31 @@ def metadata(check): duplicate_set = set() metric_prefix_error_shown = False - # Python 2 csv module does not support unicode - with open(metadata_file, 'rb' if PY2 else 'r', encoding=None if PY2 else 'utf-8') as f: + with open(metadata_file, 'r', encoding='utf-8') as f: reader = csv.DictReader(f, delimiter=',') # Read header - if PY2: - reader._fieldnames = [key.decode('utf-8') for key in reader.fieldnames] - else: - reader._fieldnames = reader.fieldnames + reader._fieldnames = reader.fieldnames for line, row in enumerate(reader, 2): # Number of rows is correct. Since metric is first in the list, should be safe to access if len(row) != len(ALL_HEADERS): errors = True - echo_failure( - '{}:{} {} Has the wrong amount of columns'.format(current_check, line, row['metric_name']) - ) + echo_failure(f"{current_check}:{line} {row['metric_name']} Has the wrong amount of columns") continue - if PY2: - for key, value in iteritems(row): - if value is not None: - row[key] = value.decode('utf-8') - # all headers exist, no invalid headers all_keys = set(row) if all_keys != ALL_HEADERS: invalid_headers = all_keys.difference(ALL_HEADERS) if invalid_headers: errors = True - echo_failure('{}:{} Invalid column {}'.format(current_check, line, invalid_headers)) + echo_failure(f'{current_check}:{line} Invalid column {invalid_headers}') missing_headers = ALL_HEADERS.difference(all_keys) if missing_headers: errors = True - echo_failure('{}:{} Missing columns {}'.format(current_check, line, missing_headers)) + echo_failure(f'{current_check}:{line} Missing columns {missing_headers}') continue @@ -267,9 +255,7 @@ def metadata(check): duplicate_set.add(row['metric_name']) else: errors = True - echo_failure( - '{}:{} `{}` is a duplicate metric_name'.format(current_check, line, row['metric_name']) - ) + echo_failure(f"{current_check}:{line} `{row['metric_name']}` is a duplicate metric_name") normalized_metric_name = normalize_metric_name(row['metric_name']) if row['metric_name'] != normalized_metric_name: @@ -289,26 +275,22 @@ def metadata(check): errors = True if not metric_prefix_error_shown and current_check not in PROVIDER_INTEGRATIONS: metric_prefix_error_shown = True - echo_failure('{}:{} metric_prefix does not exist in manifest'.format(current_check, line)) + echo_failure(f'{current_check}:{line} metric_prefix does not exist in manifest') # metric_type header if row['metric_type'] and row['metric_type'] not in VALID_METRIC_TYPE: errors = True - echo_failure( - '{}:{} `{}` is an invalid metric_type.'.format(current_check, line, row['metric_type']) - ) + echo_failure(f"{current_check}:{line} `{row['metric_type']}` is an invalid metric_type.") # unit_name header if row['unit_name'] and row['unit_name'] not in VALID_UNIT_NAMES: errors = True - echo_failure('{}:{} `{}` is an invalid unit_name.'.format(current_check, line, row['unit_name'])) + echo_failure(f"{current_check}:{line} `{row['unit_name']}` is an invalid unit_name.") # orientation header if 
row['orientation'] and row['orientation'] not in VALID_ORIENTATION: errors = True - echo_failure( - '{}:{} `{}` is an invalid orientation.'.format(current_check, line, row['orientation']) - ) + echo_failure(f"{current_check}:{line} `{row['orientation']}` is an invalid orientation.") # empty required fields for header in REQUIRED_HEADERS: @@ -330,14 +312,14 @@ def metadata(check): errors = True echo_failure('{}: interval should be an int, found "{}"'.format(current_check, row['interval'])) - for header, count in iteritems(empty_count): + for header, count in empty_count.items(): errors = True - echo_failure('{}: {} is empty in {} rows.'.format(current_check, header, count)) + echo_failure(f'{current_check}: {header} is empty in {count} rows.') - for header, count in iteritems(empty_warning_count): - echo_warning('{}: {} is empty in {} rows.'.format(current_check, header, count)) + for header, count in empty_warning_count.items(): + echo_warning(f'{current_check}: {header} is empty in {count} rows.') - for prefix, count in iteritems(metric_prefix_count): + for prefix, count in metric_prefix_count.items(): # Don't spam this warning when we're validating everything if check: echo_warning(
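The `metadata.py` changes above also drop the Python 2 byte-mode handling around `csv.DictReader`; on Python 3 the csv module consumes text streams directly, so the per-field `.decode('utf-8')` pass is unnecessary. A self-contained sketch of the resulting pattern (the header and rows here are made up for illustration):

```python
import csv
import io

# csv.DictReader works on text in Python 3; no byte decoding required.
f = io.StringIO('metric_name,metric_type\nfoo.bar,gauge\n')
for line, row in enumerate(csv.DictReader(f), 2):  # header occupies line 1
    print(line, row['metric_name'], row['metric_type'])
```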
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate/service_checks.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate/service_checks.py index 83ccdabf94034b..0fbd07b1a5ac54 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate/service_checks.py +++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate/service_checks.py @@ -3,12 +3,9 @@ # Licensed under a 3-clause BSD style license (see LICENSE) import json import os -from collections import OrderedDict import click -from six import string_types -from ....compat import JSONDecodeError from ....utils import file_exists, read_file from ...constants import get_root from ...utils import get_valid_integrations, load_manifest, parse_version_parts @@ -33,19 +30,19 @@ def service_checks(): service_checks_file = os.path.join(root, check_name, *service_check_relative.split('/')) if not file_exists(service_checks_file): - echo_info('{}/service_checks.json... '.format(check_name), nl=False) + echo_info(f'{check_name}/service_checks.json... ', nl=False) echo_failure('FAILED') echo_failure(' service_checks.json file does not exist') failed_checks += 1 continue try: - decoded = json.loads(read_file(service_checks_file).strip(), object_pairs_hook=OrderedDict) - except JSONDecodeError as e: + decoded = json.loads(read_file(service_checks_file).strip()) + except json.JSONDecodeError as e: failed_checks += 1 - echo_info('{}/service_checks.json... '.format(check_name), nl=False) + echo_info(f'{check_name}/service_checks.json... ', nl=False) echo_failure('FAILED') - echo_failure(' invalid json: {}'.format(e)) + echo_failure(f' invalid json: {e}') continue unique_names = set() @@ -55,10 +52,10 @@ def service_checks(): attrs = set(service_check) for attr in sorted(attrs - REQUIRED_ATTRIBUTES): file_failed = True - display_queue.append((echo_failure, ' Attribute `{}` is invalid'.format(attr))) + display_queue.append((echo_failure, f' Attribute `{attr}` is invalid')) for attr in sorted(REQUIRED_ATTRIBUTES - attrs): file_failed = True - display_queue.append((echo_failure, ' Attribute `{}` is required'.format(attr))) + display_queue.append((echo_failure, f' Attribute `{attr}` is required')) # agent_version agent_version = service_check.get('agent_version') @@ -69,25 +66,25 @@ def service_checks(): if not agent_version: output = ' required non-null string: agent_version' else: - output = ' invalid `agent_version`: {}'.format(agent_version) + output = f' invalid `agent_version`: {agent_version}' display_queue.append((echo_failure, output)) # check check = service_check.get('check') - if not check or not isinstance(check, string_types): + if not check or not isinstance(check, str): file_failed = True display_queue.append((echo_failure, ' required non-null string: check')) else: if check in unique_checks: file_failed = True - display_queue.append((echo_failure, ' {} is not a unique check'.format(check))) + display_queue.append((echo_failure, f' {check} is not a unique check')) else: unique_checks.add(check) # description description = service_check.get('description') - if not description or not isinstance(description, string_types): + if not description or not isinstance(description, str): file_failed = True display_queue.append((echo_failure, ' required non-null string: description')) @@ -99,19 +96,19 @@ def service_checks(): # integration integration = service_check.get('integration') - if integration is None or not isinstance(integration, string_types): + if integration is None or not isinstance(integration, str): file_failed = True display_queue.append((echo_failure, ' required non-null string: integration')) # name name = service_check.get('name') - if not name or not isinstance(name, string_types): + if not name or not isinstance(name, str): file_failed = True display_queue.append((echo_failure, ' required non-null string: name')) else: if name in unique_names: file_failed = True - display_queue.append((echo_failure, ' {} is not a unique name'.format(name))) + display_queue.append((echo_failure, f' {name} is not a unique name')) else: unique_names.add(name) @@ -124,7 +121,7 @@ def service_checks(): if file_failed: failed_checks += 1 # Display detailed info if file invalid - echo_info("{}/service_checks.json... ".format(check_name), nl=False) + echo_info(f"{check_name}/service_checks.json... 
", nl=False) echo_failure("FAILED") for display_func, message in display_queue: display_func(message) @@ -132,7 +129,7 @@ def service_checks(): ok_checks += 1 if ok_checks: - echo_success("{} valid files".format(ok_checks)) + echo_success(f"{ok_checks} valid files") if failed_checks: - echo_failure("{} invalid files".format(failed_checks)) + echo_failure(f"{failed_checks} invalid files") abort() diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/config.py b/datadog_checks_dev/datadog_checks/dev/tooling/config.py index bc18f1d49192f3..0917125ee63776 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/config.py +++ b/datadog_checks_dev/datadog_checks/dev/tooling/config.py @@ -2,15 +2,13 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) import os -from collections import OrderedDict, deque +from collections import deque from copy import deepcopy import toml from appdirs import user_data_dir from atomicwrites import atomic_write -from six import string_types -from ..compat import FileNotFoundError from ..utils import ensure_parent_dir_exists, file_exists, read_file APP_DIR = user_data_dir('dd-checks-dev', '') @@ -27,40 +25,28 @@ 'trello.token', } -DEFAULT_CONFIG = OrderedDict( - [ - ('core', os.path.join('~', 'dd', 'integrations-core')), - ('extras', os.path.join('~', 'dd', 'integrations-extras')), - ('agent', os.path.join('~', 'dd', 'datadog-agent')), - ('repo', 'core'), - ('color', bool(int(os.environ['DDEV_COLOR'])) if 'DDEV_COLOR' in os.environ else None), - ('dd_api_key', os.getenv('DD_API_KEY')), - ('dd_app_key', os.getenv('DD_APP_KEY')), - ('org', 'default'), - ('agent6', OrderedDict((('docker', 'datadog/agent-dev:master'), ('local', 'latest')))), - ('agent5', OrderedDict((('docker', 'datadog/dev-dd-agent:master'), ('local', 'latest')))), - ('github', OrderedDict((('user', ''), ('token', '')))), - ('pypi', OrderedDict((('user', ''), ('pass', '')))), - ('trello', OrderedDict((('key', ''), ('token', '')))), - ( - 'orgs', - OrderedDict( - ( - ( - 'default', - OrderedDict( - ( - ('api_key', os.getenv('DD_API_KEY')), - ('app_key', os.getenv('DD_APP_KEY')), - ('site', os.getenv('DD_SITE')), - ) - ), - ), - ) - ), - ), - ] -) +DEFAULT_CONFIG = { + 'core': os.path.join('~', 'dd', 'integrations-core'), + 'extras': os.path.join('~', 'dd', 'integrations-extras'), + 'agent': os.path.join('~', 'dd', 'datadog-agent'), + 'repo': 'core', + 'color': bool(int(os.environ['DDEV_COLOR'])) if 'DDEV_COLOR' in os.environ else None, + 'dd_api_key': os.getenv('DD_API_KEY'), + 'dd_app_key': os.getenv('DD_APP_KEY'), + 'org': 'default', + 'agent6': {'docker': 'datadog/agent-dev:master', 'local': 'latest'}, + 'agent5': {'docker': 'datadog/dev-dd-agent:master', 'local': 'latest'}, + 'github': {'user': '', 'token': ''}, + 'pypi': {'user': '', 'pass': ''}, + 'trello': {'key': '', 'token': ''}, + 'orgs': { + 'default': { + 'api_key': os.getenv('DD_API_KEY'), + 'app_key': os.getenv('DD_APP_KEY'), + 'site': os.getenv('DD_SITE'), + } + }, +} def config_file_exists(): @@ -81,7 +67,7 @@ def load_config(): config = copy_default_config() try: - config.update(toml.loads(read_config_file(), OrderedDict)) + config.update(toml.loads(read_config_file())) except FileNotFoundError: pass @@ -107,10 +93,10 @@ def update_config(): config.update(load_config()) # Support legacy config where agent5 and agent6 were strings - if isinstance(config['agent6'], string_types): - config['agent6'] = OrderedDict((('docker', config['agent6']), ('local', 'latest'))) - if isinstance(config['agent5'], 
string_types): - config['agent5'] = OrderedDict((('docker', config['agent5']), ('local', 'latest'))) + if isinstance(config['agent6'], str): + config['agent6'] = {'docker': config['agent6'], 'local': 'latest'} + if isinstance(config['agent5'], str): + config['agent5'] = {'docker': config['agent5'], 'local': 'latest'} save_config(config) return config @@ -129,7 +115,7 @@ def scrub_secrets(config): if path in branch: if not paths: old_value = branch[path] - if isinstance(old_value, string_types): + if isinstance(old_value, str): branch[path] = '*' * len(old_value) else: branch = branch[path] diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/config_validator/config_block.py b/datadog_checks_dev/datadog_checks/dev/tooling/config_validator/config_block.py index 9b8520b396eac8..76a82e05483ff3 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/config_validator/config_block.py +++ b/datadog_checks_dev/datadog_checks/dev/tooling/config_validator/config_block.py @@ -108,11 +108,11 @@ def _validate_description(self, errors): if self.description.strip() == '': param_name = self.param_prop.var_name - errors.append(ValidatorError("Empty description for {}".format(param_name), self.line, SEVERITY_WARNING)) + errors.append(ValidatorError(f"Empty description for {param_name}", self.line, SEVERITY_WARNING)) for i, line in enumerate(self.description.splitlines()): if len(line) > MAX_COMMENT_LENGTH and not line.endswith("#noqa"): - err_string = "Description too long [{}...] ({}/{})".format(line[:30], len(line), MAX_COMMENT_LENGTH) + err_string = f"Description too long [{line[:30]}...] ({len(line)}/{MAX_COMMENT_LENGTH})" errors.append(ValidatorError(err_string, self.line + i + 1)) def _validate_type(self, errors): @@ -124,7 +124,7 @@ def _validate_type(self, errors): if re.match(regex, self.param_prop.type_name): break else: - errors.append(ValidatorError("Type {} is not accepted".format(self.param_prop.type_name), self.line)) + errors.append(ValidatorError(f"Type {self.param_prop.type_name} is not accepted", self.line)) @classmethod def parse_from_strings(cls, start, config_lines, indent, errors): @@ -192,7 +192,7 @@ def _get_end_of_param_declaration_block(start, end, config_lines, indent, errors if not is_exactly_indented(config_lines[start], indent): other_indent = get_indent(config_lines[start]) - errors.append(ValidatorError("Unexpected indentation, expecting {} not {}".format(indent, other_indent), start)) + errors.append(ValidatorError(f"Unexpected indentation, expecting {indent} not {other_indent}", start)) return None if not config_lines[start].startswith(' ' * indent + "## @param"): @@ -207,7 +207,7 @@ def _get_end_of_param_declaration_block(start, end, config_lines, indent, errors return None if not is_exactly_indented(config_lines[idx], indent): other_indent = get_indent(config_lines[idx]) - err_string = "Unexpected indentation, expecting {} not {}".format(indent, other_indent) + err_string = f"Unexpected indentation, expecting {indent} not {other_indent}" errors.append(ValidatorError(err_string, idx)) return None @@ -220,7 +220,7 @@ def _get_end_of_param_declaration_block(start, end, config_lines, indent, errors idx += 1 break else: - errors.append(ValidatorError("Cannot find end of block starting at line {}".format(start), idx)) + errors.append(ValidatorError(f"Cannot find end of block starting at line {start}", idx)) return None # Now analyze the actual content @@ -299,7 +299,7 @@ def _is_object(idx, config_lines, indent, param_prop, errors): if param_prop.type_name == 'object': 
# The variable to be parsed is an object and thus requires to go recursively if re.match(OBJECT_REGEX, current_line) is None: - err_string = "Parameter {} is declared as object but isn't one".format(param_prop.var_name) + err_string = f"Parameter {param_prop.var_name} is declared as object but isn't one" errors.append(ValidatorError(err_string, idx)) return False return True diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/config_validator/validator.py b/datadog_checks_dev/datadog_checks/dev/tooling/config_validator/validator.py index 578554499d010d..b77493ec3883d5 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/config_validator/validator.py +++ b/datadog_checks_dev/datadog_checks/dev/tooling/config_validator/validator.py @@ -121,7 +121,7 @@ def _parse_for_config_blocks(config_lines, start, end, errors): new_end = get_end_of_part(config_lines, idx, indent=indent) if new_end is None: block_name = cfg_block.param_prop.var_name if cfg_block.param_prop else "?" - err_string = "The object {} cannot be parsed correctly, check indentation".format(block_name) + err_string = f"The object {block_name} cannot be parsed correctly, check indentation" errors.append(ValidatorError(err_string, idx)) return blocks if new_end > end: @@ -141,7 +141,7 @@ def _check_no_duplicate_names(blocks, errors): names_list = [b.param_prop.var_name for b in same_level_blocks if b.param_prop] duplicates = set([x for x in names_list if names_list.count(x) > 1]) for dup in duplicates: - errors.append(ValidatorError("Duplicate variable with name {}".format(dup), None)) + errors.append(ValidatorError(f"Duplicate variable with name {dup}", None)) sub_lists_of_other_blocks = [b for b in blocks if isinstance(b, list)] for l in sub_lists_of_other_blocks: diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/config_validator/validator_errors.py b/datadog_checks_dev/datadog_checks/dev/tooling/config_validator/validator_errors.py index cb5ac50c1bfa2b..70716fb6ec48d8 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/config_validator/validator_errors.py +++ b/datadog_checks_dev/datadog_checks/dev/tooling/config_validator/validator_errors.py @@ -14,7 +14,7 @@ def __init__(self, error_str, line_number, severity=SEVERITY_ERROR): def __repr__(self): if self.line_number is None: return self.error_str - return "(L{}) {}".format(self.line_number + 1, self.error_str) + return f"(L{self.line_number + 1}) {self.error_str}" def __str__(self): return self.__repr__() diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/configuration/consumers/example.py b/datadog_checks_dev/datadog_checks/dev/tooling/configuration/consumers/example.py index 185d2e37785e97..4ebc0ec0cb0b37 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/configuration/consumers/example.py +++ b/datadog_checks_dev/datadog_checks/dev/tooling/configuration/consumers/example.py @@ -1,10 +1,9 @@ # (C) Datadog, Inc. 
2019-present # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) -from collections import OrderedDict +from io import StringIO import yaml -from six import StringIO DESCRIPTION_LINE_LENGTH_LIMIT = 120 @@ -47,7 +46,7 @@ def value_type_string(value): elif item_type == 'array': return 'list of lists' else: - return 'list of {}s'.format(item_type) + return f'list of {item_type}s' else: return value_type @@ -58,7 +57,7 @@ def write_description(option, writer, indent, option_type): if deprecation: description += '\n\n<<< DEPRECATED >>>\n\n' for key, info in option['deprecation'].items(): - key_part = '{}: '.format(key) + key_part = f'{key}: ' info_pad = ' ' * len(key_part) description += key_part @@ -66,11 +65,11 @@ def write_description(option, writer, indent, option_type): if i > 0: description += info_pad - description += '{}\n'.format(line) + description += f'{line}\n' for line in description.splitlines(): if line: - line = '{}## {}'.format(indent, line) + line = f'{indent}## {line}' if len(line) > DESCRIPTION_LINE_LENGTH_LIMIT: extra_characters = len(line) - DESCRIPTION_LINE_LENGTH_LIMIT writer.new_error( @@ -180,7 +179,7 @@ def __init__(self, spec): self.spec = spec def render(self): - files = OrderedDict() + files = {} for file in self.spec['files']: with OptionWriter() as writer: diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/configuration/core.py b/datadog_checks_dev/datadog_checks/dev/tooling/configuration/core.py index f0588f984d27c3..b36e4f7de2a7ac 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/configuration/core.py +++ b/datadog_checks_dev/datadog_checks/dev/tooling/configuration/core.py @@ -30,7 +30,7 @@ def load(self): try: self.data = yaml.safe_load(self.contents) except Exception as e: - self.errors.append('{}: Unable to parse the configuration specification: {}'.format(self.source, e)) + self.errors.append(f'{self.source}: Unable to parse the configuration specification: {e}') return return spec_validator(self.data, self) diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/configuration/spec.py b/datadog_checks_dev/datadog_checks/dev/tooling/configuration/spec.py index c7e2ea132e6448..0b6c13b8730054 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/configuration/spec.py +++ b/datadog_checks_dev/datadog_checks/dev/tooling/configuration/spec.py @@ -6,39 +6,37 @@ def spec_validator(spec, loader): if not isinstance(spec, dict): - loader.errors.append('{}: Configuration specifications must be a mapping object'.format(loader.source)) + loader.errors.append(f'{loader.source}: Configuration specifications must be a mapping object') return if 'name' not in spec: - loader.errors.append( - '{}: Configuration specifications must contain a top-level `name` attribute'.format(loader.source) - ) + loader.errors.append(f'{loader.source}: Configuration specifications must contain a top-level `name` attribute') return name = spec['name'] if not isinstance(name, str): - loader.errors.append('{}: The top-level `name` attribute must be a string'.format(loader.source)) + loader.errors.append(f'{loader.source}: The top-level `name` attribute must be a string') return release_version = spec.setdefault('version', loader.version) if not release_version: loader.errors.append( - '{}: Configuration specifications must contain a top-level `version` attribute'.format(loader.source) + f'{loader.source}: Configuration specifications must contain a top-level `version` attribute' ) return elif not isinstance(release_version, str): - 
loader.errors.append('{}: The top-level `version` attribute must be a string'.format(loader.source)) + loader.errors.append(f'{loader.source}: The top-level `version` attribute must be a string') return if 'files' not in spec: loader.errors.append( - '{}: Configuration specifications must contain a top-level `files` attribute'.format(loader.source) + f'{loader.source}: Configuration specifications must contain a top-level `files` attribute' ) return files = spec['files'] if not isinstance(files, list): - loader.errors.append('{}: The top-level `files` attribute must be an array'.format(loader.source)) + loader.errors.append(f'{loader.source}: The top-level `files` attribute must be an array') return files_validator(files, loader) @@ -50,9 +48,7 @@ def files_validator(files, loader): example_file_names_origin = {} for file_index, config_file in enumerate(files, 1): if not isinstance(config_file, dict): - loader.errors.append( - '{}, file #{}: File attribute must be a mapping object'.format(loader.source, file_index) - ) + loader.errors.append(f'{loader.source}, file #{file_index}: File attribute must be a mapping object') continue if 'name' not in config_file: @@ -64,7 +60,7 @@ def files_validator(files, loader): file_name = config_file['name'] if not isinstance(file_name, str): - loader.errors.append('{}, file #{}: Attribute `name` must be a string'.format(loader.source, file_index)) + loader.errors.append(f'{loader.source}, file #{file_index}: Attribute `name` must be a string') continue if file_name in file_names_origin: @@ -87,7 +83,7 @@ def files_validator(files, loader): example_file_name = config_file.setdefault('example_name', file_name) else: if num_files == 1: - expected_name = '{}.yaml'.format(normalize_source_name(loader.source or 'conf')) + expected_name = f"{normalize_source_name(loader.source or 'conf')}.yaml" if file_name != expected_name: loader.errors.append( '{}, file #{}: File name `{}` should be `{}`'.format( @@ -98,9 +94,7 @@ def files_validator(files, loader): example_file_name = config_file.setdefault('example_name', 'conf.yaml.example') if not isinstance(example_file_name, str): - loader.errors.append( - '{}, file #{}: Attribute `example_name` must be a string'.format(loader.source, file_index) - ) + loader.errors.append(f'{loader.source}, file #{file_index}: Attribute `example_name` must be a string') if example_file_name in example_file_names_origin: loader.errors.append( @@ -112,14 +106,12 @@ def files_validator(files, loader): example_file_names_origin[example_file_name] = file_index if 'options' not in config_file: - loader.errors.append( - '{}, {}: Every file must contain an `options` attribute'.format(loader.source, file_name) - ) + loader.errors.append(f'{loader.source}, {file_name}: Every file must contain an `options` attribute') continue options = config_file['options'] if not isinstance(options, list): - loader.errors.append('{}, {}: The `options` attribute must be an array'.format(loader.source, file_name)) + loader.errors.append(f'{loader.source}, {file_name}: The `options` attribute must be an array') continue options_validator(options, loader, file_name) @@ -148,9 +140,7 @@ def options_validator(options, loader, file_name, *sections): try: template = loader.templates.load(option.pop('template'), parameters) except Exception as e: - loader.errors.append( - '{}, {}, {}option #{}: {}'.format(loader.source, file_name, sections_display, option_index, e) - ) + loader.errors.append(f'{loader.source}, {file_name}, {sections_display}option #{option_index}: 
{e}')
                 continue
 
             if isinstance(template, dict):
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/configuration/template.py b/datadog_checks_dev/datadog_checks/dev/tooling/configuration/template.py
index d83454c7850d64..9665e1a28b0b0a 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/configuration/template.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/configuration/template.py
@@ -29,16 +29,14 @@ def load(self, template, parameters=None):
             path_parts.append(branches.pop(0))
 
         possible_template_paths = (
-            '{}.{}'.format(path_join(path, *path_parts), extension)
-            for path in self.paths
-            for extension in VALID_EXTENSIONS
+            f'{path_join(path, *path_parts)}.{extension}' for path in self.paths for extension in VALID_EXTENSIONS
         )
 
         for template_path in possible_template_paths:
             if file_exists(template_path):
                 break
         else:
-            raise ValueError('Template `{}` does not exist'.format('/'.join(path_parts)))
+            raise ValueError(f"Template `{'/'.join(path_parts)}` does not exist")
 
         if template_path in self.templates:
             data = self.templates[template_path]
@@ -46,7 +44,7 @@ def load(self, template, parameters=None):
             try:
                 data = yaml.safe_load(read_file(template_path))
             except Exception as e:
-                raise ValueError('Unable to parse template `{}`: {}'.format(template_path, e))
+                raise ValueError(f'Unable to parse template `{template_path}`: {e}')
 
             self.templates[template_path] = data
 
@@ -56,9 +54,7 @@ def load(self, template, parameters=None):
                 if branch in data:
                     data = data[branch]
                 else:
-                    raise ValueError(
-                        'Template `{}` has no element `{}`'.format('/'.join(path_parts), '.'.join(branches[: i + 1]))
-                    )
+                    raise ValueError(f"Template `{'/'.join(path_parts)}` has no element `{'.'.join(branches[:i + 1])}`")
             elif isinstance(data, list):
                 for item in data:
                     if isinstance(item, dict) and item.get('name') == branch:
@@ -103,12 +99,10 @@ def override(cls, template, overrides):
                         break
                 else:
                     raise ValueError(
-                        'Template override `{}` has no named mapping `{}`'.format('.'.join(override_keys[:i]), key)
+                        f"Template override `{'.'.join(override_keys[:i])}` has no named mapping `{key}`"
                     )
             else:
-                raise ValueError(
-                    'Template override `{}` does not refer to a mapping'.format('.'.join(override_keys[:i]))
-                )
+                raise ValueError(f"Template override `{'.'.join(override_keys[:i])}` does not refer to a mapping")
 
         # Force assign the desired value to the final key
         if isinstance(root, dict):
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/configuration/utils.py b/datadog_checks_dev/datadog_checks/dev/tooling/configuration/utils.py
index ce58760d8ff357..ab51d8d231d2e6 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/configuration/utils.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/configuration/utils.py
@@ -4,7 +4,7 @@
 
 
 def default_option_example(option_name):
-    return '<{}>'.format(option_name.upper())
+    return f'<{option_name.upper()}>'
 
 
 def normalize_source_name(source_name):
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/constants.py b/datadog_checks_dev/datadog_checks/dev/tooling/constants.py
index 7a1c17be70c708..c3d961ab2aefb2 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/constants.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/constants.py
@@ -2,7 +2,6 @@
 # All rights reserved
 # Licensed under a 3-clause BSD style license (see LICENSE)
 import os
-from collections import OrderedDict
 
 import semver
 
@@ -13,23 +12,21 @@
     'integrations-extras',
     'integrations-internal',
 ]
-VERSION_BUMP = OrderedDict(
-    [
-        ('Added', semver.bump_minor),
-        ('Changed', semver.bump_major),
-        ('Deprecated', semver.bump_minor),
-        ('Fixed', semver.bump_patch),
-        ('Removed', semver.bump_major),
-        ('Security', semver.bump_minor),
-        ('major', semver.bump_major),
-        ('minor', semver.bump_minor),
-        ('patch', semver.bump_patch),
-        ('fix', semver.bump_patch),
-        ('rc', lambda v: semver.bump_prerelease(v, 'rc')),
-        ('alpha', lambda v: semver.bump_prerelease(v, 'alpha')),
-        ('beta', lambda v: semver.bump_prerelease(v, 'beta')),
-    ]
-)
+VERSION_BUMP = {
+    'Added': semver.bump_minor,
+    'Changed': semver.bump_major,
+    'Deprecated': semver.bump_minor,
+    'Fixed': semver.bump_patch,
+    'Removed': semver.bump_major,
+    'Security': semver.bump_minor,
+    'major': semver.bump_major,
+    'minor': semver.bump_minor,
+    'patch': semver.bump_patch,
+    'fix': semver.bump_patch,
+    'rc': lambda v: semver.bump_prerelease(v, 'rc'),
+    'alpha': lambda v: semver.bump_prerelease(v, 'alpha'),
+    'beta': lambda v: semver.bump_prerelease(v, 'beta'),
+}
 
 AGENT_V5_ONLY = {'agent_metrics', 'docker_daemon', 'go-metro', 'kubernetes', 'ntp'}
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/create.py b/datadog_checks_dev/datadog_checks/dev/tooling/create.py
index 98a479965bc154..9e1e8d93e32bb6 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/create.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/create.py
@@ -70,7 +70,7 @@ def construct_template_fields(integration_name, repo_choice, **kwargs):
 
     config = {
         'author': author,
-        'check_class': '{}Check'.format(''.join(part.capitalize() for part in normalized_integration_name.split('_'))),
+        'check_class': f"{''.join(part.capitalize() for part in normalized_integration_name.split('_'))}Check",
         'check_name': normalized_integration_name,
         'integration_name': integration_name,
         'check_name_kebab': check_name_kebab,
@@ -102,7 +102,7 @@ def create_template_files(template_name, new_root, config, read=False):
 
             template_path = path_join(root, template_file)
             file_path = template_path.replace(template_root, '')
-            file_path = '{}{}'.format(new_root, file_path.format(**config))
+            file_path = f'{new_root}{file_path.format(**config)}'
 
             files.append(File(file_path, template_path, config, read=read))
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/e2e/agent.py b/datadog_checks_dev/datadog_checks/dev/tooling/e2e/agent.py
index 65b19c701c8217..e645a3bd73031a 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/e2e/agent.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/e2e/agent.py
@@ -54,7 +54,7 @@ def get_pip_exe(python_version, platform=LINUX):
     if platform == WINDOWS:
         return [r'C:\Program Files\Datadog\Datadog Agent\embedded{}\python.exe'.format(python_version), '-m', 'pip']
     else:
-        return ['/opt/datadog-agent/embedded/bin/pip{}'.format(python_version)]
+        return [f'/opt/datadog-agent/embedded/bin/pip{python_version}']
 
 
 def get_agent_conf_dir(check, agent_version, platform=LINUX):
@@ -65,12 +65,12 @@ def get_agent_conf_dir(check, agent_version, platform=LINUX):
         return r'C:\ProgramData\Datadog\conf.d'
     elif platform == MAC:
         if agent_version >= 6:
-            return '/opt/datadog-agent/etc/conf.d/{}.d'.format(check)
+            return f'/opt/datadog-agent/etc/conf.d/{check}.d'
         else:
            return '/opt/datadog-agent/etc/conf.d'
     else:
         if agent_version >= 6:
-            return '/etc/datadog-agent/conf.d/{}.d'.format(check)
+            return f'/etc/datadog-agent/conf.d/{check}.d'
         else:
             return '/etc/dd-agent/conf.d'
 
@@ -94,7 +94,7 @@ def get_agent_service_cmd(version, platform, action):
             'launchctl',
             'load' if action == 'start' else 'unload',
             '-w',
-            '{}/Library/LaunchAgents/com.datadoghq.agent.plist'.format(expanduser("~")),
+            f"{expanduser('~')}/Library/LaunchAgents/com.datadoghq.agent.plist",
         ]
     else:
         return ['sudo', 'service', 'datadog-agent', action]
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/e2e/config.py b/datadog_checks_dev/datadog_checks/dev/tooling/e2e/config.py
index e54432f129e166..9f9c317c4b1ca9 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/e2e/config.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/e2e/config.py
@@ -14,7 +14,7 @@
 
 
 def config_file_name(check):
-    return '{}.yaml'.format(check)
+    return f'{check}.yaml'
 
 
 def locate_env_dir(check, env):
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/e2e/docker.py b/datadog_checks_dev/datadog_checks/dev/tooling/e2e/docker.py
index c2ff6b5e241f45..8ad224a15f4012 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/e2e/docker.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/e2e/docker.py
@@ -48,17 +48,17 @@ def __init__(
         self.python_version = python_version or DEFAULT_PYTHON_VERSION
         self._agent_version = self.metadata.get('agent_version')
 
-        self.container_name = 'dd_{}_{}'.format(self.check, self.env)
+        self.container_name = f'dd_{self.check}_{self.env}'
         self.config_dir = locate_config_dir(check, env)
         self.config_file = locate_config_file(check, env)
         self.config_file_name = config_file_name(self.check)
 
         # If we use a default build, and it's missing the py suffix, adds it
         if default_agent and self.agent_build and 'py' not in self.agent_build:
-            self.agent_build = '{}-py{}'.format(self.agent_build, self.python_version)
+            self.agent_build = f'{self.agent_build}-py{self.python_version}'
 
         if self.agent_build and self.metadata.get('use_jmx', False):
-            self.agent_build = '{}-jmx'.format(self.agent_build)
+            self.agent_build = f'{self.agent_build}-jmx'
 
@@ -66,7 +66,7 @@ def agent_version(self):
 
     @property
     def check_mount_dir(self):
-        return '/home/{}'.format(self.check)
+        return f'/home/{self.check}'
 
     @property
     def base_mount_dir(self):
@@ -85,8 +85,8 @@ def exec_command(self, command, **kwargs):
         if command.startswith('pip '):
             command = command.replace('pip', ' '.join(get_pip_exe(self.python_version)), 1)
 
-        cmd += ' {}'.format(self.container_name)
-        cmd += ' {}'.format(command)
+        cmd += f' {self.container_name}'
+        cmd += f' {command}'
 
         return run_command(cmd, **kwargs)
@@ -104,32 +104,32 @@ def run_check(
     ):
         # JMX check
         if jmx_list:
-            command = '{} jmx list {}'.format(self.agent_command, jmx_list)
+            command = f'{self.agent_command} jmx list {jmx_list}'
         # Classic check
         else:
-            command = '{} check {}'.format(self.agent_command, self.check)
+            command = f'{self.agent_command} check {self.check}'
 
             if rate:
-                command += ' {}'.format(get_rate_flag(self.agent_version))
+                command += f' {get_rate_flag(self.agent_version)}'
 
             # These are only available for Agent 6+
             if times is not None:
-                command += ' --check-times {}'.format(times)
+                command += f' --check-times {times}'
            if pause is not None:
-                command += ' --pause {}'.format(pause)
+                command += f' --pause {pause}'
            if delay is not None:
-                command += ' --delay {}'.format(delay)
+                command += f' --delay {delay}'
 
         if as_json:
-            command += ' --json {}'.format(as_json)
+            command += f' --json {as_json}'
 
         if break_point is not None:
-            command += ' --breakpoint {}'.format(break_point)
+            command += f' --breakpoint {break_point}'
 
         if log_level is not None:
-            command += ' --log-level {}'.format(log_level)
+            command += f' --log-level {log_level}'
 
         return self.exec_command(command, capture=capture, interactive=break_point is not None)
@@ -161,11 +161,11 @@ def detect_agent_version(self):
             'run',
             '--rm',
             '-e',
-            'DD_API_KEY={}'.format(self.api_key),
+            f'DD_API_KEY={self.api_key}',
             self.agent_build,
             'head',
             '--lines=1',
-            '{}'.format(get_agent_version_manifest('linux')),
+            f"{get_agent_version_manifest('linux')}",
         ]
         result = run_command(command, capture=True)
         match = re.search(MANIFEST_VERSION_PATTERN, result.stdout)
@@ -179,14 +179,14 @@ def update_check(self):
         command.extend(get_pip_exe(self.python_version))
         command.extend(('install', '-e', self.check_mount_dir))
         if file_exists(path_join(get_root(), self.check, REQUIREMENTS_IN)):
-            command.extend(('-r', '{}/{}'.format(self.check_mount_dir, REQUIREMENTS_IN)))
+            command.extend(('-r', f'{self.check_mount_dir}/{REQUIREMENTS_IN}'))
 
         run_command(command, capture=True, check=True)
 
     def update_base_package(self):
         command = ['docker', 'exec', self.container_name]
         command.extend(get_pip_exe(self.python_version))
         command.extend(('install', '-e', self.base_mount_dir))
-        command.extend(('-r', '{}/{}'.format(self.base_mount_dir, REQUIREMENTS_IN)))
+        command.extend(('-r', f'{self.base_mount_dir}/{REQUIREMENTS_IN}'))
 
         run_command(command, capture=True, check=True)
 
     def update_agent(self):
@@ -210,7 +210,7 @@ def start_agent(self):
             'host',
             # Agent 6 will simply fail without an API key
             '-e',
-            'DD_API_KEY={}'.format(self.api_key),
+            f'DD_API_KEY={self.api_key}',
             # Don't write .pyc, needed to fix this issue (only Python 2):
             # When reinstalling a package, .pyc are not cleaned correctly. The issue is fixed by not writing them
             # in the first place.
@@ -223,17 +223,17 @@ def start_agent(self):
             'DD_EXPVAR_PORT=0',
             # Run API on a random port
             '-e',
-            'DD_CMD_PORT={}'.format(free_port),
+            f'DD_CMD_PORT={free_port}',
             # Disable trace agent
             '-e',
             'DD_APM_ENABLED=false',
             # Mount the config directory, not the file, to ensure updates are propagated
             # https://github.com/moby/moby/issues/15793#issuecomment-135411504
             '-v',
-            '{}:{}'.format(self.config_dir, get_agent_conf_dir(self.check, self.agent_version)),
+            f'{self.config_dir}:{get_agent_conf_dir(self.check, self.agent_version)}',
             # Mount the check directory
             '-v',
-            '{}:{}'.format(path_join(get_root(), self.check), self.check_mount_dir),
+            f'{path_join(get_root(), self.check)}:{self.check_mount_dir}',
             # Mount the /proc directory
             '-v',
             '/proc:/host/proc',
@@ -243,18 +243,18 @@
         # Any environment variables passed to the start command
         for key, value in sorted(self.env_vars.items()):
-            command.extend(['-e', '{}={}'.format(key, value)])
+            command.extend(['-e', f'{key}={value}'])
 
         if 'proxy' in self.metadata:
             if 'http' in self.metadata['proxy']:
-                command.extend(['-e', 'DD_PROXY_HTTP={}'.format(self.metadata['proxy']['http'])])
+                command.extend(['-e', f"DD_PROXY_HTTP={self.metadata['proxy']['http']}"])
             if 'https' in self.metadata['proxy']:
-                command.extend(['-e', 'DD_PROXY_HTTPS={}'.format(self.metadata['proxy']['https'])])
+                command.extend(['-e', f"DD_PROXY_HTTPS={self.metadata['proxy']['https']}"])
 
         if self.base_package:
             # Mount the check directory
             command.append('-v')
-            command.append('{}:{}'.format(self.base_package, self.base_mount_dir))
+            command.append(f'{self.base_package}:{self.base_mount_dir}')
 
         # The chosen tag
         command.append(self.agent_build)
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/e2e/format.py b/datadog_checks_dev/datadog_checks/dev/tooling/e2e/format.py
index cb972ab0f046cd..890ef5c4f8c513 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/e2e/format.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/e2e/format.py
@@ -1,8 +1,6 @@
 # (C) Datadog, Inc. 2018-present
 # All rights reserved
 # Licensed under a 3-clause BSD style license (see LICENSE)
-from __future__ import unicode_literals
-
 import re
 
 from ..._env import E2E_FIXTURE_NAME, deserialize_data
@@ -12,10 +10,10 @@
 
 def parse_config_from_result(env, result):
     if 'NO E2E FIXTURE AVAILABLE' in result.stdout:
-        return None, None, 'The environment fixture `{}` does not exist.'.format(E2E_FIXTURE_NAME)
+        return None, None, f'The environment fixture `{E2E_FIXTURE_NAME}` does not exist.'
 
-    if '{}: platform mismatch'.format(env) in result.stdout:
-        return None, None, 'The environment `{}` does not support this platform.'.format(env)
+    if f'{env}: platform mismatch' in result.stdout:
+        return None, None, f'The environment `{env}` does not support this platform.'
 
     decoded = parse_encoded_config_data(result.stdout)
     if decoded is None:
@@ -32,7 +30,7 @@ def parse_config_from_result(env, result):
     metadata = decoded['metadata']
 
     if config is None:
-        return None, None, 'The environment fixture `{}` did not yield any configuration.'.format(E2E_FIXTURE_NAME)
+        return None, None, f'The environment fixture `{E2E_FIXTURE_NAME}` did not yield any configuration.'
 
     return config, metadata, None
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/e2e/local.py b/datadog_checks_dev/datadog_checks/dev/tooling/e2e/local.py
index 052e1df29837f8..870563c8a87e8b 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/e2e/local.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/e2e/local.py
@@ -107,20 +107,20 @@ def use_config(self, config):
 
     def copy_config_to_local_agent(self):
         conf_dir = get_agent_conf_dir(self.check, self.agent_version, self.platform)
-        check_conf_file = os.path.join(conf_dir, '{}.yaml'.format(self.check))
+        check_conf_file = os.path.join(conf_dir, f'{self.check}.yaml')
         if not os.path.exists(conf_dir):
             os.makedirs(conf_dir)
 
         if file_exists(check_conf_file):
-            copyfile(check_conf_file, '{}.bak'.format(check_conf_file))
+            copyfile(check_conf_file, f'{check_conf_file}.bak')
 
         copyfile(self.config_file, check_conf_file)
 
     def remove_config_from_local_agent(self):
         check_conf_file = os.path.join(
-            get_agent_conf_dir(self.check, self.agent_version, self.platform), '{}.yaml'.format(self.check)
+            get_agent_conf_dir(self.check, self.agent_version, self.platform), f'{self.check}.yaml'
         )
-        backup_conf_file = '{}.bak'.format(check_conf_file)
+        backup_conf_file = f'{check_conf_file}.bak'
         os.remove(check_conf_file)
         if file_exists(backup_conf_file):
             move(backup_conf_file, check_conf_file)
@@ -139,32 +139,32 @@ def run_check(
     ):
         # JMX check
         if jmx_list:
-            command = '{} jmx list {}'.format(self.agent_command, jmx_list)
+            command = f'{self.agent_command} jmx list {jmx_list}'
         # Classic check
         else:
-            command = '{} check {}'.format(self.agent_command, self.check)
+            command = f'{self.agent_command} check {self.check}'
 
             if rate:
-                command += ' {}'.format(get_rate_flag(self.agent_version))
+                command += f' {get_rate_flag(self.agent_version)}'
 
             # These are only available for Agent 6+
             if times is not None:
-                command += ' --check-times {}'.format(times)
+                command += f' --check-times {times}'
            if pause is not None:
-                command += ' --pause {}'.format(pause)
+                command += f' --pause {pause}'
            if delay is not None:
-                command += ' --delay {}'.format(delay)
+                command += f' --delay {delay}'
 
         if as_json:
-            command += ' --json {}'.format(as_json)
+            command += f' --json {as_json}'
 
         if break_point is not None:
-            command += ' --breakpoint {}'.format(break_point)
+            command += f' --breakpoint {break_point}'
 
         if log_level is not None:
-            command += ' --log-level {}'.format(log_level)
+            command += f' --log-level {log_level}'
 
         return run_command(command, capture=capture)
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/e2e/run.py b/datadog_checks_dev/datadog_checks/dev/tooling/e2e/run.py
index 2ee7384e339aa3..f49af5d53a311f 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/e2e/run.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/e2e/run.py
@@ -16,21 +16,21 @@ def _execute(check, command, env_vars):
         operation = 'stopping'
 
     with chdir(path_join(get_root(), check), env_vars=env_vars):
-        echo_debug('%s env with env_vars: %s' % (operation, env_vars), cr=True, indent=True)
-        echo_debug('%s env with command: %s' % (operation, command), indent=True)
+        echo_debug(f'{operation} env with env_vars: {env_vars}', cr=True, indent=True)
+        echo_debug(f'{operation} env with command: {command}', indent=True)
         result = run_command(command, capture=True)
-        echo_debug('command result stdout: %s' % result.stdout, indent=True)
-        echo_debug('command result stderr: %s' % result.stderr, indent=True)
+        echo_debug(f'command result stdout: {result.stdout}', indent=True)
+        echo_debug(f'command result stderr: {result.stderr}', indent=True)
 
     return result
 
 
 def start_environment(check, env):
-    command = 'tox --develop -e {}'.format(env)
+    command = f'tox --develop -e {env}'
     env_vars = {
         E2E_TEAR_DOWN: 'false',
         'PYTEST_ADDOPTS': '--benchmark-skip --exitfirst',
-        'TOX_TESTENV_PASSENV': '{} USERNAME PYTEST_ADDOPTS {}'.format(E2E_TEAR_DOWN, ' '.join(get_ci_env_vars())),
+        'TOX_TESTENV_PASSENV': f"{E2E_TEAR_DOWN} USERNAME PYTEST_ADDOPTS {' '.join(get_ci_env_vars())}",
     }
 
     result = _execute(check, command, env_vars)
@@ -38,7 +38,7 @@ def start_environment(check, env):
 
 
 def stop_environment(check, env, metadata=None):
-    command = 'tox --develop -e {}'.format(env)
+    command = f'tox --develop -e {env}'
     env_vars = {
         E2E_SET_UP: 'false',
         'PYTEST_ADDOPTS': '--benchmark-skip --exitfirst',
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/git.py b/datadog_checks_dev/datadog_checks/dev/tooling/git.py
index 737c0f56db18b0..0f4f647a3599ee 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/git.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/git.py
@@ -42,7 +42,7 @@ def get_commits_since(check_name, target_tag=None):
         target_path = os.path.join(root, check_name)
     else:
         target_path = root
-    command = 'git log --pretty=%s {}{}'.format('' if target_tag is None else '{}... '.format(target_tag), target_path)
+    command = f"git log --pretty=%s {'' if target_tag is None else f'{target_tag}... '}{target_path}"
 
     with chdir(root):
         return run_command(command, capture=True).stdout.splitlines()
@@ -53,7 +53,7 @@ def git_show_file(path, ref):
     Return the contents of a file at a given tag
     """
     root = get_root()
-    command = 'git show {}:{}'.format(ref, path)
+    command = f'git show {ref}:{path}'
 
     with chdir(root):
         return run_command(command, capture=True).stdout
@@ -69,7 +69,7 @@ def git_commit(targets, message, force=False, sign=False):
         target_paths.append(os.path.join(root, t))
 
     with chdir(root):
-        result = run_command('git add{} {}'.format(' -f' if force else '', ' '.join(target_paths)))
+        result = run_command(f"git add{' -f' if force else ''} {' '.join(target_paths)}")
         if result.code != 0:
             return result
 
@@ -81,12 +81,12 @@ def git_tag(tag_name, push=False):
     """
     Tag the repo using an annotated tag.
""" with chdir(get_root()): - result = run_command('git tag -a {} -m "{}"'.format(tag_name, tag_name), capture=True) + result = run_command(f'git tag -a {tag_name} -m "{tag_name}"', capture=True) if push: if result.code != 0: return result - return run_command('git push origin {}'.format(tag_name), capture=True) + return run_command(f'git push origin {tag_name}', capture=True) return result @@ -125,7 +125,7 @@ def tracked_by_git(filename): """ with chdir(get_root()): # https://stackoverflow.com/a/2406813 - result = run_command('git ls-files --error-unmatch {}'.format(filename), capture=True) + result = run_command(f'git ls-files --error-unmatch {filename}', capture=True) return result.code == 0 @@ -134,5 +134,5 @@ def ignored_by_git(filename): Return a boolean value for whether the given file is ignored by git. """ with chdir(get_root()): - result = run_command('git check-ignore -q {}'.format(filename), capture=True) + result = run_command(f'git check-ignore -q {filename}', capture=True) return result.code == 0 diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/github.py b/datadog_checks_dev/datadog_checks/dev/tooling/github.py index a6e1ac1c838675..f0433dfa2e5db8 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/github.py +++ b/datadog_checks_dev/datadog_checks/dev/tooling/github.py @@ -68,8 +68,7 @@ def get_pr(pr_num, config=None, raw=False): def get_pr_from_hash(commit_hash, repo, config=None, raw=False): response = requests.get( - 'https://api.github.com/search/issues?q=sha:{}+repo:DataDog/{}'.format(commit_hash, repo), - auth=get_auth_info(config), + f'https://api.github.com/search/issues?q=sha:{commit_hash}+repo:DataDog/{repo}', auth=get_auth_info(config), ) if raw: diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/release.py b/datadog_checks_dev/datadog_checks/dev/tooling/release.py index 81a64a4809cc7c..1cf7e3b050c106 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/release.py +++ b/datadog_checks_dev/datadog_checks/dev/tooling/release.py @@ -20,7 +20,7 @@ def get_release_tag_string(check_name, version_string): Compose a string to use for release tags """ if check_name: - return '{}-{}'.format(check_name, version_string) + return f'{check_name}-{version_string}' else: return version_string @@ -47,7 +47,7 @@ def get_package_name(folder_name): elif folder_name == 'datadog_checks_downloader': return 'datadog-checks-downloader' - return '{}{}'.format(DATADOG_PACKAGE_PREFIX, folder_name.replace('_', '-')) + return f"{DATADOG_PACKAGE_PREFIX}{folder_name.replace('_', '-')}" def get_folder_name(package_name): @@ -72,26 +72,26 @@ def get_agent_requirement_line(check, version): # no manifest if check in ('datadog_checks_base', 'datadog_checks_downloader'): - return '{}=={}'.format(package_name, version) + return f'{package_name}=={version}' m = load_manifest(check) platforms = sorted(m.get('supported_os', [])) # all platforms if platforms == ALL_PLATFORMS: - return '{}=={}'.format(package_name, version) + return f'{package_name}=={version}' # one specific platform elif len(platforms) == 1: - return "{}=={}; sys_platform == '{}'".format(package_name, version, PLATFORMS_TO_PY.get(platforms[0])) + return f"{package_name}=={version}; sys_platform == '{PLATFORMS_TO_PY.get(platforms[0])}'" elif platforms: if 'windows' not in platforms: - return "{}=={}; sys_platform != 'win32'".format(package_name, version) + return f"{package_name}=={version}; sys_platform != 'win32'" elif 'mac_os' not in platforms: - return "{}=={}; sys_platform != 
'darwin'".format(package_name, version) + return f"{package_name}=={version}; sys_platform != 'darwin'" elif 'linux' not in platforms: - return "{}=={}; sys_platform != 'linux2'".format(package_name, version) + return f"{package_name}=={version}; sys_platform != 'linux2'" - raise ManifestError("Can't parse the `supported_os` list for the check {}: {}".format(check, platforms)) + raise ManifestError(f"Can't parse the `supported_os` list for the check {check}: {platforms}") def update_agent_requirements(req_file, check, newline): @@ -105,7 +105,7 @@ def update_agent_requirements(req_file, check, newline): current_package_name = line.split('==')[0] if current_package_name == package_name: - lines[i] = '{}\n'.format(newline) + lines[i] = f'{newline}\n' break write_file_lines(req_file, sorted(lines)) diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/requirements.py b/datadog_checks_dev/datadog_checks/dev/tooling/requirements.py index 457fad7d4c274c..273f1e0b058390 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/requirements.py +++ b/datadog_checks_dev/datadog_checks/dev/tooling/requirements.py @@ -120,7 +120,7 @@ def write_packages(self, reqs_file): """ Dump the packages in the catalog in a requirements file """ - write_file_lines(reqs_file, ('{}\n'.format(package) for package in self.packages)) + write_file_lines(reqs_file, (f'{package}\n' for package in self.packages)) def add_package(self, check_name, package): """ @@ -183,7 +183,7 @@ def make_catalog(verify=False, checks=None): for check_name in sorted(checks): for package in read_packages(os.path.join(root, check_name, REQUIREMENTS_IN)): if not package.version: - errors.append('Unpinned dependency `{}` in the `{}` check'.format(package.name, check_name)) + errors.append(f'Unpinned dependency `{package.name}` in the `{check_name}` check') catalog.add_package(check_name, package) return catalog, errors diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/signing.py b/datadog_checks_dev/datadog_checks/dev/tooling/signing.py index 8698beea5f5c46..48f144b5ad6d30 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/signing.py +++ b/datadog_checks_dev/datadog_checks/dev/tooling/signing.py @@ -17,13 +17,9 @@ from in_toto.gpg.constants import GPG_COMMAND from .constants import get_root -from .git import ( - ignored_by_git, tracked_by_git -) +from .git import ignored_by_git, tracked_by_git from ..subprocess import run_command -from ..utils import ( - chdir, ensure_dir_exists, path_join, stream_file_lines, write_file -) +from ..utils import chdir, ensure_dir_exists, path_join, stream_file_lines LINK_DIR = '.in-toto' STEP_NAME = 'tag' @@ -39,7 +35,7 @@ def __init__(self, filename): def __str__(self): - return '{} has neither been tracked nor ignored by git and in-toto!'.format(self.filename) + return f'{self.filename} has neither been tracked nor ignored by git and in-toto!' class UntrackedButIgnoredFileException(Exception): @@ -48,7 +44,7 @@ def __init__(self, filename): def __str__(self): - return '{} has not been tracked, but it should be ignored by git and in-toto!'.format(self.filename) + return f'{self.filename} has not been tracked, but it should be ignored by git and in-toto!' 
 
 
 def read_gitignore_patterns():
@@ -63,7 +59,7 @@ def read_gitignore_patterns():
 
 
 def get_key_id(gpg_exe):
-    result = run_command('{} --card-status'.format(gpg_exe), capture='out', check=True)
+    result = run_command(f'{gpg_exe} --card-status', capture='out', check=True)
     lines = result.stdout.splitlines()
     for line in lines:
         if line.startswith('Signature key ....:'):
@@ -110,7 +106,7 @@ def update_link_metadata(checks):
     # Find this latest signed link metadata file on disk.
     # NOTE: in-toto currently uses the first 8 characters of the signing keyId.
     key_id_prefix = key_id[:8].lower()
-    tag_link = '{}.{}.link'.format(STEP_NAME, key_id_prefix)
+    tag_link = f'{STEP_NAME}.{key_id_prefix}.link'
 
     # Final location of metadata file.
     metadata_file = path_join(LINK_DIR, tag_link)
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/testing.py b/datadog_checks_dev/datadog_checks/dev/tooling/testing.py
index 82f6a3a8d45b33..8ad6a16a6e7c90 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/testing.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/testing.py
@@ -143,7 +143,7 @@ def coverage_sources(check):
     elif check == 'datadog_checks_downloader':
         package_path = 'datadog_checks/downloader'
     else:
-        package_path = 'datadog_checks/{}'.format(check)
+        package_path = f'datadog_checks/{check}'
 
     return package_path, 'tests'
 
@@ -152,7 +152,7 @@ def fix_coverage_report(check, report_file):
     report = read_file_binary(report_file)
 
     # Make every check's `tests` directory path unique so they don't get combined in UI
-    report = report.replace(b'"tests/', '"{}/tests/'.format(check).encode('utf-8'))
+    report = report.replace(b'"tests/', f'"{check}/tests/'.encode('utf-8'))
 
     write_file_binary(report_file, report)
 
@@ -172,7 +172,7 @@ def construct_pytest_options(
     e2e=False,
 ):
     # Prevent no verbosity
-    pytest_options = '--verbosity={}'.format(verbose or 1)
+    pytest_options = f'--verbosity={verbose or 1}'
 
     if not verbose:
         pytest_options += ' --tb=short'
@@ -198,10 +198,10 @@ def construct_pytest_options(
             # junit report file must contain the env name to handle multiple envs
             # $TOX_ENV_NAME is a tox injected variable
             # See https://tox.readthedocs.io/en/latest/config.html#injected-environment-variables
-            ' --junit-xml=.junit/test-{test_group}-$TOX_ENV_NAME.xml'
+            f' --junit-xml=.junit/test-{test_group}-$TOX_ENV_NAME.xml'
             # Junit test results class prefix
-            ' --junit-prefix={check}'
-        ).format(check=check, test_group=test_group)
+            f' --junit-prefix={check}'
+        )
 
     if coverage:
         pytest_options += (
@@ -216,19 +216,19 @@ def construct_pytest_options(
     )
 
     if marker:
-        pytest_options += ' -m "{}"'.format(marker)
+        pytest_options += f' -m "{marker}"'
 
     if test_filter:
-        pytest_options += ' -k "{}"'.format(test_filter)
+        pytest_options += f' -k "{test_filter}"'
 
     if pytest_args:
-        pytest_options += ' {}'.format(pytest_args)
+        pytest_options += f' {pytest_args}'
 
     return pytest_options
 
 
 def pytest_coverage_sources(*checks):
-    return ' '.join(' '.join('--cov={}'.format(source) for source in coverage_sources(check)) for check in checks)
+    return ' '.join(' '.join(f'--cov={source}' for source in coverage_sources(check)) for check in checks)
 
 
 def testable_files(files):
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/utils.py b/datadog_checks_dev/datadog_checks/dev/tooling/utils.py
index b671fd8a9aa1d6..6cf0758c33b45c 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/utils.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/utils.py
@@ -5,11 +5,9 @@
 import os
 import re
 from ast import literal_eval
-from collections import OrderedDict
 
 import requests
 import semver
-from six import string_types
 
 from ..utils import file_exists, read_file, write_file
 from .constants import NOT_CHECKS, VERSION_BUMP, get_root
@@ -22,9 +20,9 @@ def format_commit_id(commit_id):
     if commit_id:
         if commit_id.isdigit():
-            return 'PR #{}'.format(commit_id)
+            return f'PR #{commit_id}'
         else:
-            return 'commit hash `{}`'.format(commit_id)
+            return f'commit hash `{commit_id}`'
 
     return commit_id
@@ -39,7 +37,7 @@ def get_current_agent_version():
 
     most_recent = sorted(versions)[-1]
 
-    return "{}.{}".format(most_recent[0], most_recent[1])
+    return f"{most_recent[0]}.{most_recent[1]}"
 
 
 def is_package(d):
@@ -176,13 +174,13 @@ def load_manifest(check_name):
     """
     manifest_path = get_manifest_file(check_name)
     if file_exists(manifest_path):
-        return json.loads(read_file(manifest_path).strip(), object_pairs_hook=OrderedDict)
+        return json.loads(read_file(manifest_path).strip())
 
     return {}
 
 
 def write_manifest(manifest, check_name):
     manifest_path = get_manifest_file(check_name)
-    write_file(manifest_path, '{}\n'.format(json.dumps(manifest, indent=2)))
+    write_file(manifest_path, f'{json.dumps(manifest, indent=2)}\n')
 
 
 def get_bump_function(changelog_types):
@@ -206,7 +204,7 @@ def parse_agent_req_file(contents):
     datadog-active-directory==1.1.1; sys_platform == 'win32'
     """
-    catalog = OrderedDict()
+    catalog = {}
     for line in contents.splitlines():
         toks = line.split('==', 1)
         if len(toks) != 2 or not toks[0] or not toks[1]:
@@ -222,6 +220,6 @@ def parse_agent_req_file(contents):
 
 
 def parse_version_parts(version):
-    if not isinstance(version, string_types):
+    if not isinstance(version, str):
         return []
     return [int(v) for v in version.split('.') if v.isdigit()]
diff --git a/datadog_checks_dev/setup.py b/datadog_checks_dev/setup.py
index 5482fc37c3cf7b..4cfe851e7f05fd 100644
--- a/datadog_checks_dev/setup.py
+++ b/datadog_checks_dev/setup.py
@@ -58,11 +58,14 @@
         'Operating System :: OS Independent',
         'Programming Language :: Python :: 2.7',
         'Programming Language :: Python :: 3.7',
+        'Programming Language :: Python :: 3.8',
         'Programming Language :: Python :: Implementation :: CPython',
         'Programming Language :: Python :: Implementation :: PyPy',
     ],
     packages=['datadog_checks', 'datadog_checks.dev'],
     install_requires=REQUIRES,
+    # TODO: Uncomment when we fully drop Python 2
+    # python_requires='>=3.7',
     include_package_data=True,
     extras_require={
         'cli': [
diff --git a/datadog_checks_dev/tox.ini b/datadog_checks_dev/tox.ini
index 6fe8455027ad4b..0a4b325d832ce9 100644
--- a/datadog_checks_dev/tox.ini
+++ b/datadog_checks_dev/tox.ini
@@ -19,4 +19,5 @@ passenv =
 setenv =
     DDEV_TESTING_PLUGIN=true
 commands =
-    pytest -v {posargs} tests
+    py27: pytest -v --ignore tests/tooling {posargs} tests
+    py37: pytest -v {posargs} tests