diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 000000000..7e8b60b83 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,33 @@ +# See https://help.github.com/articles/about-codeowners/ for syntax +# Rules are matched top-to-bottom, so one team can own subdirectories +# and another team can own the rest of the directory. Last matching +# pattern is the one used. + +# Default owners of everything in the repo +* @DataDog/api-clients + +# API +/datadog/api/ @DataDog/api-clients +/tests/integration/api/ @DataDog/api-clients +/tests/unit/api/ @DataDog/api-clients + +# Dogshell +/datadog/dogshell/ @DataDog/api-clients +/tests/integration/dogshell/ @DataDog/api-clients + +# Dogstatsd +/datadog/dogstatsd/ @DataDog/api-clients @DataDog/agent-metrics-logs +/datadog/util/ @DataDog/api-clients @DataDog/agent-metrics-logs +/tests/integration/dogstatsd/ @DataDog/api-clients @DataDog/agent-metrics-logs +/tests/unit/dogstatsd/ @DataDog/api-clients @DataDog/agent-metrics-logs +/tests/unit/util/ @DataDog/api-clients @DataDog/agent-metrics-logs +/tests/util/ @DataDog/api-clients @DataDog/agent-metrics-logs +/tests/performance/test_statsd_* @DataDog/api-clients @DataDog/agent-metrics-logs + +# Threadstats
+/datadog/threadstats/ @DataDog/api-clients @DataDog/agent-metrics-logs +/tests/unit/threadstats/ @DataDog/api-clients @DataDog/agent-metrics-logs + +# Documentation +*.md @DataDog/documentation @DataDog/api-clients +LICENSE @DataDog/documentation @DataDog/api-clients diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 000000000..3928afd18 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,36 @@ +--- +name: Bug report +about: Create a report to help us improve +title: '' +labels: 'kind/bug' +assignees: '' + +--- + +**Describe the bug** +A clear and concise description of what the bug is. + +Label the issue properly. +- Add `severity/` label. 
+- Add `documentation` label if this issue is related to documentation changes. + +**To Reproduce** +Steps to reproduce the behavior: +1. Go to '...' +2. Click on '....' +3. Scroll down to '....' +4. See error + +**Expected behavior** +A clear and concise description of what you expected to happen. + +**Screenshots** +If applicable, add screenshots to help explain your problem. + +**Environment and Versions (please complete the following information):** +A clear and precise description of your setup: + - version for this project in use. + - services, libraries, languages and tools list and versions. + +**Additional context** +Add any other context about the problem here. diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 000000000..22a9fae53 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,23 @@ +--- +name: Feature request +about: Suggest an idea for this project +title: '' +labels: 'kind/feature-request' +assignees: '' + +--- + +**Note:** +If you have a feature request, you should [contact support](https://docs.datadoghq.com/help/) so the request can be properly tracked. + +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. + +**Describe the solution you'd like** +A clear and concise description of what you want to happen. + +**Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + +**Additional context** +Add any other context or screenshots about the feature request here. \ No newline at end of file diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 000000000..e641bdfc5 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,77 @@ +### Requirements for Contributing to this repository + +* Fill out the template below. 
Any pull request that does not include enough information to be reviewed in a timely manner may be closed at the maintainers' discretion. +* The pull request must only fix one issue, or add one feature, at a time. +* The pull request must update the test suite to demonstrate the changed functionality. +* After you create the pull request, all status checks must pass before a maintainer reviews your contribution. For more details, please see [CONTRIBUTING](/CONTRIBUTING.md). + +### What does this PR do? + + + +### Description of the Change + + + +### Alternate Designs + + + +### Possible Drawbacks + + + +### Verification Process + + + +### Additional Notes + + + +### Release Notes + + + +### Review checklist (to be filled by reviewers) + +- [ ] Feature or bug fix MUST have appropriate tests (unit, integration, etc...) +- [ ] PR title must be written as a CHANGELOG entry [(see why)](https://github.com/DataDog/datadogpy/blob/master/CONTRIBUTING.md#pull-request-title) +- [ ] File changes must correspond to the primary purpose of the PR as described in the title (small unrelated changes should have their own PR) +- [ ] PR must have one `changelog/` label attached. If applicable it should have the `backward-incompatible` label attached. +- [ ] PR should not have `do-not-merge/` label attached. +- [ ] If applicable, issue must have `kind/` and `severity/` labels attached at least. 
+ diff --git a/.github/labeler.yml b/.github/labeler.yml new file mode 100644 index 000000000..d29c9ed7d --- /dev/null +++ b/.github/labeler.yml @@ -0,0 +1,22 @@ +# Configuration for labeler - https://github.com/actions/labeler + +documentation: + - "*.md" + - "**/*.md" + +resource-api: + - /datadog/api/**/* + - /tests/integration/api/**/* + - /tests/unit/api/**/* + +resource-dogshell: + - /datadog/dogshell/**/* + - /tests/integration/dogshell/**/* + +resource-dogstatd: + - /datadog/dogstatsd/**/* + - /tests/unit/dogstatsd/**/* + +resource-threadstats: + - /datadog/threadstats/**/* + - /tests/unit/threadstats/**/* diff --git a/.github/workflows/changelog.yaml b/.github/workflows/changelog.yaml new file mode 100644 index 000000000..c3454b79c --- /dev/null +++ b/.github/workflows/changelog.yaml @@ -0,0 +1,26 @@ +name: "Ensure labels" + +permissions: + pull-requests: read + +on: # yamllint disable-line rule:truthy + pull_request: + types: + - labeled + - unlabeled + - opened + - synchronize + - reopened + - ready_for_review + +jobs: + changelog: + runs-on: ubuntu-latest + steps: + - name: Check changelog labels + if: github.event.pull_request.draft == false && false == contains(join(github.event.pull_request.labels.*.name, ','), 'changelog/') + run: |- + echo "::error::Add 'changelog/*' label"; + exit 1; + - name: OK + run: echo "Thank you!" 
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml new file mode 100644 index 000000000..387ae5dd1 --- /dev/null +++ b/.github/workflows/codeql-analysis.yml @@ -0,0 +1,50 @@ +name: "CodeQL" + +permissions: + contents: read + checks: write + +on: + push: + branches: [ master ] + pull_request: + # The branches below must be a subset of the branches above + branches: [ master ] + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + security-events: write + + strategy: + fail-fast: false + matrix: + language: [ 'python' ] + # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] + # Learn more about CodeQL language support at https://git.io/codeql-language-support + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v2 + with: + languages: ${{ matrix.language }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. + # queries: ./path/to/local/query, your-org/your-repo/queries@main + + # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). 
+ # If this step fails, then you should remove it and run the build manually (see below) + - name: Autobuild + uses: github/codeql-action/autobuild@v2 + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v2 diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml new file mode 100644 index 000000000..efe88ab83 --- /dev/null +++ b/.github/workflows/labeler.yml @@ -0,0 +1,16 @@ +name: "Pull Request Labeler" + +permissions: + contents: read + pull-requests: write + +on: +- pull_request + +jobs: + triage: + runs-on: ubuntu-latest + steps: + - uses: DataDog/labeler@glob-all + with: + repo-token: "${{ secrets.GITHUB_TOKEN }}" diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml new file mode 100644 index 000000000..1898f83c6 --- /dev/null +++ b/.github/workflows/release.yaml @@ -0,0 +1,40 @@ +name: Build + +permissions: + contents: write + pull-requests: write + +on: + pull_request: + release: + types: + - published + +jobs: + build_wheels: + name: Build wheels on Ubuntu 20.04 + runs-on: ubuntu-20.04 + if: github.event_name == 'release' && github.event.action == 'published' + steps: + - uses: actions/checkout@v2 + # Include all history and tags, needed for building the right version + with: + fetch-depth: 0 + + - uses: actions/setup-python@v2 + name: Install Python + with: + python-version: '3.9' + + - name: Install datadog_checks_dev + run: | + python -m pip install datadog_checks_dev[cli]==20.0.1 + + - name: Set ddev pypi credentials + run: | + ddev config set pypi.user __token__ + ddev config set pypi.pass ${{ secrets.PYPI_TOKEN }} + + - name: Publish the wheel to PyPI + run: | + ddev release upload . 
--sdist diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml new file mode 100644 index 000000000..51f9ad156 --- /dev/null +++ b/.github/workflows/stale.yml @@ -0,0 +1,45 @@ +# Configuration for https://github.com/actions/stale + +name: "Stale issues and pull requests" + +permissions: + contents: write + issues: write + pull-requests: write + +on: + schedule: + - cron: "0 5 * * *" + +jobs: + stale: + runs-on: ubuntu-latest + steps: + - uses: actions/stale@v1 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + stale-issue-message: >- + Thanks for your contribution! + + + This issue has been automatically marked as stale because it has not had + activity in the last 30 days. Note that the issue will not be automatically + closed, but this notification will remind us to investigate why there's + been inactivity. Thank you for participating in the Datadog open source community. + + + If you would like this issue to remain open: + + 1. Verify that you can still reproduce the issue in the latest version of this project. + + 1. Comment that the issue is still reproducible and include updated details requested in the issue template. + days-before-stale: 30 + days-before-close: 99999 + stale-issue-label: 'stale' + exempt-issue-label: 'stale/exempt' + stale-pr-message: >- + This issue has been automatically marked as stale because it has not had activity in the last 30 days. + + Note that the issue will not be automatically closed, but this notification will remind us to investigate why there's been inactivity. 
+ stale-pr-label: 'stale' + exempt-pr-label: 'stale/exempt' diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 000000000..9364437c7 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,83 @@ +name: test + +permissions: + contents: read + +on: + push: + branches: + - master + pull_request: + branches: + - master + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }} + cancel-in-progress: true + +jobs: + lint: + name: Lint datadogpy files + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + + - name: Set up Python 3.8 + uses: actions/setup-python@v4 + with: + python-version: "3.8" + + - name: Install dependencies + run: | + python -m pip install pre-commit + python -m pip install tox + + # - name: Run black + # run: pre-commit run --all-files --hook-stage=manual black + + - name: Run mypy + run: tox -e mypy + + - name: Run flake8 + run: tox -e flake8 + + run: + name: Python ${{ matrix.python-version }} on ${{ startsWith(matrix.os, 'macos-') && 'macOS' || startsWith(matrix.os, 'windows-') && 'Windows' || 'Linux' }} + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-22.04] + python-version: ['pypy2.7', '3.7', 'pypy3.8'] + # os: [ubuntu-latest, windows-latest, macos-latest] + # python-version: ['2.7', '3.7', '3.8', '3.9', '3.10', 'pypy-2.7', 'pypy-3.8'] + env: + TOXENV: ${{ matrix.python-version }} + + steps: + - uses: actions/checkout@v3 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Set constraints for python2.7 + # Latest PyYaml supported version for python 2.7 is 5.4.1 which requires + # cython<3 to build. 
See: https://github.com/yaml/pyyaml/issues/724 + if: ${{ matrix.python-version == 'pypy2.7' }} + run: | + echo "cython<3" > /tmp/constraints.txt + echo "PIP_CONSTRAINT=/tmp/constraints.txt" >> $GITHUB_ENV + + - name: Install tox + run: pip install tox + + - name: Run unit tests + run: tox + + - name: Run integration tests on cassettes + run: tox -e integration -- --vcr-record=none + + - name: Run admin integration tests on cassettes + run: tox -e integration-admin -- --vcr-record=none diff --git a/.github/workflows/test_integration.yml b/.github/workflows/test_integration.yml new file mode 100644 index 000000000..152c0b500 --- /dev/null +++ b/.github/workflows/test_integration.yml @@ -0,0 +1,51 @@ +name: Run Integration Tests + +permissions: + contents: read + +on: # yamllint disable-line rule:truthy + pull_request: + types: + - labeled + - unlabeled + - opened + - synchronize + - reopened + - ready_for_review + +concurrency: + group: integration-${{ github.head_ref }} + cancel-in-progress: true + +jobs: + integration_tests: + runs-on: ubuntu-latest + if: github.event_name == 'pull_request' && github.event.pull_request.draft == false && contains(github.event.pull_request.labels.*.name, 'ci/integrations') + steps: + - name: Checkout code + uses: actions/checkout@v3 + + - name: Set up Python 3.7 + uses: actions/setup-python@v4 + with: + python-version: "3.7" + cache: "pip" + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install tox + + - name: Run integration tests + run: tox -e integration -- --vcr-record=all + env: + DD_TEST_CLIENT_API_KEY: "${{ secrets.DD_TEST_CLIENT_API_KEY }}" + DD_TEST_CLIENT_APP_KEY: "${{ secrets.DD_TEST_CLIENT_APP_KEY }}" + DD_TEST_CLIENT_USER: "${{ secrets.DD_TEST_CLIENT_USER }}" + + - name: Run admin integration tests + run: tox -e integration-admin -- --vcr-record=all + env: + DD_TEST_CLIENT_API_KEY: ${{ secrets.DD_TEST_CLIENT_API_KEY }} + DD_TEST_CLIENT_APP_KEY: ${{ secrets.DD_TEST_CLIENT_APP_KEY }} + 
DD_TEST_CLIENT_USER: ${{ secrets.DD_TEST_CLIENT_USER }} diff --git a/.gitignore b/.gitignore index 734741940..e436b5108 100644 --- a/.gitignore +++ b/.gitignore @@ -25,7 +25,7 @@ pip-log.txt # Unit test / coverage reports .coverage .tox -nosetests.xml +.mypy_cache #Doc ###doc @@ -42,3 +42,5 @@ nosetests.xml .DS_Store .eggs/ .env/ +.idea/ +*.swp diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 000000000..32a057649 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,11 @@ +repos: +- repo: local + hooks: + - id: black + name: format with black + language: python + entry: tox -e black + stages: [manual] + pass_filenames: false + additional_dependencies: + - "tox" diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 000000000..6bf3a9726 --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,20 @@ +# Read the Docs configuration file for Sphinx projects +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +# Required +version: 2 + +# Set the OS, Python version and other tools you might need +build: + os: ubuntu-22.04 + tools: + python: "3.7" + +# Build documentation in the docs/ directory with Sphinx +sphinx: + configuration: doc/source/conf.py + +# Optionally declare the Python requirements required to build your docs +python: + install: + - requirements: doc/requirements.txt diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index e07417005..000000000 --- a/.travis.yml +++ /dev/null @@ -1,14 +0,0 @@ -sudo: false -language: python -python: 2.7 -env: - - TOX_ENV=py26 - - TOX_ENV=py27 - - TOX_ENV=pypy - - TOX_ENV=py33 - - TOX_ENV=py34 - - TOX_ENV=flake8 -install: - - pip install tox -script: - - tox -e $TOX_ENV diff --git a/CHANGELOG.md b/CHANGELOG.md index 8488911a4..f4a035766 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,35 +1,448 @@ -CHANGELOG +# Changelog + +## v0.50.2 / 2024-11-21 + +* [Fixed] [AMLII-2170] fix removed/renamed function flush. 
See [#868](https://github.com/DataDog/datadogpy/pull/868). + +## 0.50.1 / 2024-09-18 + +* [Added] Add the ability for buffering and aggregation to work at the same time. See [#851](https://github.com/DataDog/datadogpy/pull/851). + +## v0.50.0 / 2024-08-20 + +* [Added] Add client side aggregation. See [#844](https://github.com/DataDog/datadogpy/pull/844). +* [Added] Add metric object type. See [#837](https://github.com/DataDog/datadogpy/pull/837). +* [Added] Support passing Unix timestamps to dogstatsd. See [#831](https://github.com/DataDog/datadogpy/pull/831). +* [Fixed] Fix a potential deadlock on fork. See [#836](https://github.com/DataDog/datadogpy/pull/836). +* [Changed] feat(origin detection): send both container ID and Entity ID. See [#828](https://github.com/DataDog/datadogpy/pull/828). + +## 0.49.1 / 2024-03-18 + +* [Fixed] Fix potential metric loss when open_buffer is combined with disable_buffering=False. See [#820](https://github.com/DataDog/datadogpy/pull/820). + +## 0.49.0 / 2024-03-12 + +* [Added] Add --restricted_roles option to Monitors API. See [#809](https://github.com/DataDog/datadogpy/pull/809). +* [Added] Support inode resolution mechanism for Origin Detection. See [#813](https://github.com/DataDog/datadogpy/pull/813). +* [Fixed] Fix potential deadlock during process fork. See [#817](https://github.com/DataDog/datadogpy/pull/817). + +## 0.48.0 / 2024-01-12 + +* [Added] Improve forking support. See [#804](https://github.com/DataDog/datadogpy/pull/804). +* [Fixed] Close socket during initialization. See [#808](https://github.com/DataDog/datadogpy/pull/808). +* [Fixed] Update payload size when updating socket path. See [#807](https://github.com/DataDog/datadogpy/pull/807). +* [Fixed] Fix stale client_transport tag. See [#802](https://github.com/DataDog/datadogpy/pull/802). +* [Fixed] Provide backwards compatible telemetry counters. See [#801](https://github.com/DataDog/datadogpy/pull/801). 
+ +## 0.47.0 / 2023-09-06 + +* [Added] Add IPv6 support for dogstatsd. See [#791](https://github.com/DataDog/datadogpy/pull/791). +* [Added] Add blocking socket mode and background sender. See [#787](https://github.com/DataDog/datadogpy/pull/787). + +## 0.46.0 / 2023-07-12 + +* [Added] Add tags and priority to the fpost monitor request. See [#739](https://github.com/DataDog/datadogpy/pull/739). +* [Added] Add AP1 Datacenter. See [#779](https://github.com/DataDog/datadogpy/pull/779). +* [Fixed] Add alphanumeric check to app/api key config creation. See [#781](https://github.com/DataDog/datadogpy/pull/781). +* [Fixed] Add govcloud to site param descriptions, and add custom url fallback. See [#780](https://github.com/DataDog/datadogpy/pull/780). + +## 0.45.0 / 2023-03-15 + +* [Added] Add dogwrap site option to send to US3 site. See [#749](https://github.com/DataDog/datadogpy/pull/749). Thanks [holidayworking](https://github.com/holidayworking). +* [Added] Add SLO search endpoint and update get_all. See [#744](https://github.com/DataDog/datadogpy/pull/744). +* [Added] [statsd] Add origin detection with container ID field. See [#720](https://github.com/DataDog/datadogpy/pull/720). +* [Fixed] Fix duplicate parsers in dogshell. See [#760](https://github.com/DataDog/datadogpy/pull/760). +* [Changed] Use header for credentials everywhere. See [#754](https://github.com/DataDog/datadogpy/pull/754). +* [Changed] [statsd] Raise ValueError instead of Exception when payload is too large. See [#730](https://github.com/DataDog/datadogpy/pull/730). Thanks [mlanicaputo](https://github.com/mlanicaputo). + +## 0.44.0 / 2022-03-02 + +* [Added] Update package metadata. See [#713](https://github.com/DataDog/datadogpy/pull/713). +* [Fixed] [statsd] Restore buffering state when exiting context manager. See [#715](https://github.com/DataDog/datadogpy/pull/715). +* [Changed] [threadstats] Ensure `ThreadStats` and `DogStatsd` `event()` signatures match. 
See [#712](https://github.com/DataDog/datadogpy/pull/712). +* [Changed] Improved dogstatsd mypy integration by adding additional type annotations. See [#710](https://github.com/DataDog/datadogpy/pull/710). Thanks [jahodfra](https://github.com/jahodfra). + +## 0.43.0 / 2021-12-09 + +* [Added] [statsd] Add ability to toggle `statsd.disable_buffering` state during runtime. See [#700](https://github.com/DataDog/datadogpy/pull/700). +* [Added] Add a dogshell option to change Datadog site to call API. See [#691](https://github.com/DataDog/datadogpy/pull/691). +* [Fixed] [statsd] Fix dedicated-endpoint telemetry shipping when used with UDP. See [#690](https://github.com/DataDog/datadogpy/pull/690). +* [Changed] [statsd] Disable statsd buffering by default. See [#692](https://github.com/DataDog/datadogpy/pull/692). +* [Changed] [statsd] Always terminate packets with newline. See [#685](https://github.com/DataDog/datadogpy/pull/685). +* [Changed] [statsd] Enable buffering by default for statsd. See [#670](https://github.com/DataDog/datadogpy/pull/670). +* [Changed] [statsd] Add caching to tag normalization for Python3.2+. See [#674](https://github.com/DataDog/datadogpy/pull/674). + +## 0.42.0 / 2021-07-01 + +* [Fixed] Remove unused decorator dependency. See [#651](https://github.com/DataDog/datadogpy/pull/651). +* [Fixed] [dogstatsd] Fix unicode handling of event text. See [#661](https://github.com/DataDog/datadogpy/pull/661). +* [Added] [dogstatsd] Improve tag normalization speed. See [#672](https://github.com/DataDog/datadogpy/pull/672). + +## 0.41.0 / 2021-04-15 + +* [Fixed] Fix decorator dependency for Python 2.7. See [#646](https://github.com/DataDog/datadogpy/pull/646). Thanks [artem888881](https://github.com/artem888881). +* [Fixed] [dogstatsd] Fix buffer operation thread-safety. See [#642](https://github.com/DataDog/datadogpy/pull/642). +* [Fixed] [dogstatsd] Improve performance of telemetry serialization. See [#641](https://github.com/DataDog/datadogpy/pull/641). 
+ +## 0.40.1 / 2021-03-01 + +* [Fixed] Fix blocking connections in dogstatsd. See [#634](https://github.com/DataDog/datadogpy/pull/634). + +## 0.40.0 / 2021-02-09 + +* [Added] Support DD env vars in threadstats. See [#625](https://github.com/DataDog/datadogpy/pull/625). +* [Added] Add logs listing feature. See [#622](https://github.com/DataDog/datadogpy/pull/622). Thanks [userlocalhost](https://github.com/userlocalhost). +* [Added] [telemetry] optionally decouple telemetry destination from other metrics. See [#558](https://github.com/DataDog/datadogpy/pull/558). +* [Fixed] Lazy log formatting. See [#628](https://github.com/DataDog/datadogpy/pull/628). +* [Fixed] Turn EAGAIN warning into debug. See [#629](https://github.com/DataDog/datadogpy/pull/629). +* [Fixed] Turn warning into info. See [#623](https://github.com/DataDog/datadogpy/pull/623). +* [Fixed] [dogstatsd] use monotonic clock source when available for timers. See [#615](https://github.com/DataDog/datadogpy/pull/615). Thanks [jd](https://github.com/jd). +* [Fixed] Fix Synthetics delete_test parameter name. See [#603](https://github.com/DataDog/datadogpy/pull/603). Thanks [jonathan-hosmer](https://github.com/jonathan-hosmer). +* [Fixed] Delay creation of lambda ThreadStats. See [#609](https://github.com/DataDog/datadogpy/pull/609). + +## 0.39.0 / 2020-08-25 + +* [Added] Add environment variable to disable statsd metric collection. See [#589](https://github.com/DataDog/datadogpy/pull/589). Thanks [dgzlopes](https://github.com/dgzlopes). +* [Added] [dogstatsd] Add support for distribution context manager and decorator. See [#581](https://github.com/DataDog/datadogpy/pull/581). Thanks [dnlserrano](https://github.com/dnlserrano). +* [Added] [dogstatsd] sock.setblocking(0) for UDP socket. See [#590](https://github.com/DataDog/datadogpy/pull/590). +* [Added] [dogstatsd] Add type information to some dogstatsd methods. See [#579](https://github.com/DataDog/datadogpy/pull/579). 
Thanks [vishalkuo](https://github.com/vishalkuo). + +## 0.38.0 / 2020-07-02 + +* [Added] Improve user-agent header to include telemetry information. See [#583](https://github.com/DataDog/datadogpy/pull/583). +* [Fixed] Use Python 2 compatible configparser explicitly. See [#585](https://github.com/DataDog/datadogpy/pull/585). Thanks [Panfilwk](https://github.com/Panfilwk). +* [Fixed] Fix error after creating `.dogrc` when not answering `y` or `n` at the prompt. See [#582](https://github.com/DataDog/datadogpy/pull/582). Thanks [NassimBounouas](https://github.com/NassimBounouas). + +## v0.37.1 / 2020-06-23 + +* [Fixed] Add `typing` and `configparser` dependencies for python versions that need it. See [#572](https://github.com/DataDog/datadogpy/pull/572). Thanks [jairideout](https://github.com/jairideout). + +## v0.37.0 / 2020-06-19 + +* [Added] Add version as an importable variable and remove dependency on `pkg_resources`. See [#566](https://github.com/DataDog/datadogpy/pull/566). Thanks [sjhewitt](https://github.com/sjhewitt). +* [Added] Initial support for Python type hints and Mypy type checking. See [#565](https://github.com/DataDog/datadogpy/pull/565). Thanks [jairideout](https://github.com/jairideout). +* [Fixed] [dogstatsd] Correct inverted constants. See [#568](https://github.com/DataDog/datadogpy/pull/568). +* [Changed] [dogstatsd] Size-based packetization for dogstatsd batched metrics . See [#562](https://github.com/DataDog/datadogpy/pull/562). + +## v0.36.0 / 2020-05-05 + +* [Added] Add excluded_regions to POST/PUT AWS lib. See [#552](https://github.com/DataDog/datadogpy/pull/552). +* [Added] Add support for DD_ENV, DD_SERVICE, and DD_VERSION environment variables. See [#548](https://github.com/DataDog/datadogpy/pull/548). +* [Fixed] Fix dogwrap help output case. See [#557](https://github.com/DataDog/datadogpy/pull/557). Thanks [deiga](https://github.com/deiga). +* [Fixed] Fix decode attribute error with options for py3. 
See [#555](https://github.com/DataDog/datadogpy/pull/555). + +## 0.35.0 / 2020-03-09 + +* [Added] Add `Set` metric type to threadstats. See [#545](https://github.com/DataDog/datadogpy/pull/545). +* [Added] Add enum for monitor types. See [#544](https://github.com/DataDog/datadogpy/pull/544). +* [Added] Support DD_API_KEY environment variable in dogwrap. See [#543](https://github.com/DataDog/datadogpy/pull/543). +* [Added] Add back telemetry to Dogstatsd client. See [#533](https://github.com/DataDog/datadogpy/pull/533). +* [Fixed] Remove illegal characters from tags. See [#517](https://github.com/DataDog/datadogpy/pull/517). Thanks [jirikuncar](https://github.com/jirikuncar). +* [Fixed] Fix syntax error in dogwrap timeout handler and always collect output. See [#538](https://github.com/DataDog/datadogpy/pull/538). Thanks [Matt343](https://github.com/Matt343). + +## 0.34.1 / 2020-02-10 + +* [Fixed] Revert dogstatsd telemetry. See [#530](https://github.com/DataDog/datadogpy/pull/530). +* [Fixed] Fix ServiceLevelObjective.get_all limit default in docstring. See [#527](https://github.com/DataDog/datadogpy/pull/527). Thanks [taylor-chen](https://github.com/taylor-chen). + +## 0.34.0 / 2020-02-04 + +* [Deprecated] Alias `dog` script names as `dogshell`. Please start using `dogshell` instead of `dog` command. See [#305](https://github.com/DataDog/datadogpy/pull/305). Thanks [dwminer](https://github.com/dwminer). +* [Fixed] [dogshell] Enforce the default 'normal' event priority client side. See [#511](https://github.com/DataDog/datadogpy/pull/511). +* [Fixed] [dogstatsd] Handle EAGAIN socket error when dropping packets. See [#515](https://github.com/DataDog/datadogpy/pull/515). Thanks [mrknmc](https://github.com/mrknmc). +* [Fixed] [dogstatsd] Handle OSError on socket.close on Python 3.6+. See [#510](https://github.com/DataDog/datadogpy/pull/510). Thanks [charettes](https://github.com/charettes). 
+* [Added] [dogstatsd] Add `statsd_constant_tags` kwarg to datadog.initialize(). See [#494](https://github.com/DataDog/datadogpy/pull/494). Thanks [kainswor](https://github.com/kainswor). +* [Added] [dogstatsd] Adding telemetry to dogstatsd. See [#505](https://github.com/DataDog/datadogpy/pull/505). +* [Added] [dogwrap] Add duration as metric. See [#506](https://github.com/DataDog/datadogpy/pull/506). +* [Added] [dogwrap] Add option to send to EU endpoint. See [#502](https://github.com/DataDog/datadogpy/pull/502). +* [Added] [dogwrap] Add warning option for dogwrap based on exit codes. See [#471](https://github.com/DataDog/datadogpy/pull/471). Thanks [dabcoder](https://github.com/dabcoder). +* [Added] Include LICENSE in MANIFEST.in. See [#500](https://github.com/DataDog/datadogpy/pull/500). Thanks [jjhelmus](https://github.com/jjhelmus). +* [Added] Add base class for all exceptions. See [#496](https://github.com/DataDog/datadogpy/pull/496). Thanks [hakamadare](https://github.com/hakamadare). +* [Added] Tag normalization. See [#489](https://github.com/DataDog/datadogpy/pull/489). + + +# 0.33.0 / 2019-12-12 + +* [FEATURE] Roles and Permissions APIs [#481][] +* [FEATURE] Add support for Azure, GCP and AWS integrations endpoints [#429][] +* [FEATURE] Add support for new `Monitor.can_delete` endpoint [#474][] +* [FEATURE] Add support for the `Monitor.validate` endpoint [#487][] +* [FEATURE] Add support for `/v1/downtime/cancel/by_scope` [#488][] +* [IMPROVEMENT] Dogshell: remove Exception wrapping [#477][] + +# 0.32.0 / 2019-11-18 + +* [BUGFIX] Fix distribution metric submission by sending api/app keys through query params for this endpoint. 
[#480][] +* [FEATURE] Add Synthetics support [#433][] + +# 0.31.0 / 2019-10-30 + +* [BUGFIX] Fix possible issue that could leak file descriptors when reading config [#425][] +* [BUGFIX] Fix graph snapshot status endpoint [#448][] +* [BUGFIX] Revert `users` resource name to singular `user` as it was not fully supported [#450][] +* [BUGFIX] Fix error printing to stderr char by char [#449][] +* [BUGFIX] Add `_return_raw_response` to `api` module to prevent import errors before `initialize` is called [#461][] +* [BUGFIX] Threadstats: Fix periodic timer error on interpreter shutdown [#423][] +* [FEATURE] Add support for SLOs [#453][] and [#464][] +* [FEATURE] Add ability to send compressed payloads for metrics and distribution. [#466][] +* [FEATURE] Add parameter `hostname_from_config` to `initialize` to enable/disable hostname lookup from datadog-agent config to avoid warnings [#428][] +* [FEATURE] Dogstatsd: add ability to specify a default sample rate for all submissions [#470][] (thanks [@dtao][]) +* [IMPROVEMENT] Send API credentials through headers instead of URL query parameter [#446][] +* [IMPROVEMENT] Clarify docstring for metrics API [#463][] +* [IMPROVEMENT] Assert `alert_type` is correct when creating event [#467][] +* [IMPROVEMENT] Dogshell: make query and type optional when updating a monitor [#447][] + +# 0.30.0 / 2019-09-12 + +* [BUGFIX] Treat `API_HOST` as URL, not as string [#411][] +* [FEATURE] Add `return_raw_response` option to `initialize` to enable adding raw responses to return values [#414][] +* [IMPROVEMENT] Add project URLs to package metadata [#413][] (thanks [@Tenzer][]) +* [IMPROVEMENT] Add support for handling a 401 status as an API error [#418][] +* [IMPROVEMENT] Allow configuring proxy in `~/.dogrc` for usage with dogshell [#415][] +* [IMPROVEMENT] Update `user` resource name to `users` to match new plural endpoints [#421][] +* [OTHER] Add deprecation warning to old aws lambda threadstats integration [#417][] +* [OTHER] Removed 
functionality to delete events and comments, as it's no longer supported by API [#420][] + +# 0.29.3 / 2019-06-12 + +* [BUGFIX] Fix encoding issue on install [#391][] and [#392][] (thanks [@Alphadash][] and [@ningirsu][]) +* [BUGFIX] Dogwrap: Fix dogwrap unicode option parsing on python 3, [#395][] (thanks [@Matt343][]) + +# 0.29.2 / 2019-06-10 + +* [BUGFIX] Revert [Return Rate Limit Headers][#378], [#401][] + +# 0.29.1 / 2019-06-10 + +* [BUGFIX] Properly extend response headers to response object to fix [Return Rate Limit Headers][#378], [#397][] + +# 0.29.0 / 2019-06-05 + +* [BUGFIX] Lambda wrapper: Always flush, even on exceptions, [#359][] (thanks [@jmehnle][]) +* [BUGFIX] API: Do not send JSON body in GET requests, [#382][] +* [BUGFIX] API: Allow listing timeboards with empty descriptions, [#385][] (thanks [@Tenzer][]) +* [BUGFIX] Dogwrap: Better string handling and python3 support, [#379][] +* [BUGFIX] Threadstats: ensure MetricsAggregator is threadsafe, [#370][] (thanks [@TheKevJames][]) +* [BUGFIX] Dogshell: Fixes the `--tags` argument for service_checks, [#387][] (thanks [@gordlea][]) +* [FEATURE] API: Add support for dashboard list API v2, [#374][] +* [IMPROVEMENT] API: Handle http code 429 rate limiting in external python library, [#376][] +* [IMPROVEMENT] API: Add ability to not attach_host_name to metrics, events and distributions, [#383][] +* [IMPROVEMENT] API: Return Rate Limit Headers, [#378][] (thanks [@fdhoff][]) +* [IMPROVEMENT] API: Do not override API parameters with default when calling initialize if they are already set, [#386][] +* [IMPROVEMENT] Dogshell: Add `--tags` support to monitors, [#356][] +* [IMPROVEMENT] Dogshell: Add documentation for environment variables, [#388][] (thanks [@sc68cal][]) +* [IMPROVEMENT] Dogstatsd: Added a new parameter `statsd_default_namespace` to the `initialize` method, [#353][] (thanks [@lceS2][]) +* [IMPROVEMENT] Import Iterable from collections.abc on python3 to avoid deprecation warning, [#381][] +* 
[IMPROVEMENT] Do not capture `/bin/hostname` stderr, [#368][] (thanks [@brendanlong][])
+* [IMPROVEMENT] Add support for environment variables `DD_API_KEY` and `DD_APP_KEY` for setting API and APP keys respectively, [#373][]
+
+# 0.28.0 / 2019-03-27
+
+* [BUGFIX] Dogshell: Properly require `handle` as an argument to the `comment` subcommand, [#364][]
+* [FEATURE] API: Add support for the `Dashboard.get_all` API, [#362][]
+* [FEATURE] Dogshell: Add support for defining monitors as JSON files, [#322][] (thanks [@Hefeweizen][])
+* [FEATURE] DogStatsD: Add support for the `DD_AGENT_HOST`, `DD_DOGSTATSD_PORT`, and `DD_ENTITY_ID` environment variables, [#363][]
+* [IMPROVEMENT] API: Add support for the `free` layout_type in `Dashboard.create` and `Dashboard.update`, [#362][]
+
+# 0.27.0 / 2019-03-06
+
+**New Dashboards API: https://docs.datadoghq.com/api/?lang=python#dashboards**
+
+The Timeboard and Screenboard API resources are deprecated in favor of the new Dashboard resource. See https://docs.datadoghq.com/api/?lang=python#dashboards for more details. 
+ +* [BUGFIX] API: Fix `UnicodeError` exceptions raised by the API client on errors that contain non ascii characters, [#223][], [#346][] +* [BUGFIX] DogStatsD: Fix unsafe socket creation on multithreaded applications and catch more exceptions, [#212][], [#349][] +* [FEATURE] API: Add support for the new Dashboard API, [#351][] +* [OTHER] Support `tox` for testing, [#342][] +* [OTHER] Support Python 3.7, **drop support for Python 3.3**, [#345][] + +# 0.26.0 / 2018-11-29 + +* [IMPROVEMENT] API: Keep HTTP connections alive when using `requests`, [#328][] + +# 0.25.0 / 2018-11-27 + +* [FEATURE] ThreadStats: Add AWS Lambda wrapper, [#324][] + +# 0.24.0 / 2018-11-12 + +* [BUGFIX] DogStatsD: Do not send empty UDP packets, [#264][] (thanks [@Tenzer][]) +* [FEATURE] API: Add support for distributions, [#312][] +* [FEATURE] ThreadStats: Add support for distributions, [#312][] +* [OTHER] Remove `simplejson` 3p dependency, [#304][], [#309][] (thanks [@alexpjohnson][]) + +# 0.23.0 / 2018-10-18 + +* [BUGFIX] Dogshell: Submit `--date_happened` timestamp when posting events, [#287][], [#301][] (thanks [@gplasky][]) +* [FEATURE] API: Add [search](https://docs.datadoghq.com/api/?lang=python#monitors-search) and [groups search](https://docs.datadoghq.com/api/?lang=python#monitors-group-search) methods to the `Monitor` resource, [#299][] +* [IMPROVEMENT] Dogshell: Set API and APP keys with environment variables, [#228][] (thanks [@taraslayshchuk][]) +* [IMPROVEMENT] DogStatsD: Prevent an issue that was causing the `timed` context manager object from overwriting a method with an instance variable, [#263][] (thanks [@florean][]) +* [OTHER] Include tests in PyPI tarball, [#259][] (thanks [@dotlambda][]) + +# 0.22.0 / 2018-06-27 + +**New API endpoint: https://api.datadoghq.com/api** + +The Datadog API client now uses https://api.datadoghq.com/api endpoint instead of https://app.datadoghq.com/api. +See [#257][] for more details. 
+ +* [BUGFIX] API: Close requests' sessions to limit memory usage, [#272][] (thanks [@thehesiod][]) +* [BUGFIX] Dogwrap: Fix `TypeError` exceptions when truncating `stdout`, `stderr` with Python 3, [#260][], [#267][] (thanks [@cabouffard][], [@glasnt][]) +* [FEATURE] DogStatsD: Add client level tags to status checks, [#279][] (thanks [@marshallbrekka][]) +* [FEATURE] DogStatsD: Add support for `statsd_socket_path` option in `initialize` function, [#282][] +* [IMPROVEMENT] Dogwrap: Default output encoding to UTF-8, [#268][] (thanks [@glasnt][]) + +# 0.21.0 / 2018-06-04 + +**Search hosts: `Infrastructure.search` is deprecated** +The `Infrastructure.search` method is deprecated in favor of the new `Hosts.search` method. + +* [BUGFIX] API: Prevent exception contexts from logging URLs and credentials, [#266][] +* [FEATURE] API: Add `search` and `totals` methods to the `Hosts` resource, [#261][] + +# 0.20.0 / 2018-03-23 +* [FEATURE] API: New `DashboardList` resource, [#252][] + +# 0.19.0 / 2018-02-08 + +**ThreadStats: metric type change** + +`ThreadStats` count metrics (produced from the `increment`/`decrement` and `histogram` methods) are now reported with the `count`/`rate` metric type, instead of `gauge`. +As a result, for the corresponding metrics: +1. Metric queries can use the `.as_count()`/ `.as_rate()` functions to switch between count and rate representations. +2. The default time aggregation uses a sum instead of an average. **This may affect the representation of existing metric queries, thus, monitors' definitions and metric graphs.** + +See [#242][] (thanks [@nilabhsagar][]) for more details. 
+ + +* [BUGFIX] ThreadStats: Send count metrics with `Rate` metric type, [#242][] (thanks [@nilabhsagar][]) +* [IMPROVEMENT] ThreadStats: Flush all metrics on exit, [#221][] + + +# 0.18.0 / 2018-01-24 +* [BUGFIX] Dogshell: Service checks can be sent with optional parameters set to null values, [#241][] (thanks [@timvisher][]) +* [BUGFIX] Dogwrap: Respect the output channel encoding format, [#236][] (thanks [@martin308][]) +* [FEATURE] DogstatsD: Add beta support for sending global distribution metrics, [#249][] + +# 0.17.0 / 2017-11-06 +* [BUGFIX] API: Discard non-null parameters in `api.ServiceCheck.check`method, [#206][], [#207][] (thanks [@ronindesign][]) +* [BUGFIX] API: Update HTTP method from `GET` to `POST` for `api.Screenboard.share` method, [#234][] (thanks [@seiro-ogasawara][]) +* [BUGFIX] Dogwrap: Encode from unicode before writing to stdout, stderr, [#201][], [#203][] (thanks [@ronindesign][]) +* [FEATURE] API: Add `list` method to `Metric` resource, [#230][] (thanks [@jbain][]) +* [FEATURE] DogStatsD: Add `socket_path` option to enable Unix socket traffic to DogStatsD 6, [#199][] +* [IMPROVEMENT] DogStatsD: Improve performances, speed up payload construction, [#233][] (thanks [@shargan][]) + +# 0.16.0 / 2017-04-26 +* [FEATURE] Dogshell: Add filtering options to the `monitor show_all` command, [#194][] + +# 0.15.0 / 2017-01-24 +* [FEATURE] API: Add metric metadata endpoints [#181][] +* [IMPROVEMENT] API: Disable redirection following with `urlfetch` HTTP library [#168][] (thanks [@evanj][]) +* [IMPROVEMENT] API: Increase default timeout from 3 to 60 seconds [#174][] (thanks [@ojongerius][]) +* [IMPROVEMENT] DogStatsD: Better exceptions on system default route resolution failures [#166][], [#156][] +* [IMPROVEMENT] DogStatsD: Close sockets when freed [#167][] (thanks [@thehesiod][]) + +# 0.14.0 / 2016-09-22 + +**Logging** + +`dd.datadogpy` logger name is no longer. `datadog` now uses logger names matching the project hierarchy, i.e. 
+* `datadog.api` +* `datadog.statsd` +* `datadog.threadstats` + +By default, `datadog` loggers are set with a do-nothing handler ([`NullHandler`](https://docs.python.org/3/howto/logging.html#configuring-logging-for-a-library)). + +To setup a different handler, one can add a handler +```python +import logging + +logging.getLogger("datadog").addHandler(...) +``` + +### Changes +* [FEATURE] DogStatsD: Provide elapsed time from the `timed` decorator, [#154][] (thanks [@tuukkamustonen][]) +* [FEATURE] DogStatsD: Allow starting and stopping `timed` manually, [#155][] (thanks [@tuukkamustonen][]) +* [IMPROVEMENT] DogStatsD: Support timing for coroutine functions on Python 3.5 or higher, [#146][] (thanks [@thehesiod][]) +* [OTHER] Rename loggers and set null handlers, [#161][] + +# 0.13.0 / 2016-08-24 +* [BUGFIX] Dogshell: Fix `UnicodeError` exceptions when a timeboard name contains non ascii characters, [#140][] +* [BUGFIX] DogStatsD: Support unicode characters in tags, [#132][], [#152][] +* [BUGFIX] ThreadStats: Fix `RuntimeError` exceptions on flush caused by an unsafe thread operation, [#143][], [#151][] (thanks [@leozc][]) +* [FEATURE] API: Add `delete` method to `Event` resource, [#145][] +* [IMPROVEMENT] DogStatsD: Have `timed` context manager to return itself, [#147][] (thanks [@ross][]) + +# 0.12.0 / 2016-05-27 +* [BUGFIX] API: Do not raise on hostname resolution failures, [#106][] +* [FEATURE] DogStatsD: Allow to dynamically use default route as a StatsD host, [#134][] +* [IMPROVEMENT] API: Enhance compatibility with Google App Engine, support `urlfetch` as a HTTP library [#106][] + +# 0.11.0 / 2016-03-14 +* [BUGFIX] Dogshell: Print usage when no argument is given on Python 3, [#123][] +* [BUGFIX] DogStatsD: Do not modify metric-level `tags` parameters when `constant_tags` is set, [#94][] (thanks [@steven-liu][]) +* [BUGFIX] DogStatsD: Fix thread-safety of the `[@timed][]` decorator, [#126][] (thanks [@mgood][]) +* [BUGFIX] ThreadStats: Do not modify metric-level 
`tags` parameters when `constant_tags` is set, [#94][], [#117][] (thanks [@steven-liu][]) +* [FEATURE] Dogshell: Add an `alert_type` option for `event post`, [#120][] (thanks [@drstevens][]) +* [FEATURE] DogStatD: Set constant tags from `DATADOG_TAGS` environment variable, [#114][] (thanks [@ewdurbin][] ) +* [FEATURE] DogStatsD: Support namespace, [#118][] +* [FEATURE] ThreadStats: Set constant tags from `DATADOG_TAGS` environment variable, [#114][] (thanks [@ewdurbin][] ) +* [FEATURE] ThreadStats: Support namespace, [#118][] +* [IMPROVEMENT] API: Support real numerical data types in `Metrics`, [#103][] +* [IMPROVEMENT] Dogshell: Attach hostname by default to event and metric posts, [#122][] +* [IMPROVEMENT] DogStatsD: Discard `None` values, [#119][] (thanks [@dcrosta][]) +* [IMPROVEMENT] DogStatsD: Import from top level, [#105][] +* [IMPROVEMENT] Dogwrap: Trim output and update event format, [#104][] (thanks [@gnarf][]) +* [OTHER] API: Adjust the documentation, [#96][], [#101][], [#110][], [#111][] (thanks [@aristiden7o][], [@emad][], [@aknuds1][], [@meawoppl][]) +* [OTHER] Dogshell: Update misleading help message for `event stream`, [#124][] + +# 0.10.0 / 2015-10-19 +* [BUGFIX] Fix typo in Dogshell breaking the Timeboard `pull_all` method, [#92][] +* [FEATURE] Enhance `constant_tags` support to ThreadStats and Statsd events, [#90][] (thanks [@jofusa][]) +* [FEATURE] New CRUD User API, [#89][] +* [OTHER] Fix Dogwrap documentation output typo, [#87][] (thanks [@gnarf][]) + +# 0.9.0 / 2015-08-31 +* [FEATURE] Option to time in ms with `statsd`, [#78][] (thanks [@g--][]) +* [FEATURE] Option to unmute `api` ApiError exceptions, [#76][] +* [OTHER] Use `simplejson` with Python 3.x, [#83][] + +# 0.8.0 / 2015-07-30 +* [FEATURE] Constant tags client option to append tags to every submitted metrics, [#68][] (thanks [@jofusa][]) +* [FEATURE] Embeddable graphs API, [#62][] +* [FEATURE] Optional metric name for the timed decorator, [#71][] (thanks [@clokep][]) +* [IMPROVEMENT] 
Option to use the verify parameter in requests to configure a ca certificates file or to disable verification, [#70][] (thanks [@ogst][])
+
 # 0.7.0 / 2015-07-01
 
-* [FEATURE] Revoke a shared screenboard, [#46][]
-* [FEATURE] Add new monitor `unmute` arg (`all_scopes`) to allow clearing all mute settings for a given monitor, [#58][]
-* [IMPROVEMENT] Add a timed context manager to `statsd`, [#65][] (thanks [@clokep][])
-* [IMPROVEMENT] Adjust dogshell descriptions to distinguish between `mute_all`/`unmute_all` and `mute`/`unmute` methods, [#58][]
-* [IMPROVEMENT] Include additional information in 403 response exceptions, [#58][]
 * [BUGFIX] Fix `Metric.send` method to play nice with multiple metrics, [#59][] (thanks [@kuzmich][])
 * [BUGFIX] Fix socket creation thread-unsafe code, [#57][] [#60][] (thanks [@GrahamDumpleton][])
 * [BUGFIX] Rename `metric_type` parameter to `type` in `Metric.send` method, [#64][]
+* [FEATURE] Add new monitor `unmute` arg (`all_scopes`) to allow clearing all mute settings for a given monitor, [#58][]
+* [FEATURE] Revoke a shared screenboard, [#46][]
+* [IMPROVEMENT] Add a timed context manager to `statsd`, [#65][] (thanks [@clokep][])
+* [IMPROVEMENT] Adjust Dogshell descriptions to distinguish between `mute_all`/`unmute_all` and `mute`/`unmute` methods, [#58][]
+* [IMPROVEMENT] Include additional information in 403 response exceptions, [#58][]
 * [OTHER] Update `requests` library, per CVE-2015-2296, [#63][]
 
-# 0.6.0 / 2015-06-01
-* [FEATURE] Add `message` parameter support to host muting commands, [#51][]
-* [BUGFIX] Always fall back when unable to determine hostname from `datadog.conf`, [#53][]
+# 0.6.1 / 2016-09-09
+
+* [BUGFIX] Fix socket creation thread-unsafe code, [#57][] [#60][] (thanks [@GrahamDumpleton][])
 
 # 0.6.0 / 2015-06-01
 
-* [FEATURE] Add `message` parameter support to host muting commands, [#51][]
 * [BUGFIX] Always fall back when unable to determine hostname from `datadog.conf`, [#53][]
+* [FEATURE] Add `message` parameter 
support to host muting commands, [#51][] # 0.5.0 / 2015-05-19 -* [FEATURE] Add support for metric query API, [#45][] * [BUGFIX] Fix an unexpected exception raised in `initialize` method on Windows with Python3.4, [#47][] +* [FEATURE] Add support for metric query API, [#45][] # 0.4.0 / 2015-04-24 -* [FEATURE] Initialize API parameters from environment variables, [#43][] -* [FEATURE] Add a priority option to Dogwrap, or auto-detect it based on output, [#42][] -* [FEATURE] Stream Dogwrap command output during its execution or buffer it, [#39][] * [BUGFIX] Fix a wrong event post parameter in Dogshell/Dogwrap, [#36][] * [BUGFIX] Fix wrong keys in auto-generated .dogrc, [#34][] +* [FEATURE] Add a priority option to Dogwrap, or auto-detect it based on output, [#42][] +* [FEATURE] Initialize API parameters from environment variables, [#43][] +* [FEATURE] Stream Dogwrap command output during its execution or buffer it, [#39][] * [OTHER] Add PyPI classifiers, [#41][] # 0.3.0 / 2015-04-08 @@ -39,15 +452,15 @@ CHANGELOG * [BUGFIX] Fix a leftover debug statement # 0.2.1 / REMOVED -* [BUGFIX] Import json module from `datadog.compat` * [BUGFIX] Fix test requirements +* [BUGFIX] Import json module from `datadog.compat` * [OTHER] Contributing update See [#8][], thanks [@benweatherman][] # 0.2.0 / 2015-03-31 -* [FEATURE] Add tests to check `statsd` and `threadstats` thread safety, [#6][] * [BUGFIX] Fixes `threadstats` unsafe thread operations, [#6][] +* [FEATURE] Add tests to check `statsd` and `threadstats` thread safety, [#6][] * [OTHER] Changelog update, [#9][] [@miketheman][] # 0.1.2 / 2015-03-23 @@ -78,11 +491,213 @@ See [#8][], thanks [@benweatherman][] [#58]: https://github.com/DataDog/datadogpy/issues/58 [#59]: https://github.com/DataDog/datadogpy/issues/59 [#60]: https://github.com/DataDog/datadogpy/issues/60 +[#62]: https://github.com/DataDog/datadogpy/issues/62 [#63]: https://github.com/DataDog/datadogpy/issues/63 [#64]: https://github.com/DataDog/datadogpy/issues/64 
[#65]: https://github.com/DataDog/datadogpy/issues/65 +[#67]: https://github.com/DataDog/datadogpy/issues/67 +[#68]: https://github.com/DataDog/datadogpy/issues/68 +[#70]: https://github.com/DataDog/datadogpy/issues/70 +[#71]: https://github.com/DataDog/datadogpy/issues/71 +[#76]: https://github.com/DataDog/datadogpy/issues/76 +[#77]: https://github.com/DataDog/datadogpy/issues/77 +[#78]: https://github.com/DataDog/datadogpy/issues/78 +[#83]: https://github.com/DataDog/datadogpy/issues/83 +[#87]: https://github.com/DataDog/datadogpy/issues/87 +[#89]: https://github.com/DataDog/datadogpy/issues/89 +[#90]: https://github.com/DataDog/datadogpy/issues/90 +[#92]: https://github.com/DataDog/datadogpy/issues/92 +[#94]: https://github.com/DataDog/datadogpy/issues/94 +[#96]: https://github.com/DataDog/datadogpy/issues/96 +[#101]: https://github.com/DataDog/datadogpy/issues/101 +[#103]: https://github.com/DataDog/datadogpy/issues/103 +[#104]: https://github.com/DataDog/datadogpy/issues/104 +[#105]: https://github.com/DataDog/datadogpy/issues/105 +[#106]: https://github.com/DataDog/datadogpy/issues/106 +[#110]: https://github.com/DataDog/datadogpy/issues/110 +[#111]: https://github.com/DataDog/datadogpy/issues/111 +[#114]: https://github.com/DataDog/datadogpy/issues/114 +[#117]: https://github.com/DataDog/datadogpy/issues/117 +[#118]: https://github.com/DataDog/datadogpy/issues/118 +[#119]: https://github.com/DataDog/datadogpy/issues/119 +[#120]: https://github.com/DataDog/datadogpy/issues/120 +[#122]: https://github.com/DataDog/datadogpy/issues/122 +[#123]: https://github.com/DataDog/datadogpy/issues/123 +[#124]: https://github.com/DataDog/datadogpy/issues/124 +[#126]: https://github.com/DataDog/datadogpy/issues/126 +[#132]: https://github.com/DataDog/datadogpy/issues/132 +[#134]: https://github.com/DataDog/datadogpy/issues/134 +[#140]: https://github.com/DataDog/datadogpy/issues/140 +[#143]: https://github.com/DataDog/datadogpy/issues/143 +[#145]: 
https://github.com/DataDog/datadogpy/issues/145 +[#146]: https://github.com/DataDog/datadogpy/issues/146 +[#147]: https://github.com/DataDog/datadogpy/issues/147 +[#151]: https://github.com/DataDog/datadogpy/issues/151 +[#152]: https://github.com/DataDog/datadogpy/issues/152 +[#154]: https://github.com/DataDog/datadogpy/issues/154 +[#155]: https://github.com/DataDog/datadogpy/issues/155 +[#156]: https://github.com/DataDog/datadogpy/issues/156 +[#161]: https://github.com/DataDog/datadogpy/issues/161 +[#166]: https://github.com/DataDog/datadogpy/issues/166 +[#167]: https://github.com/DataDog/datadogpy/issues/167 +[#168]: https://github.com/DataDog/datadogpy/issues/168 +[#174]: https://github.com/DataDog/datadogpy/issues/174 +[#175]: https://github.com/DataDog/datadogpy/issues/175 +[#176]: https://github.com/DataDog/datadogpy/issues/176 +[#178]: https://github.com/DataDog/datadogpy/issues/178 +[#181]: https://github.com/DataDog/datadogpy/issues/181 +[#184]: https://github.com/DataDog/datadogpy/issues/184 +[#185]: https://github.com/DataDog/datadogpy/issues/185 +[#194]: https://github.com/DataDog/datadogpy/issues/194 +[#199]: https://github.com/DataDog/datadogpy/issues/199 +[#201]: https://github.com/DataDog/datadogpy/issues/201 +[#203]: https://github.com/DataDog/datadogpy/issues/203 +[#206]: https://github.com/DataDog/datadogpy/issues/206 +[#207]: https://github.com/DataDog/datadogpy/issues/207 +[#212]: https://github.com/DataDog/datadogpy/issues/212 +[#221]: https://github.com/DataDog/datadogpy/issues/221 +[#223]: https://github.com/DataDog/datadogpy/issues/223 +[#228]: https://github.com/DataDog/datadogpy/issues/228 +[#230]: https://github.com/DataDog/datadogpy/issues/230 +[#233]: https://github.com/DataDog/datadogpy/issues/233 +[#234]: https://github.com/DataDog/datadogpy/issues/234 +[#236]: https://github.com/DataDog/datadogpy/issues/236 +[#241]: https://github.com/DataDog/datadogpy/issues/241 +[#242]: https://github.com/DataDog/datadogpy/issues/242 +[#249]: 
https://github.com/DataDog/datadogpy/issues/249 +[#252]: https://github.com/DataDog/datadogpy/issues/252 +[#257]: https://github.com/DataDog/datadogpy/issues/257 +[#259]: https://github.com/DataDog/datadogpy/issues/259 +[#260]: https://github.com/DataDog/datadogpy/issues/260 +[#261]: https://github.com/DataDog/datadogpy/issues/261 +[#263]: https://github.com/DataDog/datadogpy/issues/263 +[#264]: https://github.com/DataDog/datadogpy/issues/264 +[#266]: https://github.com/DataDog/datadogpy/issues/266 +[#267]: https://github.com/DataDog/datadogpy/issues/267 +[#268]: https://github.com/DataDog/datadogpy/issues/268 +[#272]: https://github.com/DataDog/datadogpy/issues/272 +[#279]: https://github.com/DataDog/datadogpy/issues/279 +[#282]: https://github.com/DataDog/datadogpy/issues/282 +[#287]: https://github.com/DataDog/datadogpy/issues/287 +[#299]: https://github.com/DataDog/datadogpy/issues/299 +[#301]: https://github.com/DataDog/datadogpy/issues/301 +[#304]: https://github.com/DataDog/datadogpy/issues/304 +[#309]: https://github.com/DataDog/datadogpy/issues/309 +[#312]: https://github.com/DataDog/datadogpy/issues/312 +[#322]: https://github.com/DataDog/datadogpy/issues/322 +[#324]: https://github.com/DataDog/datadogpy/issues/324 +[#328]: https://github.com/DataDog/datadogpy/issues/328 +[#342]: https://github.com/DataDog/datadogpy/issues/342 +[#345]: https://github.com/DataDog/datadogpy/issues/345 +[#346]: https://github.com/DataDog/datadogpy/issues/346 +[#349]: https://github.com/DataDog/datadogpy/issues/349 +[#351]: https://github.com/DataDog/datadogpy/issues/351 +[#353]: https://github.com/DataDog/datadogpy/issues/353 +[#356]: https://github.com/DataDog/datadogpy/issues/356 +[#359]: https://github.com/DataDog/datadogpy/issues/359 +[#362]: https://github.com/DataDog/datadogpy/issues/362 +[#363]: https://github.com/DataDog/datadogpy/issues/363 +[#364]: https://github.com/DataDog/datadogpy/issues/364 +[#368]: https://github.com/DataDog/datadogpy/issues/368 +[#370]: 
https://github.com/DataDog/datadogpy/issues/370 +[#373]: https://github.com/DataDog/datadogpy/issues/373 +[#374]: https://github.com/DataDog/datadogpy/issues/374 +[#376]: https://github.com/DataDog/datadogpy/issues/376 +[#378]: https://github.com/DataDog/datadogpy/issues/378 +[#379]: https://github.com/DataDog/datadogpy/issues/379 +[#381]: https://github.com/DataDog/datadogpy/issues/381 +[#382]: https://github.com/DataDog/datadogpy/issues/382 +[#383]: https://github.com/DataDog/datadogpy/issues/383 +[#385]: https://github.com/DataDog/datadogpy/issues/385 +[#386]: https://github.com/DataDog/datadogpy/issues/386 +[#387]: https://github.com/DataDog/datadogpy/issues/387 +[#388]: https://github.com/DataDog/datadogpy/issues/388 +[#391]: https://github.com/DataDog/datadogpy/issues/391 +[#392]: https://github.com/DataDog/datadogpy/issues/392 +[#395]: https://github.com/DataDog/datadogpy/issues/395 +[#397]: https://github.com/DataDog/datadogpy/issues/397 +[#401]: https://github.com/DataDog/datadogpy/issues/401 +[#411]: https://github.com/DataDog/datadogpy/issues/411 +[#413]: https://github.com/DataDog/datadogpy/issues/413 +[#414]: https://github.com/DataDog/datadogpy/issues/414 +[#415]: https://github.com/DataDog/datadogpy/issues/415 +[#417]: https://github.com/DataDog/datadogpy/issues/417 +[#418]: https://github.com/DataDog/datadogpy/issues/418 +[#420]: https://github.com/DataDog/datadogpy/issues/420 +[#421]: https://github.com/DataDog/datadogpy/issues/421 +[#423]: https://github.com/DataDog/datadogpy/issues/423 +[#425]: https://github.com/DataDog/datadogpy/issues/425 +[#428]: https://github.com/DataDog/datadogpy/issues/428 +[#429]: https://github.com/DataDog/datadogpy/issues/429 +[#433]: https://github.com/DataDog/datadogpy/issues/433 +[#446]: https://github.com/DataDog/datadogpy/issues/446 +[#447]: https://github.com/DataDog/datadogpy/issues/447 +[#448]: https://github.com/DataDog/datadogpy/issues/448 +[#449]: https://github.com/DataDog/datadogpy/issues/449 +[#450]: 
https://github.com/DataDog/datadogpy/issues/450 +[#453]: https://github.com/DataDog/datadogpy/issues/453 +[#461]: https://github.com/DataDog/datadogpy/issues/461 +[#463]: https://github.com/DataDog/datadogpy/issues/463 +[#464]: https://github.com/DataDog/datadogpy/issues/464 +[#466]: https://github.com/DataDog/datadogpy/issues/466 +[#467]: https://github.com/DataDog/datadogpy/issues/467 +[#470]: https://github.com/DataDog/datadogpy/issues/470 +[#474]: https://github.com/DataDog/datadogpy/issues/474 +[#477]: https://github.com/DataDog/datadogpy/issues/477 +[#480]: https://github.com/DataDog/datadogpy/issues/480 +[#481]: https://github.com/DataDog/datadogpy/issues/481 +[#487]: https://github.com/DataDog/datadogpy/issues/487 +[#488]: https://github.com/DataDog/datadogpy/issues/488 +[@Alphadash]: https://github.com/Alphadash [@GrahamDumpleton]: https://github.com/GrahamDumpleton +[@Hefeweizen]: https://github.com/Hefeweizen +[@Matt343]: https://github.com/Matt343 +[@Tenzer]: https://github.com/Tenzer +[@TheKevJames]: https://github.com/TheKevJames +[@aknuds1]: https://github.com/aknuds1 +[@alexpjohnson]: https://github.com/alexpjohnson +[@aristiden7o]: https://github.com/aristiden7o [@benweatherman]: https://github.com/benweatherman +[@brendanlong]: https://github.com/brendanlong +[@cabouffard]: https://github.com/cabouffard [@clokep]: https://github.com/clokep +[@dcrosta]: https://github.com/dcrosta +[@dtao]: https://github.com/dtao +[@dotlambda]: https://github.com/dotlambda +[@drstevens]: https://github.com/drstevens +[@emad]: https://github.com/emad +[@evanj]: https://github.com/evanj +[@ewdurbin]: https://github.com/ewdurbin +[@fdhoff]: https://github.com/fdhoff +[@florean]: https://github.com/florean +[@g--]: https://github.com/g-- +[@glasnt]: https://github.com/glasnt +[@gnarf]: https://github.com/gnarf +[@gordlea]: https://github.com/gordlea +[@gplasky]: https://github.com/gplasky +[@jbain]: https://github.com/jbain +[@jmehnle]: https://github.com/jmehnle 
+[@jofusa]: https://github.com/jofusa [@kuzmich]: https://github.com/kuzmich -[@miketheman]: https://github.com/miketheman \ No newline at end of file +[@lceS2]: https://github.com/lceS2 +[@leozc]: https://github.com/leozc +[@marshallbrekka]: https://github.com/marshallbrekka +[@martin308]: https://github.com/martin308 +[@meawoppl]: https://github.com/meawoppl +[@mgood]: https://github.com/mgood +[@miketheman]: https://github.com/miketheman +[@nilabhsagar]: https://github.com/nilabhsagar +[@ningirsu]: https://github.com/ningirsu +[@ogst]: https://github.com/ogst +[@ojongerius]: https://github.com/ojongerius +[@ronindesign]: https://github.com/ronindesign +[@ross]: https://github.com/ross +[@sc68cal]: https://github.com/sc68cal +[@seiro-ogasawara]: https://github.com/seiro-ogasawara +[@shargan]: https://github.com/shargan +[@steven-liu]: https://github.com/steven-liu +[@taraslayshchuk]: https://github.com/taraslayshchuk +[@thehesiod]: https://github.com/thehesiod +[@timed]: https://github.com/timed +[@timvisher]: https://github.com/timvisher +[@tuukkamustonen]: https://github.com/tuukkamustonen diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index cb3119305..e4f40f9c9 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,20 +1,115 @@ -# Contributing +# How to contribute -We love pull requests. Here's a quick guide. +First of all, thanks for contributing! -Fork, then clone the repo: +This document provides some basic guidelines for contributing to this repository. To propose improvements, feel free to submit a PR. - git clone git@github.com:your-username/datadogpy.git +## Reporting a Bug - Requesting a feature - Github Issues -Make sure the tests pass: +* **Ensure the bug was not already reported** by searching on GitHub under [Issues][1]. +* If you're unable to find an open issue addressing the problem, [open a new one][2]. + - **Fill out the issue template completely**. Label the issue properly. + - Add `severity/` label. 
+ - Add `documentation` label if this issue is related to documentation changes.
+* If you have a feature request, it is encouraged to [contact support][3] so the request can be prioritized and properly tracked.
+* **Do not open an issue if you have a question**, instead [contact support][3].
 
-    python setup.py test
+## Suggesting enhancements - Pull Requests
 
-Make your change. Add tests for your change. Make the tests pass again.
+Have you fixed an issue? Many thanks!
 
-Push to your fork and [submit a pull request][pr].
+Read the [development guide](/DEVELOPMENT.md) for more information on how to get started.
 
-[pr]: https://github.com/your-username/datadogpy/compare/DataDog:master...master
+In order to ease/speed up our review, here are some items you can check/improve when submitting your PR:
+* **Ensure an [Issue has been created](#reporting)**.
+* Avoid changing too many things at once.
+  - Make sure that your Pull Request only fixes one Issue at a time.
+* **Write tests** for the code you wrote.
+* Make sure that **all tests pass locally**.
+* Summarize your PR with a **meaningful title** and **fill out the pull request description template completely!**
+* Add the most suitable changelog label choosing one of the following:
+  * `changelog/Added` for new features.
+  * `changelog/Changed` for changes in existing functionality.
+  * `changelog/Deprecated` for soon-to-be removed features.
+  * `changelog/Removed` for now removed features.
+  * `changelog/Fixed` for any bug fixes.
+  * `changelog/Security` in case of vulnerabilities.
+  * `changelog/no-changelog` in case this PR should not appear in the changelog at all.
 
-At this point you're waiting on us. We may suggest some changes or
-improvements or alternatives.
+See [here][4] for more details about changelogs.
+
+Your pull request must pass all CI tests before we will merge it. If you're seeing
+an error and don't think it's your fault, it may not be! 
[Join us on Slack][5] or send us an email, and together we'll
+get it sorted out.
+
+### Keep it small, focused
+
+Avoid changing too many things at once. For instance if you're fixing two different
+checks at once, it makes reviewing harder and the _time-to-release_ longer.
+
+### Pull Request title
+
+Unless the PR is marked with the proper exclusion label, the title will be used
+to automatically fill the changelog entries. For this reason the title must be
+concise but explanatory.
+
+### Commit Messages
+
+Please don't be this person: `git commit -m "Fixed stuff"`. Take a moment to
+write meaningful commit messages.
+
+The commit message should describe the reason for the change and give extra details
+that will allow someone later on to understand in 5 seconds the thing you've been
+working on for a day.
+
+### Releasing
+
+The release procedure is managed by Datadog; instructions can be found in the [RELEASING](/RELEASING.md) document.
+
+## Asking a question
+
+Need help? Contact [Datadog support][3]
+
+## Additional Notes
+
+### Issue and Pull Request Labels
+
+This section lists the labels we use to help us track and manage issues and pull requests.
+
+| Label name                    | Usage                    | Description
+|-------------------------------|--------------------------|------------------------------------------------------------
+| `backward-incompatible`       | Issues and Pull Requests | Warn for backward incompatible changes.
+| `changelog/Added`             | Pull Request Only        | Added features results into a minor version bump.
+| `changelog/Changed`           | Pull Request Only        | Changed features results into a major version bump.
+| `changelog/Deprecated`        | Pull Request Only        | Deprecated features results into a major version bump.
+| `changelog/Fixed`             | Pull Request Only        | Fixed features results into a bug fix version bump.
+| `changelog/no-changelog`      | Pull Request Only        | Changes don't appear in changelog. 
+| `changelog/Removed`           | Pull Request Only        | Removed features results into a major version bump. +| `changelog/Security`          | Pull Request Only        | Security fixes results into a bug fix version bump. +| `community/help-wanted`       | Issue Only               | Community help wanted. +| `community`                   | Issues and Pull Requests | Community driven changes. +| `dev/testing`                 | Issues and Pull Requests | Tests related changes. +| `dev/tooling`                 | Issues and Pull Requests | Tooling related changes. +| `do-not-merge/HOLD`           | Pull Request Only        | Do not merge this PR. +| `do-not-merge/WIP`            | Pull Request Only        | Do not merge this PR. +| `documentation`               | Issues and Pull Requests | Documentation related changes. +| `duplicate`                   | Issue Only               | Duplicate issue. +| `invalid`                     | Issue Only               | Invalid issue. +| `kind/bug`                    | Issue Only               | Bug related issue. +| `kind/feature-request`        | Issue Only               | Feature request related issue. +| `severity/critical`           | Issue Only               | Critical severity issue. +| `severity/major`              | Issue Only               | Major severity issue. +| `severity/minor`              | Issue Only               | Minor severity issue. +| `severity/normal`             | Issue Only               | Normal severity issue. +| `stale`                       | Issues and Pull Requests | Stale - Bot reminder. +| `stale/exempt`                | Issues and Pull Requests | Exempt from being marked as stale. +| `resource/api`                | Issues and Pull Requests | API Client related issue or changes. +| `resource/dogshell`           | Issues and Pull Requests | Dogshell related issue or changes. +| `resource/dogstatsd`          | Issues and Pull Requests | DogStatsD related issue or changes. +| `resource/threadstats`        | Issues and Pull Requests | Threadstats related issue or changes. 
+ +[1]: https://github.com/DataDog/datadogpy/issues +[2]: https://github.com/DataDog/datadogpy/issues/new +[3]: https://docs.datadoghq.com/help +[4]: https://keepachangelog.com/en/1.0.0 +[5]: https://datadoghq.slack.com diff --git a/DEVELOPMENT.md b/DEVELOPMENT.md new file mode 100644 index 000000000..48af01653 --- /dev/null +++ b/DEVELOPMENT.md @@ -0,0 +1,206 @@ +# Development + +## Basics + +We love pull requests. Here's a quick guide. + +Fork, then clone the repo: + + git clone git@github.com:your-username/datadogpy.git + + +## Adding new API endpoints +This section outlines the process for adding a new endpoint to this API client. + +Let's use the example of creating an endpoint for `Hosts`. This example endpoint accepts either a GET or DELETE request at the `/hosts` endpoint as well as a GET request at the `hosts/totals` endpoint. + +**NOTE:** This endpoint is just an example and doesn't describe the existing `hosts` resource. + +Start by adding a new file `hosts.py` to the `datadog/api` folder for the new endpoint. Use the following simple class structure: + +``` +from datadog.api.resources import ( + GetableAPIResource, + DeletableAPIResource +) + +class Hosts(GetableAPIResource, DeletableAPIResource): + """ + A wrapper around Hosts HTTP API. + """ + _resource_name = 'hosts' +``` + +Each class has the above simple structure, most importantly the following two pieces: + +* A `_resource_name` - Indicates the URI of the api. +* A set of classes to inherit from. This is where the get/post/put/delete request code is defined for you. Available options are: + +| Class Name | Description | +| --------------------- | ----------------------------------------------------------------------------------------------- | +| CreateableAPIResource | Wrapper class for providing a `POST` request for your class, implementing a `create` method. | +| SendableAPIResource | Fork of CreateableAPIResource class with a `send` method. 
| +| UpdatableAPIResource | Wrapper class for providing a `PUT` request for your class, implementing an `update` method. | +| DeletableAPIResource | Wrapper class for providing a `DELETE` request for your class, implementing an `delete` method. | +| GetableAPIResource | Wrapper class for providing a `GET` request for your class, implementing an `get` method. | +| ListableAPIResource | Wrapper class for providing a `GET` request for your class, implementing an `get_all` method. | +| SearchableAPIResource | Fork of ListableAPIResource class with a `_search` method. | +| ActionAPIResource | Generic wrapper to trigger any type of HTTP request. | + +More information about the available classes to inherit from can be found in the [`resources.py`](https://github.com/DataDog/datadogpy/blob/master/datadog/api/resources.py) file. + +Looking back at the class above: + +* The URI this class can access is defined: `hosts`. +* The `delete` and `get` methods can be called by inheriting `GetableAPIResource` and `DeletableAPIResource`. + +The remaining piece is to add support for the `GET` request to the `hosts/totals` URI. To do this, update your code to include: + +``` +from datadog.api.resources import ( + GetableAPIResource, + DeletableAPIResource, + ActionAPIResource +) + +class Hosts(GetableAPIResource, DeletableAPIResource, ActionAPIResource): + """ + A wrapper around Hosts HTTP API. + """ + _resource_name = 'hosts' + @classmethod + def totals(cls): + """ + Get total number of hosts active and up. + + :returns: Dictionary representing the API's JSON response + """ + return super(Hosts, cls)._trigger_class_action('GET', 'totals') +``` + +Notice the addition of the new inherited class `ActionAPIResource`, and the new function `totals`. This new `totals` function calls the `_trigger_class_action` method from that class and appends `totals` to our URI, making the full path: `baseAPI/hosts/totals`. 
+ +Now you can use your new SDK and call the following methods with various params and request bodies: +* `Hosts.totals()` +* `Hosts.get()` +* `Hosts.delete()` + +### Tests + +This project contains: +- [Datadog API Client](/datadog/api) +- [Dogshell](/datadog/dogshell) +- [DogStatsD](/datadog/dogstatsd) +- [Threadstats](/datadog/threadstats) + + +We have [unit](/tests/unit), [integration](/tests/integration) and [performance](/tests/performance) tests. +Integration tests need an _API_ and _APP Keys_ to run against a Datadog account. +- __WARNING__: Never use keys for an organization that contains anything important. + +We use `tox` to run tests. You can find the [tox.ini](/tox.ini) config in the root directory. +We create 2 environments: +- Default environments: they will run all Unit, Performance and Integration tests not marked as `admin_needed`. + - Execute this with the `tox` command. +- The explicit `integration-admin` environment: It will only run integration tests marked with the `admin_needed` marker. + - Tests marked as `admin_needed` need an API and APP Key with admin permissions. + - __!!!WARNING!!!__ These tests will use these keys to do destructive changes on your Datadog account. + - __Never use keys for an organization that contains anything important!__. + +#### Setup Integration Tests + +To setup integration tests you will need to export the following environment variables. + +``` +# !!!WARNING!!! The integration tests will use these keys to do destructive changes. +# Never use keys for an organization that contains anything important. +export DD_TEST_CLIENT_API_KEY= +export DD_TEST_CLIENT_APP_KEY= +export DD_TEST_CLIENT_USER= +``` + +#### Run tests + +By default, when invoking `tox`, [unit](#unit-tests), [style](#style-checks) and [integration](#integration-tests) that don't require admin credentials will all be run. + +##### Unit tests + +Unit tests are run with all the `pyXX` environments. 
+ +For example, run the unit tests with Python 3.7 with: +``` +tox -e py37 +``` + +##### Style checks + +Run flake8 validation with: +``` +tox -e flake8 +``` + +##### Integration tests + +Integration tests run against an actual Datadog account. You need to [provide credentials](#setup-integration-tests) for them to run. +For this reason, it is **highly recommended** to avoid providing credentials for a production account. + +There are two kinds of integration tests: + - [Regular integration tests](#regular-integration-tests) + - [Admin integration tests](#admin-integration-tests) + +###### Regular integration tests + +Regular integration tests are tests that can work with credentials from a standard Datadog user, without admin privileges. +They will create resources in Datadog such as dashboards or monitors, and clean up after themselves. + +Run them with +``` +tox -e integration +``` + +###### Admin integration tests + +Admin integration tests are tests that either need admin privileges to run (e.g. manage users) or can make destructive changes to your org (e.g. muting/unmuting of all monitors). +They are not run by default when invoking `tox`, you have to run them explicitly with: +``` +tox -e integration-admin +``` + +##### Run specific tests methods/classes/folders + +`tox` invokes `pytest` to run the tests. You can pass `pytest` arguments in the `tox` command line for further filtering of the tests you want to run. + +For example, to exclude all integration tests using the `--ignore-glob` argument. + +``` +tox -- --ignore-glob=tests/integration/* +``` + +Another example below shows how to run test classes or test methods matching a given string by using the `-k` argument from `pytest`. +With this command, only classes and methods matching `dogstatsd` are run. + +``` +tox -- -k dogstatsd +``` + +To run the entire `dogstatsd` folder, use: + +``` +tox -- tests/unit/dogstatsd +``` + +## Submit Your Changes + +Make your change. Add tests for your change. 
Make the tests pass again. + +You can also install this project locally in editable mode to make changes and run any manual tests. +This can be done by installing using the following pip command: + +``` +pip install -e . +``` + +Push to your fork and submit a [pull request](/CONTRIBUTING.md). + +At this point you're waiting on us. We may suggest some changes or +improvements or alternatives. diff --git a/LICENSE b/LICENSE index 1ee705f76..984d5d257 100644 --- a/LICENSE +++ b/LICENSE @@ -1,24 +1,26 @@ -Copyright (c) 2015, Datadog -All rights reserved. +Copyright (c) 2015-Present Datadog, Inc Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Datadog nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/LICENSE-3rdparty.csv b/LICENSE-3rdparty.csv new file mode 100644 index 000000000..3afd934d5 --- /dev/null +++ b/LICENSE-3rdparty.csv @@ -0,0 +1,4 @@ +Component,Origin,License,Copyright +setup.py,decorator,BSD-2-Clause,Copyright (c) 2005-2018, Michele Simionato +setup.py,requests,Apache-2.0,Copyright 2019 Kenneth Reitz +setup.py,argparse,Python-2.0,2006-2009 Steven J. 
Bethard diff --git a/README.md b/README.md index 4023ebf02..2bb40165b 100644 --- a/README.md +++ b/README.md @@ -1,17 +1,21 @@ -The Datadog Python library -=========================== -[![Build Status](https://travis-ci.org/DataDog/datadogpy.svg?branch=master)](https://travis-ci.org/DataDog/datadogpy) +# The Datadog Python library -Datadogpy is a collection of tools suitable for inclusion in existing Python projects or for development of standalone scripts. It provides an abstraction on top of Datadog's raw HTTP interface and agent's StatsD metrics aggregation server, to interact with Datadog and efficiently report events and metrics. +[![Unit Tests](https://dev.azure.com/datadoghq/datadogpy/_apis/build/status/DataDog.datadogpy.unit?branchName=master)](https://dev.azure.com/datadoghq/datadogpy/_build/latest?definitionId=10&branchName=master) +[![Integration Tests](https://dev.azure.com/datadoghq/datadogpy/_apis/build/status/DataDog.datadogpy.integration?branchName=master)](https://dev.azure.com/datadoghq/datadogpy/_build/latest?definitionId=13&branchName=master) +[![Documentation Status](https://readthedocs.org/projects/datadogpy/badge/?version=latest)](https://readthedocs.org/projects/datadogpy/?badge=latest) +[![PyPI - Version](https://img.shields.io/pypi/v/datadog.svg)](https://pypi.org/project/datadog) +[![PyPI - Downloads](https://pepy.tech/badge/datadog)](https://pepy.tech/project/datadog) -- Library Documentation: http://datadogpy.readthedocs.org/en/latest/ -- HTTP API Documentation: http://docs.datadoghq.com/api/ -- DatadogHQ: http://datadoghq.com +The Datadog Python Library is a collection of tools suitable for inclusion in existing Python projects or for the development of standalone scripts. It provides an abstraction on top of Datadog's raw HTTP interface and the Agent's DogStatsD metrics aggregation server, to interact with Datadog and efficiently report events and metrics. -See [CHANGELOG.md](CHANGELOG.md) for changes. 
+- Library Documentation: https://datadogpy.readthedocs.io/en/latest/ +- HTTP API Documentation: https://docs.datadoghq.com/api/ +- DatadogHQ: https://datadoghq.com + +See [CHANGELOG.md](https://github.com/DataDog/datadogpy/blob/master/CHANGELOG.md) for changes. + +## Installation -Installation ------------- To install from pip: pip install datadog @@ -20,44 +24,179 @@ To install from source: python setup.py install +## Datadog API -Quick Start Guide ------------------ -``` python -# Configure the module according to your needs -from datadog import initialize +To support all Datadog HTTP APIs, a generated library is +available which will expose all the endpoints: +[datadog-api-client-python](https://github.com/DataDog/datadog-api-client-python). + +Find below a working example for submitting an event to your Event Stream: + +```python +from datadog import initialize, api options = { - 'api_key':'api_key', - 'app_key':'app_key' + "api_key": "", + "app_key": "", } initialize(**options) -# Use Datadog REST API client -from datadog import api +title = "Something big happened!" +text = "And let me tell you all about it here!" +tags = ["version:1", "application:web"] + +api.Event.create(title=title, text=text, tags=tags) +``` + +**Consult the full list of supported Datadog API endpoints with working code examples in [the Datadog API documentation](https://docs.datadoghq.com/api/latest/?code-lang=python).** + +**Note**: The full list of available Datadog API endpoints is also available in the [Datadog Python Library documentation](https://datadogpy.readthedocs.io/en/latest/) + +#### Environment Variables + +As an alternate method to using the `initialize` function with the `options` parameters, set the environment variables `DATADOG_API_KEY` and `DATADOG_APP_KEY` within the context of your application. + +If `DATADOG_API_KEY` or `DATADOG_APP_KEY` are not set, the library attempts to fall back to Datadog's APM environment variable prefixes: `DD_API_KEY` and `DD_APP_KEY`. 
+ +```python +from datadog import initialize, api + +# Assuming you've set `DD_API_KEY` and `DD_APP_KEY` in your env, +# initialize() will pick it up automatically +initialize() title = "Something big happened!" -text = 'And let me tell you all about it here!' -tags = ['version:1', 'application:web'] +text = "And let me tell you all about it here!" +tags = ["version:1", "application:web"] api.Event.create(title=title, text=text, tags=tags) +``` + +In development, you can disable any `statsd` metric collection using `DD_DOGSTATSD_DISABLE=True` (or any not-empty value). + +## DogStatsD + +In order to use DogStatsD metrics, the Agent must be [running and available](https://docs.datadoghq.com/developers/dogstatsd/?code-lang=python). + +### Instantiate the DogStatsD client with UDP + +Once the Datadog Python Library is installed, instantiate the StatsD client using UDP in your code: + +```python +from datadog import initialize, statsd + +options = { + "statsd_host": "127.0.0.1", + "statsd_port": 8125, +} + +initialize(**options) +``` + +See the full list of available [DogStatsD client instantiation parameters](https://docs.datadoghq.com/developers/dogstatsd/?code-lang=python#client-instantiation-parameters). + +#### Instantiate the DogStatsd client with UDS + +Once the Datadog Python Library is installed, instantiate the StatsD client using UDS in your code: +```python + +from datadog import initialize, statsd + +options = { + "statsd_socket_path": PATH_TO_SOCKET, +} + +initialize(**options) +``` + +#### Origin detection over UDP and UDS + +Origin detection is a method to detect which pod `DogStatsD` packets are coming from in order to add the pod's tags to the tag list. +The `DogStatsD` client attaches an internal tag, `entity_id`. The value of this tag is the content of the `DD_ENTITY_ID` environment variable if found, which is the pod's UID. The Datadog Agent uses this tag to add container tags to the metrics. 
To avoid overwriting this global tag, make sure to only `append` to the `constant_tags` list. + +To enable origin detection over UDP, add the following lines to your application manifest +```yaml +env: + - name: DD_ENTITY_ID + valueFrom: + fieldRef: + fieldPath: metadata.uid +``` + +### Usage +#### Metrics + +After the client is created, you can start sending custom metrics to Datadog. See the dedicated [Metric Submission: DogStatsD documentation](https://docs.datadoghq.com/metrics/dogstatsd_metrics_submission/?code-lang=python) to see how to submit all supported metric types to Datadog with working code examples: + +* [Submit a COUNT metric](https://docs.datadoghq.com/metrics/dogstatsd_metrics_submission/?code-lang=python#count). +* [Submit a GAUGE metric](https://docs.datadoghq.com/metrics/dogstatsd_metrics_submission/?code-lang=python#gauge). +* [Submit a SET metric](https://docs.datadoghq.com/metrics/dogstatsd_metrics_submission/?code-lang=python#set) +* [Submit a HISTOGRAM metric](https://docs.datadoghq.com/metrics/dogstatsd_metrics_submission/?code-lang=python#histogram) +* [Submit a TIMER metric](https://docs.datadoghq.com/metrics/dogstatsd_metrics_submission/?code-lang=python#timer) +* [Submit a DISTRIBUTION metric](https://docs.datadoghq.com/metrics/dogstatsd_metrics_submission/?code-lang=python#distribution) + +Some options are supported when submitting metrics, like [applying a Sample Rate to your metrics](https://docs.datadoghq.com/metrics/dogstatsd_metrics_submission/?code-lang=python#metric-submission-options) or [tagging your metrics with your custom tags](https://docs.datadoghq.com/metrics/dogstatsd_metrics_submission/?code-lang=python#metric-tagging). + +#### Events + +After the client is created, you can start sending events to your Datadog Event Stream. See the dedicated [Event Submission: DogStatsD documentation](https://docs.datadoghq.com/events/guides/dogstatsd/?code-lang=python) to see how to submit an event to your Datadog Event Stream. 
+ +#### Service Checks + +After the client is created, you can start sending Service Checks to Datadog. See the dedicated [Service Check Submission: DogStatsD documentation](https://docs.datadoghq.com/developers/service_checks/dogstatsd_service_checks_submission/?code-lang=python) to see how to submit a Service Check to Datadog. +### Monitoring this client -# Use Statsd, a Python client for DogStatsd -from datadog import statsd +This client automatically injects telemetry about itself in the DogStatsD stream. +Those metrics will not be counted as custom and will not be billed. This feature can be disabled using the `statsd.disable_telemetry()` method. -statsd.increment('whatever') -statsd.gauge('foo', 42) +See [Telemetry documentation](https://docs.datadoghq.com/developers/dogstatsd/high_throughput/?code-lang=python#client-side-telemetry) to learn more about it. -# Or ThreadStats, an alternative tool to collect and flush metrics, using Datadog REST API -from datadog import ThreadStats -stats = ThreadStats() -stats.start() -stats.increment('home.page.hits') +### Benchmarks +_Note: You will need to install `psutil` package before running the benchmarks._ + +If you would like to get an approximate idea on the throughput that your DogStatsD library +can handle on your system, you can run the included local benchmark code: + +```sh-session +$ # Python 2 Example +$ python2 -m unittest -vvv tests.performance.test_statsd_throughput + +$ # Python 3 Example +$ python3 -m unittest -vvv tests.performance.test_statsd_throughput ``` -Threadsafety ------------- -`DogStatsd` and `ThreadStats` are threadsafe. 
+You can also add set `BENCHMARK_*` to customize the runs: +```sh-session +$ # Example #1 +$ BENCHMARK_NUM_RUNS=10 BENCHMARK_NUM_THREADS=1 BENCHMARK_NUM_DATAPOINTS=5000 BENCHMARK_TRANSPORT="UDP" python2 -m unittest -vvv tests.performance.test_statsd_throughput + +$ # Example #2 +$ BENCHMARK_NUM_THREADS=10 BENCHMARK_TRANSPORT="UDS" python3 -m unittest -vvv tests.performance.test_statsd_throughput +``` + +## Maximum packets size in high-throughput scenarios + +In order to have the most efficient use of this library in high-throughput scenarios, +default values for the maximum packets size have already been set for both UDS (8192 bytes) +and UDP (1432 bytes) in order to have the best usage of the underlying network. +However, if you perfectly know your network and you know that a different value for the maximum packets +size should be used, you can set it with the parameter `max_buffer_len`. Example: + +```python +from datadog import initialize + +options = { + "api_key": "", + "app_key": "", + "max_buffer_len": 4096, +} + +initialize(**options) +``` + +## Thread Safety + +`DogStatsD` and `ThreadStats` are thread-safe. diff --git a/RELEASING.md b/RELEASING.md new file mode 100644 index 000000000..f3e6e16b1 --- /dev/null +++ b/RELEASING.md @@ -0,0 +1,38 @@ +# Releasing +This document summarizes the process of doing a new release of this project. +Release can only be performed by Datadog maintainers of this repository. + +## Schedule +This project does not have a strict release schedule. However, we would make a release at least every 2 months. + - No release will be done if no changes got merged to the `master` branch during the above mentioned window. + - Releases may be done more frequently than the above mentioned window. + +## Make Sure Everything Works +* Check and upgrade dependencies where it applies and makes sense. 
[Example](https://github.com/DataDog/datadogpy/commit/f81efe8cbf6e5bc5cb4ab46da750248161d0c548#diff-2eeaed663bd0d25b7e608891384b7298) + - Create a distinct pull request and test your changes since it may introduce regressions. + - While using the latest versions of dependencies is advised, it may not always be possible due to potential compatibility issues. + - Upgraded dependencies should be thoroughly considered and tested to ensure they are safe! +* Make sure tests are passing. + - Locally and in the continuous integration system. +* Make sure documentation is up-to-date and building correctly. +* Build the package locally (e.g. `python3 setup.py sdist`), install it into a fresh virtualenv and test the changes that have been made since the last release. + +## Release Process +Our team will trigger the release pipeline. + +### Prerequisite +- Install [datadog_checks_dev](https://datadoghq.dev/integrations-core/setup/#ddev) using Python 3. +- Setup PyPI, see the internal documentation for more details + +### Update Changelog and version + +1. See changes ready for release by running `ddev release show changes .` at the root of this project. Add any missing labels to PRs if needed. +1. Run `ddev release changelog . ` to update the `CHANGELOG.md` file at the root of this repository +1. Commit the changes to the repository in a release branch and open a PR. Do not merge yet. +1. Bump the version in [`datadog/version.py`](datadog/version.py) and push it to your changelog PR. [Example](https://github.com/DataDog/datadogpy/pull/495/files#diff-2eeaed663bd0d25b7e608891384b7298) +1. Merge the PR to master. + +### Release +1. Create the release on GitHub. [Example](https://github.com/DataDog/datadogpy/releases/tag/0.40.0) +1. A github action will kick off that builds and publishes this tag to PyPI. Confirm the [release is available](https://pypi.org/project/datadog/#history) +1. Bump the version again in `datadog/version.py` to a dev version (e.g. 
`0.34.0` -> `0.34.1.dev`), open a PR and merge it to master. \ No newline at end of file diff --git a/Rakefile.rb b/Rakefile.rb index 9a5063159..242fcf2ac 100644 --- a/Rakefile.rb +++ b/Rakefile.rb @@ -18,5 +18,5 @@ task :release do - sh "python setup.py sdist upload" + sh "python setup.py sdist bdist_wheel --universal upload" end diff --git a/SUPPORT.md b/SUPPORT.md new file mode 100644 index 000000000..7524bc137 --- /dev/null +++ b/SUPPORT.md @@ -0,0 +1,10 @@ +# Support + +The issue queue we have here on GitHub is primarily intended for tracking features, +bugs and work items associated with this datadog open source project. + +For any other support request, please reach out through one of the following: + + * Contact our [support](https://docs.datadoghq.com/help/) + * Join us [on Slack](http://datadoghq.slack.com) + \ No newline at end of file diff --git a/datadog/__init__.py b/datadog/__init__.py index 3deeedc03..dec93813a 100644 --- a/datadog/__init__.py +++ b/datadog/__init__.py @@ -1,3 +1,6 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc """ Datadogpy is a collection of Datadog Python tools. It contains: @@ -7,32 +10,45 @@ without hindering performance. * datadog.dogshell: a command-line tool, wrapping datadog.api, to interact with Datadog REST API. 
""" -from pkg_resources import get_distribution, DistributionNotFound +# stdlib +import logging import os import os.path +from typing import Any, List, Optional +# datadog from datadog import api -from datadog.dogstatsd import statsd -from datadog.threadstats import ThreadStats # noqa +from datadog.dogstatsd import DogStatsd, statsd # noqa +from datadog.threadstats import ThreadStats, datadog_lambda_wrapper, lambda_metric # noqa +from datadog.util.compat import iteritems, NullHandler, text from datadog.util.hostname import get_hostname +from datadog.version import __version__ # noqa +# Loggers +logging.getLogger("datadog.api").addHandler(NullHandler()) +logging.getLogger("datadog.dogstatsd").addHandler(NullHandler()) +logging.getLogger("datadog.threadstats").addHandler(NullHandler()) -try: - _dist = get_distribution("datadog") - # Normalize case for Windows systems - dist_loc = os.path.normcase(_dist.location) - here = os.path.normcase(__file__) - if not here.startswith(os.path.join(dist_loc, __name__)): - # not installed, but there is another version that *is*e - raise DistributionNotFound -except DistributionNotFound: - __version__ = 'Please install datadog with setup.py' -else: - __version__ = _dist.version - -def initialize(api_key=None, app_key=None, host_name=None, api_host=None, - proxies=None, statsd_host=None, statsd_port=None): +def initialize( + api_key=None, # type: Optional[str] + app_key=None, # type: Optional[str] + host_name=None, # type: Optional[str] + api_host=None, # type: Optional[str] + statsd_host=None, # type: Optional[str] + statsd_port=None, # type: Optional[int] + statsd_disable_aggregation=True, # type: bool + statsd_disable_buffering=True, # type: bool + statsd_aggregation_flush_interval=0.3, # type: float + statsd_use_default_route=False, # type: bool + statsd_socket_path=None, # type: Optional[str] + statsd_namespace=None, # type: Optional[str] + statsd_constant_tags=None, # type: Optional[List[str]] + return_raw_response=False, # 
type: bool + hostname_from_config=True, # type: bool + **kwargs # type: Any +): + # type: (...) -> None """ Initialize and configure Datadog.api and Datadog.statsd modules @@ -42,7 +58,11 @@ def initialize(api_key=None, app_key=None, host_name=None, api_host=None, :param app_key: Datadog application key :type app_key: string - :param proxies: Proxy to use to connect to Datadog API + :param host_name: Set a specific hostname + :type host_name: string + + :param proxies: Proxy to use to connect to Datadog API; + for example, 'proxies': {'http': "http::@:/"} :type proxies: dictionary mapping protocol to the URL of the proxy. :param api_host: Datadog API endpoint @@ -53,16 +73,80 @@ def initialize(api_key=None, app_key=None, host_name=None, api_host=None, :param statsd_port: Port of DogStatsd server or statsd daemon :type statsd_port: port + + :param statsd_disable_buffering: Enable/disable statsd client buffering support + (default: True). + :type statsd_disable_buffering: boolean + + :param statsd_disable_aggregation: Enable/disable statsd client aggregation support + (default: True). + :type statsd_disable_aggregation: boolean + + :param statsd_aggregation_flush_interval: If aggregation is enabled, set the flush interval for + aggregation/buffering + (default: 0.3 seconds) + :type statsd_aggregation_flush_interval: float + + :param statsd_use_default_route: Dynamically set the statsd host to the default route + (Useful when running the client in a container) + :type statsd_use_default_route: boolean + + :param statsd_socket_path: path to the DogStatsd UNIX socket. Supersedes statsd_host + and stats_port if provided. + + :param statsd_constant_tags: A list of tags to be applied to all metrics ("tag", "tag:value") + :type statsd_constant_tags: list of string + + :param cacert: Path to local certificate file used to verify SSL \ + certificates. 
Can also be set to True (default) to use the systems \ + certificate store, or False to skip SSL verification + :type cacert: path or boolean + + :param mute: Mute any ApiError or ClientError before they escape \ + from datadog.api.HTTPClient (default: True). + :type mute: boolean + + :param return_raw_response: Whether or not to return the raw response object in addition \ + to the decoded response content (default: False) + :type return_raw_response: boolean + + :param hostname_from_config: Set the hostname from the Datadog agent config (agent 5). Will be deprecated + :type hostname_from_config: boolean """ - # Configure api - api._api_key = api_key if api_key is not None else os.environ.get('DATADOG_API_KEY') - api._application_key = app_key if app_key is not None else os.environ.get('DATADOG_APP_KEY') - api._host_name = host_name if host_name is not None else get_hostname() - api._api_host = api_host if api_host is not None else \ - os.environ.get('DATADOG_HOST', 'https://app.datadoghq.com') - api._proxies = proxies - - # Given statsd_host and statsd_port, overrides statsd instance - if statsd_host and statsd_port: - statsd.host = statsd_host - statsd.port = int(statsd_port) + # API configuration + api._api_key = api_key or api._api_key or os.environ.get("DATADOG_API_KEY", os.environ.get("DD_API_KEY")) + api._application_key = ( + app_key or api._application_key or os.environ.get("DATADOG_APP_KEY", os.environ.get("DD_APP_KEY")) + ) + api._hostname_from_config = hostname_from_config + api._host_name = host_name or api._host_name or get_hostname(hostname_from_config) + api._api_host = api_host or api._api_host or os.environ.get("DATADOG_HOST", "https://api.datadoghq.com") + + # Statsd configuration + # ...overrides the default `statsd` instance attributes + if statsd_socket_path: + statsd.socket_path = statsd_socket_path + statsd.host = None + statsd.port = None + else: + if statsd_host or statsd_use_default_route: + statsd.host = statsd.resolve_host(statsd_host, 
statsd_use_default_route) + if statsd_port: + statsd.port = int(statsd_port) + statsd.close_socket() + if statsd_namespace: + statsd.namespace = text(statsd_namespace) + if statsd_constant_tags: + statsd.constant_tags += statsd_constant_tags + + if statsd_disable_aggregation: + statsd.disable_aggregation() + else: + statsd.enable_aggregation(statsd_aggregation_flush_interval) + statsd.disable_buffering = statsd_disable_buffering + api._return_raw_response = return_raw_response + + # HTTP client and API options + for key, value in iteritems(kwargs): + attribute = "_{}".format(key) + setattr(api, attribute, value) diff --git a/datadog/api/__init__.py b/datadog/api/__init__.py index d09b174a4..eb477c97d 100644 --- a/datadog/api/__init__.py +++ b/datadog/api/__init__.py @@ -1,31 +1,52 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc # flake8: noqa +from typing import Optional + # API settings -_api_key = None -_application_key = None -_api_version = 'v1' -_api_host = None -_host_name = None +_api_key = None # type: Optional[str] +_application_key = None # type: Optional[str] +_api_version = "v1" +_api_host = None # type: Optional[str] +_host_name = None # type: Optional[str] +_hostname_from_config = True +_cacert = True # HTTP(S) settings _proxies = None -_timeout = 3 +_timeout = 60 _max_timeouts = 3 _max_retries = 3 _backoff_period = 300 -_swallow = True +_mute = True +_return_raw_response = False # Resources from datadog.api.comments import Comment +from datadog.api.dashboard_lists import DashboardList +from datadog.api.distributions import Distribution from datadog.api.downtimes import Downtime from datadog.api.timeboards import Timeboard +from datadog.api.dashboards import Dashboard from datadog.api.events import Event from datadog.api.infrastructure import Infrastructure +from 
datadog.api.metadata import Metadata from datadog.api.metrics import Metric from datadog.api.monitors import Monitor from datadog.api.screenboards import Screenboard -from datadog.api.graphs import Graph -from datadog.api.hosts import Host +from datadog.api.graphs import Graph, Embed +from datadog.api.hosts import Host, Hosts from datadog.api.service_checks import ServiceCheck from datadog.api.tags import Tag from datadog.api.users import User +from datadog.api.aws_integration import AwsIntegration +from datadog.api.aws_log_integration import AwsLogsIntegration +from datadog.api.azure_integration import AzureIntegration +from datadog.api.gcp_integration import GcpIntegration +from datadog.api.roles import Roles +from datadog.api.permissions import Permissions +from datadog.api.service_level_objectives import ServiceLevelObjective +from datadog.api.synthetics import Synthetics +from datadog.api.logs import Logs diff --git a/datadog/api/api_client.py b/datadog/api/api_client.py new file mode 100644 index 000000000..db34873bf --- /dev/null +++ b/datadog/api/api_client.py @@ -0,0 +1,290 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +# stdlib +import json +import logging +import time +import zlib + +# datadog +from datadog.api import _api_version, _max_timeouts, _backoff_period +from datadog.api.exceptions import ClientError, ApiError, HttpBackoff, HttpTimeout, ApiNotInitialized +from datadog.api.http_client import resolve_http_client +from datadog.util.compat import is_p3k +from datadog.util.format import construct_url, normalize_tags + + +log = logging.getLogger("datadog.api") + + +class APIClient(object): + """ + Datadog API client: format and submit API calls to Datadog. + Embeds a HTTP client. 
+ """ + + # HTTP transport parameters + _backoff_period = _backoff_period + _max_timeouts = _max_timeouts + _backoff_timestamp = None + _timeout_counter = 0 + _sort_keys = False + + # Plugged HTTP client + _http_client = None + + @classmethod + def _get_http_client(cls): + """ + Getter for the embedded HTTP client. + """ + if not cls._http_client: + cls._http_client = resolve_http_client() + + return cls._http_client + + @classmethod + def submit( + cls, + method, + path, + api_version=None, + body=None, + attach_host_name=False, + response_formatter=None, + error_formatter=None, + suppress_response_errors_on_codes=None, + compress_payload=False, + **params + ): + """ + Make an HTTP API request + + :param method: HTTP method to use to contact API endpoint + :type method: HTTP method string + + :param path: API endpoint url + :type path: url + + :param api_version: The API version used + + :param body: dictionary to be sent in the body of the request + :type body: dictionary + + :param response_formatter: function to format JSON response from HTTP API request + :type response_formatter: JSON input function + + :param error_formatter: function to format JSON error response from HTTP API request + :type error_formatter: JSON input function + + :param attach_host_name: link the new resource object to the host name + :type attach_host_name: bool + + :param suppress_response_errors_on_codes: suppress ApiError on `errors` key in the response for the given HTTP + status codes + :type suppress_response_errors_on_codes: None|list(int) + + :param compress_payload: compress the payload using zlib + :type compress_payload: bool + + :param params: dictionary to be sent in the query string of the request + :type params: dictionary + + :returns: JSON or formatted response from HTTP API request + """ + try: + # Check if it's ok to submit + if not cls._should_submit(): + _, backoff_time_left = cls._backoff_status() + raise HttpBackoff(backoff_time_left) + + # Import API, User and 
HTTP settings + from datadog.api import ( + _api_key, + _application_key, + _api_host, + _mute, + _host_name, + _proxies, + _max_retries, + _timeout, + _cacert, + _return_raw_response, + ) + + # Check keys and add then to params + if _api_key is None: + raise ApiNotInitialized("API key is not set." " Please run 'initialize' method first.") + + # Set api and app keys in headers + headers = {} + headers["DD-API-KEY"] = _api_key + if _application_key: + headers["DD-APPLICATION-KEY"] = _application_key + + # Check if the api_version is provided + if not api_version: + api_version = _api_version + + # Attach host name to body + if attach_host_name and body: + # Is it a 'series' list of objects ? + if "series" in body: + # Adding the host name to all objects + for obj_params in body["series"]: + if obj_params.get("host", "") == "": + obj_params["host"] = _host_name + else: + if body.get("host", "") == "": + body["host"] = _host_name + + # If defined, make sure tags are defined as a comma-separated string + if "tags" in params and isinstance(params["tags"], list): + tag_list = normalize_tags(params["tags"]) + params["tags"] = ",".join(tag_list) + + # If defined, make sure monitor_ids are defined as a comma-separated string + if "monitor_ids" in params and isinstance(params["monitor_ids"], list): + params["monitor_ids"] = ",".join(str(i) for i in params["monitor_ids"]) + + # Process the body, if necessary + if isinstance(body, dict): + body = json.dumps(body, sort_keys=cls._sort_keys) + headers["Content-Type"] = "application/json" + + if compress_payload: + body = zlib.compress(body.encode("utf-8")) + headers["Content-Encoding"] = "deflate" + + # Construct the URL + url = construct_url(_api_host, api_version, path) + + # Process requesting + start_time = time.time() + + result = cls._get_http_client().request( + method=method, + url=url, + headers=headers, + params=params, + data=body, + timeout=_timeout, + max_retries=_max_retries, + proxies=_proxies, + verify=_cacert, + 
) + + # Request succeeded: log it and reset the timeout counter + duration = round((time.time() - start_time) * 1000.0, 4) + log.info("%s %s %s (%sms)" % (result.status_code, method, url, duration)) + cls._timeout_counter = 0 + + # Format response content + content = result.content + + if content: + try: + if is_p3k(): + response_obj = json.loads(content.decode("utf-8")) + else: + response_obj = json.loads(content) + except ValueError: + raise ValueError("Invalid JSON response: {0}".format(content)) + + # response_obj can be a bool and not a dict + if isinstance(response_obj, dict): + if response_obj and "errors" in response_obj: + # suppress ApiError when specified and just return the response + if not ( + suppress_response_errors_on_codes + and result.status_code in suppress_response_errors_on_codes + ): + raise ApiError(response_obj) + else: + response_obj = None + + if response_formatter is not None: + response_obj = response_formatter(response_obj) + + if _return_raw_response: + return response_obj, result + else: + return response_obj + + except HttpTimeout: + cls._timeout_counter += 1 + raise + except ClientError as e: + if _mute: + log.error(str(e)) + if error_formatter is None: + return {"errors": e.args[0]} + else: + return error_formatter({"errors": e.args[0]}) + else: + raise + except ApiError as e: + if _mute: + for error in e.args[0].get("errors") or []: + log.error(error) + if error_formatter is None: + return e.args[0] + else: + return error_formatter(e.args[0]) + else: + raise + + @classmethod + def _should_submit(cls): + """ + Returns True if we're in a state where we should make a request + (backoff expired, no backoff in effect), false otherwise. 
+ """ + now = time.time() + should_submit = False + + # If we're not backing off, but the timeout counter exceeds the max + # number of timeouts, then enter the backoff state, recording the time + # we started backing off + if not cls._backoff_timestamp and cls._timeout_counter >= cls._max_timeouts: + log.info( + "Max number of datadog timeouts exceeded, backing off for %s seconds", + cls._backoff_period, + ) + cls._backoff_timestamp = now + should_submit = False + + # If we are backing off but the we've waiting sufficiently long enough + # (backoff_retry_age), exit the backoff state and reset the timeout + # counter so that we try submitting metrics again + elif cls._backoff_timestamp: + backed_off_time, backoff_time_left = cls._backoff_status() + if backoff_time_left < 0: + log.info( + "Exiting backoff state after %s seconds, will try to submit metrics again", + backed_off_time, + ) + cls._backoff_timestamp = None + cls._timeout_counter = 0 + should_submit = True + else: + log.info( + "In backoff state, won't submit metrics for another %s seconds", + backoff_time_left, + ) + should_submit = False + else: + should_submit = True + + return should_submit + + @classmethod + def _backoff_status(cls): + """ + Get a backoff report, i.e. backoff total and remaining time. + """ + now = time.time() + backed_off_time = now - cls._backoff_timestamp + backoff_time_left = cls._backoff_period - backed_off_time + return round(backed_off_time, 2), round(backoff_time_left, 2) diff --git a/datadog/api/aws_integration.py b/datadog/api/aws_integration.py new file mode 100644 index 000000000..eb4358b35 --- /dev/null +++ b/datadog/api/aws_integration.py @@ -0,0 +1,248 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +from datadog.api.resources import ( + GetableAPIResource, + CreateableAPIResource, + DeletableAPIResource, + UpdatableAPIResource, + UpdatableAPISubResource, + ListableAPISubResource, +) + + +class AwsIntegration( + GetableAPIResource, + CreateableAPIResource, + DeletableAPIResource, + ListableAPISubResource, + UpdatableAPIResource, + UpdatableAPISubResource, +): + """ + A wrapper around AWS Integration API. + """ + + _resource_name = "integration" + _resource_id = "aws" + + @classmethod + def list(cls, **params): + """ + List all Datadog-AWS integrations available in your Datadog organization. + + >>> api.AwsIntegration.list() + """ + return super(AwsIntegration, cls).get(id=cls._resource_id, **params) + + @classmethod + def create(cls, **params): + """ + Add a new AWS integration config. + + :param account_id: Your AWS Account ID without dashes. \ + Consult the Datadog AWS integration to learn more about \ + your AWS account ID. + :type account_id: string + + :param access_key_id: If your AWS account is a GovCloud \ + or China account, enter the corresponding Access Key ID. + :type access_key_id: string + + :param role_name: Your Datadog role delegation name. \ + For more information about you AWS account Role name, \ + see the Datadog AWS integration configuration info. + :type role_name: string + + :param filter_tags: The array of EC2 tags (in the form key:value) \ + defines a filter that Datadog uses when collecting metrics from EC2. \ + Wildcards, such as ? (for single characters) and * (for multiple characters) \ + can also be used. Only hosts that match one of the defined tags will be imported \ + into Datadog. The rest will be ignored. Host matching a given tag can also be \ + excluded by adding ! before the tag. e.x. \ + env:production,instance-type:c1.*,!region:us-east-1 For more information \ + on EC2 tagging, see the AWS tagging documentation. 
+ :type filter_tags: list of strings + + :param host_tags: Array of tags (in the form key:value) to add to all hosts and \ + metrics reporting through this integration. + :type host_tags: list of strings + + :param account_specific_namespace_rules: An object (in the form \ + {"namespace1":true/false, "namespace2":true/false}) that enables \ + or disables metric collection for specific AWS namespaces for this \ + AWS account only. A list of namespaces can be found at the \ + /v1/integration/aws/available_namespace_rules endpoint. + :type account_specific_namespace_rules: dictionary + + :param excluded_regions: An array of AWS regions to exclude \ + from metrics collection. + :type excluded_regions: list of strings + + :returns: Dictionary representing the API's JSON response + + >>> account_id = "" + >>> access_key_id = "" + >>> role_name = "DatadogAwsRole" + >>> filter_tags = [":"] + >>> host_tags = [":"] + >>> account_specific_namespace_rules = {"namespace1":true/false, "namespace2":true/false} + >>> excluded_regions = ["us-east-1", "us-west-1"] + + >>> api.AwsIntegration.create(account_id=account_id, role_name=role_name, \ + filter_tags=filter_tags,host_tags=host_tags,\ + account_specific_namespace_rules=account_specific_namespace_rules \ + excluded_regions=excluded_regions) + """ + return super(AwsIntegration, cls).create(id=cls._resource_id, **params) + + @classmethod + def update(cls, **body): + """ + Update an AWS integration config. + + :param account_id: Your existing AWS Account ID without dashes. \ + Consult the Datadog AWS integration to learn more about \ + your AWS account ID. + :type account_id: string + + :param new_account_id: Your new AWS Account ID without dashes. \ + Consult the Datadog AWS integration to learn more about \ + your AWS account ID. This is the account to be updated. + :type new_account_id: string + + :param role_name: Your existing Datadog role delegation name. 
\ + For more information about you AWS account Role name, \ + see the Datadog AWS integration configuration info. + :type role_name: string + + :param new_role_name: Your new Datadog role delegation name. \ + For more information about you AWS account Role name, \ + see the Datadog AWS integration configuration info. \ + This is the role_name to be updated. + :type new_role_name: string + + :param access_key_id: If your AWS account is a GovCloud \ + or China account, enter the existing Access Key ID. + :type access_key_id: string + + :param new_access_key_id: If your AWS account is a GovCloud \ + or China account, enter the new Access Key ID to be set. + :type new_access_key_id: string + + :param secret_access_key: If your AWS account is a GovCloud \ + or China account, enter the existing Secret Access Key. + :type secret_access_key: string + + :param new_secret_access_key: If your AWS account is a GovCloud \ + or China account, enter the new key to be set. + :type new_secret_access_key: string + + :param filter_tags: The array of EC2 tags (in the form key:value) \ + defines a filter that Datadog uses when collecting metrics from EC2. \ + Wildcards, such as ? (for single characters) and * (for multiple characters) \ + can also be used. Only hosts that match one of the defined tags will be imported \ + into Datadog. The rest will be ignored. Host matching a given tag can also be \ + excluded by adding ! before the tag. e.x. \ + env:production,instance-type:c1.*,!region:us-east-1 For more information \ + on EC2 tagging, see the AWS tagging documentation. + :type filter_tags: list of strings + + :param host_tags: Array of tags (in the form key:value) to add to all hosts and \ + metrics reporting through this integration. 
+ :type host_tags: list of strings + + :param account_specific_namespace_rules: An object (in the form \ + {"namespace1":true/false, "namespace2":true/false}) that enables \ + or disables metric collection for specific AWS namespaces for this \ + AWS account only. A list of namespaces can be found at the \ + /v1/integration/aws/available_namespace_rules endpoint. + :type account_specific_namespace_rules: dictionary + + :param excluded_regions: An array of AWS regions to exclude \ + from metrics collection. + :type excluded_regions: list of strings + + :returns: Dictionary representing the API's JSON response + + The following will depend on whether role delegation or access keys are being used. + If using role delegation, use the fields for role_name and account_id. + For access keys, use fields for access_key_id and secret_access_key. + + Both the existing fields and new fields are required no matter what. i.e. If the config is \ + account_id/role_name based, then `account_id`, `role_name`, `new_account_id`, and \ + `new_role_name` are all required. + + For access_key based accounts, `access_key_id`, `secret_access_key`, `new_access_key_id`, \ + and `new_secret_access_key` are all required. 
+ + >>> account_id = "" + >>> role_name = "" + >>> access_key_id = "" + >>> secret_access_key = "" + >>> new_account_id = "" + >>> new_role_name = "" + >>> new_access_key_id = "" + >>> new_secret_access_key = "" + >>> filter_tags = [":"] + >>> host_tags = [":"] + >>> account_specific_namespace_rules = {"namespace1":true/false, "namespace2":true/false} + >>> excluded_regions = ["us-east-1", "us-west-1"] + + >>> api.AwsIntegration.update(account_id=account_id, role_name=role_name, \ + new_account_id=new_account_id, new_role_name=new_role_name, \ + filter_tags=filter_tags,host_tags=host_tags,\ + account_specific_namespace_rules=account_specific_namespace_rules, \ + excluded_regions=excluded_regions) + """ + params = {} + if body.get("account_id") and body.get("role_name"): + params["account_id"] = body.pop("account_id") + params["role_name"] = body.pop("role_name") + if body.get("new_account_id"): + body["account_id"] = body.pop("new_account_id") + if body.get("new_role_name"): + body["role_name"] = body.pop("new_role_name") + if body.get("access_key_id") and body.get("secret_access_key"): + params["access_key_id"] = body.pop("access_key_id") + params["secret_access_key"] = body.pop("secret_access_key") + if body.get("new_access_key_id"): + body["access_key_id"] = body.pop("new_access_key_id") + if body.get("new_secret_access_key"): + body["secret_access_key"] = body.pop("new_secret_access_key") + return super(AwsIntegration, cls).update(id=cls._resource_id, params=params, **body) + + @classmethod + def delete(cls, **body): + """ + Delete a given Datadog-AWS integration. + + >>> account_id = "" + >>> role_name = "" + + >>> api.AwsIntegration.delete() + """ + return super(AwsIntegration, cls).delete(id=cls._resource_id, body=body) + + @classmethod + def list_namespace_rules(cls, **params): + """ + List all namespace rules available as options. 
+ + >>> api.AwsIntegration.list_namespace_rules() + """ + cls._sub_resource_name = "available_namespace_rules" + return super(AwsIntegration, cls).get_items(id=cls._resource_id, **params) + + @classmethod + def generate_new_external_id(cls, **params): + """ + Generate a new AWS external id for a given AWS account id and role name pair. + + >>> account_id = "" + >>> role_name = "" + + >>> api.AwsIntegration.generate_new_external_id() + """ + cls._sub_resource_name = "generate_new_external_id" + return super(AwsIntegration, cls).update_items(id=cls._resource_id, **params) diff --git a/datadog/api/aws_log_integration.py b/datadog/api/aws_log_integration.py new file mode 100644 index 000000000..352843561 --- /dev/null +++ b/datadog/api/aws_log_integration.py @@ -0,0 +1,111 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from datadog.api.resources import DeletableAPISubResource, ListableAPISubResource, AddableAPISubResource + + +class AwsLogsIntegration(DeletableAPISubResource, ListableAPISubResource, AddableAPISubResource): + """ + A wrapper around AWS Logs API. + """ + + _resource_name = "integration" + _resource_id = "aws" + + @classmethod + def list_log_services(cls, **params): + """ + List all namespace rules available as options. + + >>> api.AwsLogsIntegration.list_log_services() + """ + cls._sub_resource_name = "logs/services" + return super(AwsLogsIntegration, cls).get_items(id=cls._resource_id, **params) + + @classmethod + def add_log_lambda_arn(cls, **params): + """ + Attach the Lambda ARN of the Lambda created for the Datadog-AWS \ + log collection to your AWS account ID to enable log collection. 
+ + >>> account_id = "" + >>> lambda_arn = "" + + >>> api.AwsLogsIntegration.add_log_lambda_arn(account_id=account_id, lambda_arn=lambda_arn) + """ + cls._sub_resource_name = "logs" + return super(AwsLogsIntegration, cls).add_items(id=cls._resource_id, **params) + + @classmethod + def save_services(cls, **params): + """ + Enable Automatic Log collection for your AWS services. + + >>> account_id = "" + >>> services = ["s3", "elb", "elbv2", "cloudfront", "redshift", "lambda"] + + >>> api.AwsLogsIntegration.save_services() + """ + cls._sub_resource_name = "logs/services" + return super(AwsLogsIntegration, cls).add_items(id=cls._resource_id, **params) + + @classmethod + def delete_config(cls, **params): + """ + Delete a Datadog-AWS log collection configuration by removing the specific Lambda ARN \ + associated with a given AWS account. + + >>> account_id = "" + >>> lambda_arn = "" + + >>> api.AwsLogsIntegration.delete_config(account_id=account_id, lambda_arn=lambda_arn) + """ + cls._sub_resource_name = "logs" + return super(AwsLogsIntegration, cls).delete_items(id=cls._resource_id, **params) + + @classmethod + def check_lambda(cls, **params): + """ + Check function to see if a lambda_arn exists within an account. \ + This sends a job on our side if it does not exist, then immediately returns \ + the status of that job. Subsequent requests will always repeat the above, so this endpoint \ + can be polled intermittently instead of blocking. + + Returns a status of 'created' when it's checking if the Lambda exists in the account. + Returns a status of 'waiting' while checking. + Returns a status of 'checked and ok' if the Lambda exists. + Returns a status of 'error' if the Lambda does not exist. 
+ + >>> account_id = "" + >>> lambda_arn = "" + + >>> api.AwsLogsIntegration.check_lambda(account_id=account_id, lambda_arn=lambda_arn) + """ + cls._sub_resource_name = "logs/check_async" + return super(AwsLogsIntegration, cls).add_items(id=cls._resource_id, **params) + + @classmethod + def check_services(cls, **params): + """ + Test if permissions are present to add log-forwarding triggers for the \ + given services + AWS account. Input is the same as for save_services. + Done async, so can be repeatedly polled in a non-blocking fashion until \ + the async request completes + + >>> account_id = "" + >>> services = ["s3", "elb", "elbv2", "cloudfront", "redshift", "lambda"] + + >>> api.AwsLogsIntegration.check_services() + """ + cls._sub_resource_name = "logs/services_async" + return super(AwsLogsIntegration, cls).add_items(id=cls._resource_id, **params) + + @classmethod + def list(cls, **params): + """ + List all Datadog-AWS Logs integrations available in your Datadog organization. + + >>> api.AwsLogsIntegration.list() + """ + cls._sub_resource_name = "logs" + return super(AwsLogsIntegration, cls).get_items(id=cls._resource_id, **params) diff --git a/datadog/api/azure_integration.py b/datadog/api/azure_integration.py new file mode 100644 index 000000000..2bb1ceab8 --- /dev/null +++ b/datadog/api/azure_integration.py @@ -0,0 +1,91 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from datadog.api.resources import ( + GetableAPIResource, + CreateableAPIResource, + DeletableAPIResource, + UpdatableAPIResource, + AddableAPISubResource, +) + + +class AzureIntegration( + GetableAPIResource, CreateableAPIResource, DeletableAPIResource, UpdatableAPIResource, AddableAPISubResource +): + """ + A wrapper around Azure integration API. 
+ """ + + _resource_name = "integration" + _resource_id = "azure" + + @classmethod + def list(cls, **params): + """ + List all Datadog-Azure integrations available in your Datadog organization. + + >>> api.AzureIntegration.list() + """ + return super(AzureIntegration, cls).get(id=cls._resource_id, **params) + + @classmethod + def create(cls, **params): + """ + Add a new Azure integration config. + + >>> tenant_name = "" + >>> client_id = "" + >>> client_secret = "" + >>> host_filters = [":"] + + >>> api.AzureIntegration.create(tenant_name=tenant_name, client_id=client_id, \ + client_secret=client_secret,host_filters=host_filters) + """ + return super(AzureIntegration, cls).create(id=cls._resource_id, **params) + + @classmethod + def delete(cls, **body): + """ + Delete a given Datadog-Azure integration. + + >>> tenant_name = "" + >>> client_id = "" + + >>> api.AzureIntegration.delete(tenant_name=tenant_name, client_id=client_id) + """ + return super(AzureIntegration, cls).delete(id=cls._resource_id, body=body) + + @classmethod + def update_host_filters(cls, **params): + """ + Update the defined list of host filters for a given Datadog-Azure integration. \ + + >>> tenant_name = "" + >>> client_id = "" + >>> host_filters = ":" + + >>> api.AzureIntegration.update_host_filters(tenant_name=tenant_name, client_id=client_id, \ + host_filters=host_filters) + """ + cls._sub_resource_name = "host_filters" + return super(AzureIntegration, cls).add_items(id=cls._resource_id, **params) + + @classmethod + def update(cls, **body): + """ + Update an Azure account configuration. 
+ + >>> tenant_name = "" + >>> client_id = "" + >>> new_tenant_name = "" + >>> new_client_id = "" + >>> client_secret = "" + >>> host_filters = ":" + + >>> api.AzureIntegration.update(tenant_name=tenant_name, client_id=client_id, \ + new_tenant_name=new_tenant_name, new_client_id=new_client_id,\ + client_secret=client_secret, host_filters=host_filters) + """ + params = {} + return super(AzureIntegration, cls).update(id=cls._resource_id, params=params, **body) diff --git a/datadog/api/base.py b/datadog/api/base.py deleted file mode 100644 index 1016b3cc1..000000000 --- a/datadog/api/base.py +++ /dev/null @@ -1,436 +0,0 @@ -# stdlib -import time -import logging -import requests - -# datadog -from datadog.api.exceptions import ClientError, ApiError, HttpBackoff, \ - HttpTimeout, ApiNotInitialized -from datadog.api import _api_version, _max_timeouts, _backoff_period -from datadog.util.compat import json, is_p3k - -log = logging.getLogger('dd.datadogpy') - - -class HTTPClient(object): - """ - HTTP client based on Requests library for Datadog API calls - """ - # http transport params - _backoff_period = _backoff_period - _max_timeouts = _max_timeouts - _backoff_timestamp = None - _timeout_counter = 0 - _api_version = _api_version - - @classmethod - def request(cls, method, path, body=None, attach_host_name=False, response_formatter=None, - error_formatter=None, **params): - """ - Make an HTTP API request - - :param method: HTTP method to use to contact API endpoint - :type method: HTTP method string - - :param path: API endpoint url - :type path: url - - :param body: dictionnary to be sent in the body of the request - :type body: dictionary - - :param response_formatter: function to format JSON response from HTTP API request - :type response_formatter: JSON input function - - :param error_formatter: function to format JSON error response from HTTP API request - :type error_formatter: JSON input function - - :param attach_host_name: link the new resource object to the 
host name - :type attach_host_name: bool - - :param params: dictionnary to be sent in the query string of the request - :type params: dictionary - - :returns: JSON or formated response from HTTP API request - """ - - try: - # Check if it's ok to submit - if not cls._should_submit(): - raise HttpBackoff("Too many timeouts. Won't try again for {1} seconds." - .format(*cls._backoff_status())) - - # Import API, User and HTTP settings - from datadog.api import _api_key, _application_key, _api_host, \ - _swallow, _host_name, _proxies, _max_retries, _timeout - - # Check keys and add then to params - if _api_key is None: - raise ApiNotInitialized("API key is not set." - " Please run 'initialize' method first.") - params['api_key'] = _api_key - if _application_key: - params['application_key'] = _application_key - - # Construct the url - url = "%s/api/%s/%s" % (_api_host, cls._api_version, path.lstrip("/")) - - # Attach host name to body - if attach_host_name and body: - # Is it a 'series' list of objects ? 
- if 'series' in body: - # Adding the host name to all objects - for obj_params in body['series']: - if 'host' not in obj_params: - obj_params['host'] = _host_name - else: - if 'host' not in body: - body['host'] = _host_name - - # If defined, make sure tags are defined as a comma-separated string - if 'tags' in params and isinstance(params['tags'], list): - params['tags'] = ','.join(params['tags']) - - # Process the body, if necessary - headers = {} - if isinstance(body, dict): - body = json.dumps(body) - headers['Content-Type'] = 'application/json' - - # Process requesting - start_time = time.time() - try: - # Use a session to set a max_retries parameters - s = requests.Session() - http_adapter = requests.adapters.HTTPAdapter(max_retries=_max_retries) - s.mount('https://', http_adapter) - - # Request - result = s.request( - method, - url, - headers=headers, - params=params, - data=body, - timeout=_timeout, - proxies=_proxies) - - result.raise_for_status() - except requests.ConnectionError as e: - raise ClientError("Could not request %s %s%s: %s" % (method, _api_host, url, e)) - except requests.exceptions.Timeout as e: - cls._timeout_counter += 1 - raise HttpTimeout('%s %s timed out after %d seconds.' % (method, url, _timeout)) - except requests.exceptions.HTTPError as e: - if e.response.status_code in (400, 403, 404): - pass - else: - raise - except TypeError as e: - raise TypeError( - "Your installed version of 'requests' library seems not compatible with" - "Datadog's usage. We recommand upgrading it ('pip install -U requests')." 
- "If you need help or have any question, please contact support@datadoghq.com") - - # Request succeeded: log it and reset the timeout counter - duration = round((time.time() - start_time) * 1000., 4) - log.info("%s %s %s (%sms)" % (result.status_code, method, url, duration)) - cls._timeout_counter = 0 - - # Format response content - content = result.content - - if content: - try: - if is_p3k(): - response_obj = json.loads(content.decode('utf-8')) - else: - response_obj = json.loads(content) - except ValueError: - raise ValueError('Invalid JSON response: {0}'.format(content)) - - if response_obj and 'errors' in response_obj: - raise ApiError(response_obj) - else: - response_obj = None - if response_formatter is None: - return response_obj - else: - return response_formatter(response_obj) - - except ClientError as e: - if _swallow: - log.error(str(e)) - if error_formatter is None: - return {'errors': e.args[0]} - else: - return error_formatter({'errors': e.args[0]}) - else: - raise - except ApiError as e: - if _swallow: - for error in e.args[0]['errors']: - log.error(str(error)) - if error_formatter is None: - return e.args[0] - else: - return error_formatter(e.args[0]) - else: - raise - - # Private functions - @classmethod - def _should_submit(cls): - """ Returns True if we're in a state where we should make a request - (backoff expired, no backoff in effect), false otherwise. 
- """ - now = time.time() - should_submit = False - - # If we're not backing off, but the timeout counter exceeds the max - # number of timeouts, then enter the backoff state, recording the time - # we started backing off - if not cls._backoff_timestamp and cls._timeout_counter >= cls._max_timeouts: - log.info("Max number of datadog timeouts exceeded, backing off for {0} seconds" - .format(cls._backoff_period)) - cls._backoff_timestamp = now - should_submit = False - - # If we are backing off but the we've waiting sufficiently long enough - # (backoff_retry_age), exit the backoff state and reset the timeout - # counter so that we try submitting metrics again - elif cls._backoff_timestamp: - backed_off_time, backoff_time_left = cls._backoff_status() - if backoff_time_left < 0: - log.info("Exiting backoff state after {0} seconds, will try to submit metrics again" - .format(backed_off_time)) - cls._backoff_timestamp = None - cls._timeout_counter = 0 - should_submit = True - else: - log.info("In backoff state, won't submit metrics for another {0} seconds" - .format(backoff_time_left)) - should_submit = False - else: - should_submit = True - - return should_submit - - @classmethod - def _backoff_status(cls): - now = time.time() - backed_off_time = now - cls._backoff_timestamp - backoff_time_left = cls._backoff_period - backed_off_time - return round(backed_off_time, 2), round(backoff_time_left, 2) - - -# API Resource types are listed below -class CreateableAPIResource(object): - """ - Creatable API Resource - """ - @classmethod - def create(cls, attach_host_name=False, method='POST', id=None, params=None, **body): - """ - Create a new API resource object - - :param attach_host_name: link the new resource object to the host name - :type attach_host_name: bool - - :param method: HTTP method to use to contact API endpoint - :type method: HTTP method string - - :param id: create a new resource object as a child of the given object - :type id: id - - :param params: new 
resource object source - :type params: dictionary - - :param body: new resource object attributes - :type body: dictionary - - :returns: JSON response from HTTP API request - """ - if params is None: - params = {} - if method == 'GET': - return HTTPClient.request('GET', cls._class_url, **body) - if id is None: - return HTTPClient.request('POST', cls._class_url, body, - attach_host_name=attach_host_name, **params) - else: - return HTTPClient.request('POST', cls._class_url + "/" + str(id), body, - attach_host_name=attach_host_name, **params) - - -class SendableAPIResource(object): - """ - Fork of CreateableAPIResource class with different method names - """ - @classmethod - def send(cls, attach_host_name=False, id=None, **body): - """ - Create an API resource object - - :param attach_host_name: link the new resource object to the host name - :type attach_host_name: bool - - :param id: create a new resource object as a child of the given object - :type id: id - - :param body: new resource object attributes - :type body: dictionary - - :returns: JSON response from HTTP API request - """ - if id is None: - return HTTPClient.request('POST', cls._class_url, body, - attach_host_name=attach_host_name) - else: - return HTTPClient.request('POST', cls._class_url + "/" + str(id), body, - attach_host_name=attach_host_name) - - -class UpdatableAPIResource(object): - """ - Updatable API Resource - """ - @classmethod - def update(cls, id, params=None, **body): - """ - Update an API resource object - - :param params: updated resource object source - :type params: dictionary - - :param body: updated resource object attributes - :type body: dictionary - - :returns: JSON response from HTTP API request - """ - if params is None: - params = {} - return HTTPClient.request('PUT', cls._class_url + "/" + str(id), body, **params) - - -class DeletableAPIResource(object): - """ - Deletable API Resource - """ - @classmethod - def delete(cls, id, **params): - """ - Delete an API resource object - 
- :param id: resource object to delete - :type id: id - - :returns: JSON response from HTTP API request - """ - return HTTPClient.request('DELETE', cls._class_url + "/" + str(id), **params) - - -class GetableAPIResource(object): - """ - Getable API Resource - """ - @classmethod - def get(cls, id, **params): - """ - Get information about an API resource object - - :param id: resource object id to retrieve - :type id: id - - :param params: parameters to filter API resource stream - :type params: dictionary - - :returns: JSON response from HTTP API request - """ - return HTTPClient.request('GET', cls._class_url + "/" + str(id), **params) - - -class ListableAPIResource(object): - """ - Listable API Resource - """ - @classmethod - def get_all(cls, **params): - """ - List API resource objects - - :param params: parameters to filter API resource stream - :type params: dictionary - - :returns: JSON response from HTTP API request - """ - return HTTPClient.request('GET', cls._class_url, **params) - - -class SearchableAPIResource(object): - """ - Fork of ListableAPIResource class with different method names - """ - @classmethod - def _search(cls, **params): - """ - Query an API resource stream - - :param params: parameters to filter API resource stream - :type params: dictionary - - :returns: JSON response from HTTP API request - """ - return HTTPClient.request('GET', cls._class_url, **params) - - -class ActionAPIResource(object): - """ - Actionable API Resource - """ - @classmethod - def _trigger_class_action(cls, method, name, id=None, **params): - """ - Trigger an action - - :param method: HTTP method to use to contact API endpoint - :type method: HTTP method string - - :param name: action name - :type name: string - - :param id: trigger the action for the specified resource object - :type id: id - - :param params: action parameters - :type params: dictionary - - :returns: JSON response from HTTP API request - """ - if id is None: - return HTTPClient.request(method, 
cls._class_url + "/" + name, params) - else: - return HTTPClient.request(method, cls._class_url + "/" + str(id) + "/" + name, params) - - @classmethod - def _trigger_action(cls, method, name, id=None, **params): - """ - Trigger an action - - :param method: HTTP method to use to contact API endpoint - :type method: HTTP method string - - :param name: action name - :type name: string - - :param id: trigger the action for the specified resource object - :type id: id - - :param params: action parameters - :type params: dictionary - - :returns: JSON response from HTTP API request - """ - if id is None: - return HTTPClient.request(method, name, params) - else: - return HTTPClient.request(method, name + "/" + str(id), params) diff --git a/datadog/api/comments.py b/datadog/api/comments.py index f0f4c01d3..7ecd50697 100644 --- a/datadog/api/comments.py +++ b/datadog/api/comments.py @@ -1,11 +1,12 @@ -from datadog.api.base import CreateableAPIResource, UpdatableAPIResource, \ - DeletableAPIResource +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from datadog.api.resources import CreateableAPIResource, UpdatableAPIResource -class Comment(CreateableAPIResource, UpdatableAPIResource, DeletableAPIResource): +class Comment(CreateableAPIResource, UpdatableAPIResource): """ A wrapper around Comment HTTP API. """ - _class_name = 'comment' - _class_url = '/comments' - _json_name = 'comment' + + _resource_name = "comments" diff --git a/datadog/api/constants.py b/datadog/api/constants.py index c64280dcb..a7e02b8c8 100644 --- a/datadog/api/constants.py +++ b/datadog/api/constants.py @@ -1,6 +1,25 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc + + class CheckStatus(object): OK = 0 WARNING = 1 CRITICAL = 2 UNKNOWN = 3 ALL = (OK, WARNING, CRITICAL, UNKNOWN) + + +class MonitorType(object): + # From https://docs.datadoghq.com/api/?lang=bash#create-a-monitor + QUERY_ALERT = "query alert" + COMPOSITE = "composite" + SERVICE_CHECK = "service check" + PROCESS_ALERT = "process alert" + LOG_ALERT = "log alert" + METRIC_ALERT = "metric alert" + RUM_ALERT = "rum alert" + EVENT_ALERT = "event alert" + SYNTHETICS_ALERT = "synthetics alert" + TRACE_ANALYTICS = "trace-analytics alert" diff --git a/datadog/api/dashboard_list_v2.py b/datadog/api/dashboard_list_v2.py new file mode 100644 index 000000000..127fca97f --- /dev/null +++ b/datadog/api/dashboard_list_v2.py @@ -0,0 +1,19 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from datadog.api.resources import ( + AddableAPISubResource, + DeletableAPISubResource, + ListableAPISubResource, + UpdatableAPISubResource, +) + + +class DashboardListV2(ListableAPISubResource, AddableAPISubResource, UpdatableAPISubResource, DeletableAPISubResource): + """ + A wrapper around Dashboard List HTTP API. + """ + + _resource_name = "dashboard/lists/manual" + _sub_resource_name = "dashboards" + _api_version = "v2" diff --git a/datadog/api/dashboard_lists.py b/datadog/api/dashboard_lists.py new file mode 100644 index 000000000..e83785f21 --- /dev/null +++ b/datadog/api/dashboard_lists.py @@ -0,0 +1,39 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +from datadog.api.resources import ( + AddableAPISubResource, + CreateableAPIResource, + DeletableAPIResource, + DeletableAPISubResource, + GetableAPIResource, + ListableAPIResource, + ListableAPISubResource, + UpdatableAPIResource, + UpdatableAPISubResource, +) + +from datadog.api.dashboard_list_v2 import DashboardListV2 + + +class DashboardList( + AddableAPISubResource, + CreateableAPIResource, + DeletableAPIResource, + DeletableAPISubResource, + GetableAPIResource, + ListableAPIResource, + ListableAPISubResource, + UpdatableAPIResource, + UpdatableAPISubResource, +): + """ + A wrapper around Dashboard List HTTP API. + """ + + _resource_name = "dashboard/lists/manual" + _sub_resource_name = "dashboards" + + # Support for new API version (api.DashboardList.v2) + # Note: This needs to be removed after complete migration of these endpoints from v1 to v2. + v2 = DashboardListV2() diff --git a/datadog/api/dashboards.py b/datadog/api/dashboards.py new file mode 100644 index 000000000..dab9b4d7a --- /dev/null +++ b/datadog/api/dashboards.py @@ -0,0 +1,20 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from datadog.api.resources import ( + GetableAPIResource, + CreateableAPIResource, + UpdatableAPIResource, + DeletableAPIResource, + ListableAPIResource, +) + + +class Dashboard( + GetableAPIResource, CreateableAPIResource, UpdatableAPIResource, DeletableAPIResource, ListableAPIResource +): + """ + A wrapper around Dashboard HTTP API. 
+ """ + + _resource_name = "dashboard" diff --git a/datadog/api/distributions.py b/datadog/api/distributions.py new file mode 100644 index 000000000..918f7d8a8 --- /dev/null +++ b/datadog/api/distributions.py @@ -0,0 +1,45 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +# datadog +from datadog.api.format import format_points +from datadog.api.resources import SendableAPIResource + + +class Distribution(SendableAPIResource): + """A wrapper around Distribution HTTP API""" + + _resource_name = "distribution_points" + + @classmethod + def send(cls, distributions=None, attach_host_name=True, compress_payload=False, **distribution): + """ + Submit a distribution metric or a list of distribution metrics to the distribution metric + API + + :param compress_payload: compress the payload using zlib + :type compress_payload: bool + :param metric: the name of the time series + :type metric: string + :param points: a (timestamp, [list of values]) pair or + list of (timestamp, [list of values]) pairs + :type points: list + :param host: host name that produced the metric + :type host: string + :param tags: list of tags associated with the metric. 
+ :type tags: string list + :returns: Dictionary representing the API's JSON response + """ + if distributions: + # Multiple distributions are sent + for d in distributions: + if isinstance(d, dict): + d["points"] = format_points(d["points"]) + series_dict = {"series": distributions} + else: + # One distribution is sent + distribution["points"] = format_points(distribution["points"]) + series_dict = {"series": [distribution]} + return super(Distribution, cls).send( + attach_host_name=attach_host_name, compress_payload=compress_payload, **series_dict + ) diff --git a/datadog/api/downtimes.py b/datadog/api/downtimes.py index 9af703feb..567ed9e73 100644 --- a/datadog/api/downtimes.py +++ b/datadog/api/downtimes.py @@ -1,10 +1,38 @@ -from datadog.api.base import GetableAPIResource, CreateableAPIResource,\ - UpdatableAPIResource, ListableAPIResource, DeletableAPIResource +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from datadog.api.resources import ( + GetableAPIResource, + CreateableAPIResource, + UpdatableAPIResource, + ListableAPIResource, + DeletableAPIResource, + ActionAPIResource, +) -class Downtime(GetableAPIResource, CreateableAPIResource, UpdatableAPIResource, - ListableAPIResource, DeletableAPIResource): +class Downtime( + GetableAPIResource, + CreateableAPIResource, + UpdatableAPIResource, + ListableAPIResource, + DeletableAPIResource, + ActionAPIResource, +): """ A wrapper around Monitor Downtiming HTTP API. """ - _class_url = '/downtime' + + _resource_name = "downtime" + + @classmethod + def cancel_downtime_by_scope(cls, **body): + """ + Cancels all downtimes matching the scope. 
+ + :param scope: scope to cancel downtimes by + :type scope: string + + :returns: Dictionary representing the API's JSON response + """ + return super(Downtime, cls)._trigger_class_action("POST", "cancel/by_scope", **body) diff --git a/datadog/api/events.py b/datadog/api/events.py index 290740394..55b176f6c 100644 --- a/datadog/api/events.py +++ b/datadog/api/events.py @@ -1,20 +1,21 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from datadog.api.exceptions import ApiError +from datadog.api.resources import GetableAPIResource, CreateableAPIResource, SearchableAPIResource from datadog.util.compat import iteritems -from datadog.api.base import GetableAPIResource, CreateableAPIResource, \ - SearchableAPIResource class Event(GetableAPIResource, CreateableAPIResource, SearchableAPIResource): """ A wrapper around Event HTTP API. """ - _class_name = 'event' - _class_url = '/events' - _plural_class_name = 'events' - _json_name = 'event' - _timestamp_keys = set(['start', 'end']) + + _resource_name = "events" + _timestamp_keys = set(["start", "end"]) @classmethod - def create(cls, **params): + def create(cls, attach_host_name=True, **params): """ Post an event. @@ -24,6 +25,12 @@ def create(cls, **params): :param text: event message :type text: string + :param aggregation_key: key by which to group events in event stream + :type aggregation_key: string + + :param alert_type: "error", "warning", "info" or "success". + :type alert_type: string + :param date_happened: when the event occurred. if unset defaults to the current time. 
\ (POSIX timestamp) :type date_happened: integer @@ -42,12 +49,12 @@ def create(cls, **params): :type tags: list of strings :param host: host to post the event with - :type host: list of strings + :type host: string :param device_name: device_name to post the event with :type device_name: list of strings - :return: JSON response from HTTP request + :returns: Dictionary representing the API's JSON response >>> title = "Something big happened!" >>> text = 'And let me tell you all about it here!' @@ -55,7 +62,11 @@ def create(cls, **params): >>> api.Event.create(title=title, text=text, tags=tags) """ - return super(Event, cls).create(attach_host_name=True, **params) + if params.get("alert_type"): + if params["alert_type"] not in ["error", "warning", "info", "success"]: + raise ApiError("Parameter alert_type must be either error, warning, info or success") + + return super(Event, cls).create(attach_host_name=attach_host_name, **params) @classmethod def query(cls, **params): @@ -64,14 +75,15 @@ def query(cls, **params): optional filtered by *priority* ("low" or "normal"), *sources* and *tags*. - See the `event API documentation `_ for the + See the `event API documentation `_ for the event data format. - :return: JSON response from HTTP request + :returns: Dictionary representing the API's JSON response >>> api.Event.query(start=1313769783, end=1419436870, priority="normal", \ tags=["application:web"]) """ + def timestamp_to_integer(k, v): if k in cls._timestamp_keys: return int(v) diff --git a/datadog/api/exceptions.py b/datadog/api/exceptions.py index 82dfd27fb..afdfa36a5 100644 --- a/datadog/api/exceptions.py +++ b/datadog/api/exceptions.py @@ -1,33 +1,104 @@ -""" Module containing all the possible exceptions that datadog can raise. +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +""" +API & HTTP Clients exceptions. """ -__all__ = [ - 'DatadogException', - 'ClientError', - 'HttpTimeout', - 'HttpBackoff', - 'ApiError', - 'ApiNotInitialized', -] class DatadogException(Exception): - pass + """ + Base class for Datadog API exceptions. Use this for patterns like the following: + + try: + # do something with the Datadog API + except datadog.api.exceptions.DatadogException: + # handle any Datadog-specific exceptions + """ + + +class ProxyError(DatadogException): + """ + HTTP connection to the configured proxy server failed. + """ + + def __init__(self, method, url, exception): + message = ( + u"Could not request {method} {url}: Unable to connect to proxy. " + u"Please check the proxy configuration and try again.".format(method=method, url=url) + ) + super(ProxyError, self).__init__(message) class ClientError(DatadogException): - "When HTTP connection to Datadog endpoint is not possible" + """ + HTTP connection to Datadog endpoint is not possible. + """ + + def __init__(self, method, url, exception): + message = ( + u"Could not request {method} {url}: {exception}. " + u"Please check the network connection or try again later. " + u"If the problem persists, please contact support@datadoghq.com".format( + method=method, url=url, exception=exception + ) + ) + super(ClientError, self).__init__(message) class HttpTimeout(DatadogException): - "HTTP connection timeout" + """ + HTTP connection timeout. + """ + + def __init__(self, method, url, timeout): + message = ( + u"{method} {url} timed out after {timeout}. " + u"Please try again later. " + u"If the problem persists, please contact support@datadoghq.com".format( + method=method, url=url, timeout=timeout + ) + ) + super(HttpTimeout, self).__init__(message) class HttpBackoff(DatadogException): - "Backing off after too many timeouts" + """ + Backing off after too many timeouts. + """ + + def __init__(self, backoff_period): + message = u"Too many timeouts. 
Won't try again for {backoff_period} seconds. ".format( + backoff_period=backoff_period + ) + super(HttpBackoff, self).__init__(message) + + +class HTTPError(DatadogException): + """ + Datadog returned a HTTP error. + """ + + def __init__(self, status_code=None, reason=None): + reason = u" - {reason}".format(reason=reason) if reason else u"" + message = ( + u"Datadog returned a bad HTTP response code: {status_code}{reason}. " + u"Please try again later. " + u"If the problem persists, please contact support@datadoghq.com".format( + status_code=status_code, + reason=reason, + ) + ) + + super(HTTPError, self).__init__(message) class ApiError(DatadogException): - "Datadog API is returning an error" + """ + Datadog returned an API error (known HTTPError). + + Matches the following status codes: 400, 401, 403, 404, 409, 429. + """ class ApiNotInitialized(DatadogException): diff --git a/datadog/api/format.py b/datadog/api/format.py new file mode 100644 index 000000000..d3e5b72fc --- /dev/null +++ b/datadog/api/format.py @@ -0,0 +1,44 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from numbers import Number +import sys +import time + +if sys.version_info[0] >= 3: + from collections.abc import Iterable +else: + from collections import Iterable + + +def format_points(points): + """ + Format `points` parameter. 
+ + Input: + a value or (timestamp, value) pair or a list of value or (timestamp, value) pairs + + Returns: + list of (timestamp, float value) pairs + + """ + now = time.time() + if not isinstance(points, list): + points = [points] + + formatted_points = [] + for point in points: + if isinstance(point, Number): + timestamp = now + value = float(point) + # Distributions contain a list of points + else: + timestamp = point[0] + if isinstance(point[1], Iterable): + value = [float(p) for p in point[1]] + else: + value = float(point[1]) + + formatted_points.append((timestamp, value)) + + return formatted_points diff --git a/datadog/api/gcp_integration.py b/datadog/api/gcp_integration.py new file mode 100644 index 000000000..978e1ae7b --- /dev/null +++ b/datadog/api/gcp_integration.py @@ -0,0 +1,93 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from datadog.api.resources import GetableAPIResource, CreateableAPIResource, DeletableAPIResource, UpdatableAPIResource + + +class GcpIntegration(GetableAPIResource, CreateableAPIResource, DeletableAPIResource, UpdatableAPIResource): + """ + A wrapper around GCP integration API. + """ + + _resource_name = "integration" + _resource_id = "gcp" + + @classmethod + def list(cls, **params): + """ + List all Datadog-Gcp integrations available in your Datadog organization. + + >>> api.GcpIntegration.list() + """ + return super(GcpIntegration, cls).get(id=cls._resource_id, **params) + + @classmethod + def delete(cls, **body): + """ + Delete a given Datadog-GCP integration. 
+ + >>> project_id="" + >>> client_email="" + + >>> api.GcpIntegration.delete(project_id=project_id, client_email=client_email) + """ + return super(GcpIntegration, cls).delete(id=cls._resource_id, body=body) + + @classmethod + def create(cls, **params): + """ + Add a new GCP integration config. + + All of the following fields values are provided by the \ + JSON service account key file created in the GCP Console \ + for service accounts; Refer to the Datadog-Google Cloud \ + Platform integration installation instructions to see how \ + to generate one for your organization. For further references, \ + consult the Google Cloud service account documentation. + + >>> type="service_account" + >>> project_id="" + >>> private_key_id="" + >>> private_key="" + >>> client_email="" + >>> client_id="" + >>> auth_uri=">> token_uri="" + >>> auth_provider_x509_cert_url="" + >>> client_x509_cert_url="" + >>> host_filters=":,:" + + >>> api.GcpIntegration.create(type=type, project_id=project_id, \ + private_key_id=private_key_id,private_key=private_key, \ + client_email=client_email, client_id=client_id, \ + auth_uri=auth_uri, token_uri=token_uri, \ + auth_provider_x509_cert_url=auth_provider_x509_cert_url, \ + client_x509_cert_url=client_x509_cert_url, host_filters=host_filters) + """ + return super(GcpIntegration, cls).create(id=cls._resource_id, **params) + + @classmethod + def update(cls, **body): + """ + Update an existing service account partially (one or multiple fields), \ + by supplying a new value for the field(s) to be updated. + + `project_id` and `client_email` are required, in order to identify the \ + right service account to update. \ + The unspecified fields will keep their original values. + + The only use case for updating this integration is to change \ + host filtering and automute settings. Otherwise, an entirely \ + new integration config is needed. 
+ + >>> project_id="" + >>> client_email="" + >>> host_filters="" + >>> automute=true #boolean + + >>> api.GcpIntegration.update(project_id=project_id, \ + client_email=client_email, host_filters=host_filters, \ + automute=automute) + """ + params = {} + return super(GcpIntegration, cls).update(id=cls._resource_id, params=params, **body) diff --git a/datadog/api/graphs.py b/datadog/api/graphs.py index 60746fb4d..ef29d703c 100644 --- a/datadog/api/graphs.py +++ b/datadog/api/graphs.py @@ -1,12 +1,16 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc from datadog.util.compat import urlparse -from datadog.api.base import CreateableAPIResource, ActionAPIResource +from datadog.api.resources import CreateableAPIResource, ActionAPIResource, GetableAPIResource, ListableAPIResource class Graph(CreateableAPIResource, ActionAPIResource): """ A wrapper around Graph HTTP API. 
""" - _class_url = '/graph/snapshot' + + _resource_name = "graph/snapshot" @classmethod def create(cls, **params): @@ -25,9 +29,9 @@ def create(cls, **params): :param event_query: a query that will add event bands to the graph :type event_query: string query - :returns: JSON response from HTTP API request + :returns: Dictionary representing the API's JSON response """ - return super(Graph, cls).create(method='GET', **params) + return super(Graph, cls).create(method="GET", **params) @classmethod def status(cls, snapshot_url): @@ -38,10 +42,43 @@ def status(cls, snapshot_url): :param snapshot_url: snapshot URL to check :type snapshot_url: string url - :returns: JSON response from HTTP API request + :returns: Dictionary representing the API's JSON response """ snap_path = urlparse(snapshot_url).path - snap_path = snap_path.split('/snapshot/view/')[1].split('.png')[0] - snapshot_status_url = '/graph/snapshot_status/{0}'.format(snap_path) + snap_path = snap_path.split("/snapshot/view/")[1].split(".png")[0] + + snapshot_status_url = "graph/snapshot_status/{0}".format(snap_path) + + return super(Graph, cls)._trigger_action("GET", snapshot_status_url) + + +class Embed(ListableAPIResource, GetableAPIResource, ActionAPIResource, CreateableAPIResource): + """ + A wrapper around Embed HTTP API. + """ + + _resource_name = "graph/embed" - return super(Graph, cls)._trigger_action('GET', snapshot_status_url) + @classmethod + def enable(cls, embed_id): + """ + Enable a specified embed. + + :param embed_id: embed token + :type embed_id: string embed token + + :returns: Dictionary representing the API's JSON response + """ + return super(Embed, cls)._trigger_class_action("GET", id=embed_id, action_name="enable") + + @classmethod + def revoke(cls, embed_id): + """ + Revoke a specified embed. 
+ + :param embed_id: embed token + :type embed_id: string embed token + + :returns: Dictionary representing the API's JSON response + """ + return super(Embed, cls)._trigger_class_action("GET", id=embed_id, action_name="revoke") diff --git a/datadog/api/hosts.py b/datadog/api/hosts.py index f14432c1f..5bc2a32eb 100644 --- a/datadog/api/hosts.py +++ b/datadog/api/hosts.py @@ -1,14 +1,18 @@ -from datadog.api.base import ActionAPIResource +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from datadog.api.resources import ActionAPIResource, SearchableAPIResource class Host(ActionAPIResource): """ A wrapper around Host HTTP API. """ - _class_url = '/host' + + _resource_name = "host" @classmethod - def mute(cls, host_name, **params): + def mute(cls, host_name, **body): """ Mute a host. @@ -25,10 +29,10 @@ def mute(cls, host_name, **params): :param message: message to associate with the muting of this host :type message: string - :returns: JSON response from HTTP API request + :returns: Dictionary representing the API's JSON response """ - return super(Host, cls)._trigger_class_action('POST', 'mute', host_name, **params) + return super(Host, cls)._trigger_class_action("POST", "mute", host_name, **body) @classmethod def unmute(cls, host_name): @@ -38,7 +42,50 @@ def unmute(cls, host_name): :param host_name: hostname :type host_name: string - :returns: JSON response from HTTP API request + :returns: Dictionary representing the API's JSON response + + """ + return super(Host, cls)._trigger_class_action("POST", "unmute", host_name) + + +class Hosts(ActionAPIResource, SearchableAPIResource): + """ + A wrapper around Hosts HTTP API. + """ + + _resource_name = "hosts" + + @classmethod + def search(cls, **params): + """ + Search among hosts live within the past 2 hours. 
Max 100 + results at a time. + + :param filter: query to filter search results + :type filter: string + + :param sort_field: "status", "apps", "cpu", "iowait", or "load" + :type sort_field: string + + :param sort_dir: "asc" or "desc" + :type sort_dir: string + + :param start: host result to start at + :type start: integer + + :param count: number of host results to return + :type count: integer + + :returns: Dictionary representing the API's JSOn response + + """ + return super(Hosts, cls)._search(**params) + + @classmethod + def totals(cls): + """ + Get total number of hosts active and up. + :returns: Dictionary representing the API's JSON response """ - return super(Host, cls)._trigger_class_action('POST', 'unmute', host_name) + return super(Hosts, cls)._trigger_class_action("GET", "totals") diff --git a/datadog/api/http_client.py b/datadog/api/http_client.py new file mode 100644 index 000000000..8831aff15 --- /dev/null +++ b/datadog/api/http_client.py @@ -0,0 +1,256 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +""" +Available HTTP Client for Datadog API client. + +Priority: +1. `requests` 3p module +2. 
`urlfetch` 3p module - Google App Engine only +""" +# stdlib +import copy +import logging +import platform +import urllib +from threading import Lock + +# 3p +try: + import requests + import requests.adapters +except ImportError: + requests = None # type: ignore + +try: + from google.appengine.api import urlfetch, urlfetch_errors +except ImportError: + urlfetch, urlfetch_errors = None, None + +try: + import urllib3 # type: ignore +except ImportError: + urllib3 = None + +# datadog +from datadog.api.exceptions import ProxyError, ClientError, HTTPError, HttpTimeout + + +log = logging.getLogger("datadog.api") + + +def _get_user_agent_header(): + from datadog import version + + return "datadogpy/{version} (python {pyver}; os {os}; arch {arch})".format( + version=version.__version__, + pyver=platform.python_version(), + os=platform.system().lower(), + arch=platform.machine().lower(), + ) + + +def _remove_context(exc): + """Python3: remove context from chained exceptions to prevent leaking API keys in tracebacks.""" + exc.__cause__ = None + return exc + + +class HTTPClient(object): + """ + An abstract generic HTTP client. Subclasses must implement the `request` methods. + """ + + @classmethod + def request(cls, method, url, headers, params, data, timeout, proxies, verify, max_retries): + """ + Main method to be implemented by HTTP clients. + + The returned data structure has the following fields: + * `content`: string containing the response from the server + * `status_code`: HTTP status code returned by the server + + Can raise the following exceptions: + * `ClientError`: server cannot be contacted + * `HttpTimeout`: connection timed out + * `HTTPError`: unexpected HTTP response code + """ + raise NotImplementedError(u"Must be implemented by HTTPClient subclasses.") + + +class RequestClient(HTTPClient): + """ + HTTP client based on 3rd party `requests` module, using a single session. + This allows us to keep the session alive to spare some execution time. 
+ """ + + _session = None + _session_lock = Lock() + + @classmethod + def request(cls, method, url, headers, params, data, timeout, proxies, verify, max_retries): + try: + + with cls._session_lock: + if cls._session is None: + cls._session = requests.Session() + http_adapter = requests.adapters.HTTPAdapter(max_retries=max_retries) + cls._session.mount("https://", http_adapter) + cls._session.headers.update({"User-Agent": _get_user_agent_header()}) + + result = cls._session.request( + method, url, headers=headers, params=params, data=data, timeout=timeout, proxies=proxies, verify=verify + ) + + result.raise_for_status() + + except requests.exceptions.ProxyError as e: + raise _remove_context(ProxyError(method, url, e)) + except requests.ConnectionError as e: + raise _remove_context(ClientError(method, url, e)) + except requests.exceptions.Timeout: + raise _remove_context(HttpTimeout(method, url, timeout)) + except requests.exceptions.HTTPError as e: + if e.response.status_code in (400, 401, 403, 404, 409, 429): + # This gets caught afterwards and raises an ApiError exception + pass + else: + raise _remove_context(HTTPError(e.response.status_code, result.reason)) + except TypeError: + raise TypeError( + u"Your installed version of `requests` library seems not compatible with" + u"Datadog's usage. We recommend upgrading it ('pip install -U requests')." + u"If you need help or have any question, please contact support@datadoghq.com" + ) + + return result + + +class URLFetchClient(HTTPClient): + """ + HTTP client based on Google App Engine `urlfetch` module. + """ + + @classmethod + def request(cls, method, url, headers, params, data, timeout, proxies, verify, max_retries): + """ + Wrapper around `urlfetch.fetch` method. 
+ + TO IMPLEMENT: + * `max_retries` + """ + # No local certificate file can be used on Google App Engine + validate_certificate = True if verify else False + + # Encode parameters in the url + url_with_params = "{url}?{params}".format(url=url, params=urllib.urlencode(params)) + newheaders = copy.deepcopy(headers) + newheaders["User-Agent"] = _get_user_agent_header() + + try: + result = urlfetch.fetch( + url=url_with_params, + method=method, + headers=newheaders, + validate_certificate=validate_certificate, + deadline=timeout, + payload=data, + # setting follow_redirects=False may be slightly faster: + # https://cloud.google.com/appengine/docs/python/microservice-performance#use_the_shortest_route + follow_redirects=False, + ) + + cls.raise_on_status(result) + + except urlfetch.DownloadError as e: + raise ClientError(method, url, e) + except urlfetch_errors.DeadlineExceededError: + raise HttpTimeout(method, url, timeout) + + return result + + @classmethod + def raise_on_status(cls, result): + """ + Raise on HTTP status code errors. + """ + status_code = result.status_code + + if (status_code / 100) != 2: + if status_code in (400, 401, 403, 404, 409, 429): + pass + else: + raise HTTPError(status_code) + + +class Urllib3Client(HTTPClient): + """ + HTTP client based on 3rd party `urllib3` module. + """ + + _pool = None + _pool_lock = Lock() + + @classmethod + def request(cls, method, url, headers, params, data, timeout, proxies, verify, max_retries): + """ + Wrapper around `urllib3.PoolManager.request` method. This method will raise + exceptions for HTTP status codes that are not 2xx. 
+ """ + try: + with cls._pool_lock: + if cls._pool is None: + cls._pool = urllib3.PoolManager( + retries=max_retries, + timeout=timeout, + cert_reqs="CERT_REQUIRED" if verify else "CERT_NONE", + ) + + newheaders = copy.deepcopy(headers) + newheaders["User-Agent"] = _get_user_agent_header() + response = cls._pool.request( + method, url, body=data, fields=params, headers=newheaders + ) + cls.raise_on_status(response) + + except urllib3.exceptions.ProxyError as e: + raise _remove_context(ProxyError(method, url, e)) + except urllib3.exceptions.MaxRetryError as e: + raise _remove_context(ClientError(method, url, e)) + except urllib3.exceptions.TimeoutError as e: + raise _remove_context(HttpTimeout(method, url, e)) + except urllib3.exceptions.HTTPError as e: + raise _remove_context(HTTPError(e)) + + return response + + @classmethod + def raise_on_status(cls, response): + """ + Raise on HTTP status code errors. + """ + status_code = response.status + if status_code < 200 or status_code >= 300: + if status_code not in (400, 401, 403, 404, 409, 429): + raise HTTPError(status_code, response.reason) + + +def resolve_http_client(): + """ + Resolve an appropriate HTTP client based the defined priority and user environment. + """ + if requests: + log.debug(u"Use `requests` based HTTP client.") + return RequestClient + + if urlfetch and urlfetch_errors: + log.debug(u"Use `urlfetch` based HTTP client.") + return URLFetchClient + + if urllib3: + log.debug(u"Use `urllib3` based HTTP client.") + return Urllib3Client + + raise ImportError( + u"Datadog API client was unable to resolve a HTTP client. " u" Please install `requests` library." 
+ ) diff --git a/datadog/api/infrastructure.py b/datadog/api/infrastructure.py index 63b324df7..806a0514b 100644 --- a/datadog/api/infrastructure.py +++ b/datadog/api/infrastructure.py @@ -1,21 +1,28 @@ -from datadog.api.base import SearchableAPIResource +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from datadog.api.resources import SearchableAPIResource class Infrastructure(SearchableAPIResource): """ A wrapper around Infrastructure HTTP API. """ - _class_url = '/search' - _plural_class_name = 'results' + + _resource_name = "search" @classmethod def search(cls, **params): """ Search for entities in Datadog. - :param q: a query to serch for host and metrics + :param q: a query to search for host and metrics :type q: string query - :returns: JSON response from HTTP API request + :returns: Dictionary representing the API's JSON response """ + # Deprecate the hosts search param + query = params.get("q", "").split(":") + if len(query) > 1 and query[0] == "hosts": + print("[DEPRECATION] Infrastructure.search() is deprecated for ", "hosts. Use `Hosts.search` instead.") return super(Infrastructure, cls)._search(**params) diff --git a/datadog/api/logs.py b/datadog/api/logs.py new file mode 100644 index 000000000..a87efa228 --- /dev/null +++ b/datadog/api/logs.py @@ -0,0 +1,22 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from datadog.api.resources import CreateableAPIResource +from datadog.api.api_client import APIClient + + +class Logs(CreateableAPIResource): + """ + A wrapper around Log HTTP API. 
+ """ + + _resource_name = "logs-queries" + + @classmethod + def list(cls, data): + path = "{resource_name}/list".format( + resource_name=cls._resource_name, + ) + api_version = getattr(cls, "_api_version", None) + + return APIClient.submit("POST", path, api_version, data) diff --git a/datadog/api/metadata.py b/datadog/api/metadata.py new file mode 100644 index 000000000..6c251e577 --- /dev/null +++ b/datadog/api/metadata.py @@ -0,0 +1,64 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +# datadog +from datadog.api.resources import GetableAPIResource, UpdatableAPIResource + + +class Metadata(GetableAPIResource, UpdatableAPIResource): + """ + A wrapper around Metric Metadata HTTP API + """ + + _resource_name = "metrics" + + @classmethod + def get(cls, metric_name): + """ + Get metadata information on an existing Datadog metric + + :param metric_name: metric name (ex. system.cpu.idle) + + :returns: Dictionary representing the API's JSON response + """ + if not metric_name: + raise KeyError("'metric_name' parameter is required") + + return super(Metadata, cls).get(metric_name) + + @classmethod + def update(cls, metric_name, **params): + """ + Update metadata fields for an existing Datadog metric. + If the metadata does not exist for the metric it is created by + the update. + + :param type: type of metric (ex. "gauge", "rate", etc.) + see http://docs.datadoghq.com/metrictypes/ + :type type: string + + :param description: description of the metric + :type description: string + + :param short_name: short name of the metric + :type short_name: string + + :param unit: unit type associated with the metric (ex. "byte", "operation") + see http://docs.datadoghq.com/units/ for full list + :type unit: string + + :param per_unit: per unit type (ex. 
"second" as in "queries per second") + see http://docs.datadoghq.com/units/ for full list + :type per_unit: string + + :param statsd_interval: statsd flush interval for metric in seconds (if applicable) + :type statsd_interval: integer + + :returns: Dictionary representing the API's JSON response + + >>> api.Metadata.update(metric_name='api.requests.served', metric_type="counter") + """ + if not metric_name: + raise KeyError("'metric_name' parameter is required") + + return super(Metadata, cls).update(id=metric_name, **params) diff --git a/datadog/api/metrics.py b/datadog/api/metrics.py index f3fb0874f..252ea88cb 100644 --- a/datadog/api/metrics.py +++ b/datadog/api/metrics.py @@ -1,37 +1,72 @@ -import time - -from datadog.api.base import SearchableAPIResource, SendableAPIResource +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +# datadog from datadog.api.exceptions import ApiError +from datadog.api.format import format_points +from datadog.api.resources import SearchableAPIResource, SendableAPIResource, ListableAPIResource -class Metric(SearchableAPIResource, SendableAPIResource): +class Metric(SearchableAPIResource, SendableAPIResource, ListableAPIResource): """ A wrapper around Metric HTTP API """ - _class_url = None - _json_name = 'series' - _METRIC_QUERY_ENDPOINT = '/query' - _METRIC_SUBMIT_ENDPOINT = '/series' + _resource_name = None + + _METRIC_QUERY_ENDPOINT = "query" + _METRIC_SUBMIT_ENDPOINT = "series" + _METRIC_LIST_ENDPOINT = "metrics" @classmethod - def _process_points(cls, points): - now = time.time() - if isinstance(points, (float, int)): - points = [(now, points)] - elif isinstance(points, tuple): - points = [points] - return points + def list(cls, from_epoch): + """ + Get a list of active metrics since a given time (Unix Epoch) + + :param from_epoch: Start time in 
Unix Epoch (seconds) + + :returns: Dictionary containing a list of active metrics + """ + + cls._resource_name = cls._METRIC_LIST_ENDPOINT + + try: + seconds = int(from_epoch) + params = {"from": seconds} + except ValueError: + raise ApiError("Parameter 'from_epoch' must be an integer") + + return super(Metric, cls).get_all(**params) + + @staticmethod + def _rename_metric_type(metric): + """ + FIXME DROPME in 1.0: + + API documentation was illegitimately promoting usage of `metric_type` parameter + instead of `type`. + To be consistent and avoid 'backward incompatibilities', properly rename this parameter. + """ + if "metric_type" in metric: + metric["type"] = metric.pop("metric_type") @classmethod - def send(cls, metrics=None, **single_metric): + def send(cls, metrics=None, attach_host_name=True, compress_payload=False, **single_metric): """ Submit a metric or a list of metrics to the metric API + A metric dictionary should consist of 5 keys: metric, points, host, tags, type (some of which optional), + see below: :param metric: the name of the time series :type metric: string - :param points: list of points to submit + :param compress_payload: compress the payload using zlib + :type compress_payload: bool + + :param metrics: a list of dictionaries, each item being a metric to send + :type metrics: list + + :param points: a (timestamp, value) pair or list of (timestamp, value) pairs :type points: list :param host: host name that produced the metric @@ -41,42 +76,39 @@ def send(cls, metrics=None, **single_metric): :type tags: string list :param type: type of the metric - :type type: 'gauge' or 'counter' string + :type type: 'gauge' or 'count' or 'rate' string - :returns: JSON response from HTTP request - """ - def rename_metric_type(metric): - """ - FIXME DROPME in 1.0: + >>> api.Metric.send(metric='my.series', points=[(now, 15), (future_10s, 16)]) - API documentation was illegitimately promoting usage of `metric_type` parameter - instead of `type`. 
- To be consistent and avoid 'backward incompatibilities', properly rename this parameter. - """ - if 'metric_type' in metric: - metric['type'] = metric.pop('metric_type') + >>> metrics = [{'metric': 'my.series', 'type': 'gauge', 'points': [(now, 15), (future_10s, 16)]}, + {'metric': 'my.series2', 'type': 'gauge', 'points': [(now, 15), (future_10s, 16)]}] + >>> api.Metric.send(metrics=metrics) + :returns: Dictionary representing the API's JSON response + """ # Set the right endpoint - cls._class_url = cls._METRIC_SUBMIT_ENDPOINT + cls._resource_name = cls._METRIC_SUBMIT_ENDPOINT # Format the payload try: if metrics: for metric in metrics: if isinstance(metric, dict): - rename_metric_type(metric) - metric['points'] = cls._process_points(metric['points']) + cls._rename_metric_type(metric) + metric["points"] = format_points(metric["points"]) metrics_dict = {"series": metrics} else: - rename_metric_type(single_metric) - single_metric['points'] = cls._process_points(single_metric['points']) + cls._rename_metric_type(single_metric) + single_metric["points"] = format_points(single_metric["points"]) metrics = [single_metric] metrics_dict = {"series": metrics} except KeyError: raise KeyError("'points' parameter is required") - return super(Metric, cls).send(attach_host_name=True, **metrics_dict) + return super(Metric, cls).send( + attach_host_name=attach_host_name, compress_payload=compress_payload, **metrics_dict + ) @classmethod def query(cls, **params): @@ -92,7 +124,7 @@ def query(cls, **params): :param query: metric query :type query: string query - :return: JSON response from HTTP request + :returns: Dictionary representing the API's JSON response *start* and *end* should be less than 24 hours apart. It is *not* meant to retrieve metric data in bulk. 
@@ -101,14 +133,14 @@ def query(cls, **params): query='avg:system.cpu.idle{*}') """ # Set the right endpoint - cls._class_url = cls._METRIC_QUERY_ENDPOINT + cls._resource_name = cls._METRIC_QUERY_ENDPOINT # `from` is a reserved keyword in Python, therefore - # `api.Metric.query(from=...)` is not permited + # `api.Metric.query(from=...)` is not permitted # -> map `start` to `from` and `end` to `to` try: - params['from'] = params.pop('start') - params['to'] = params.pop('end') + params["from"] = params.pop("start") + params["to"] = params.pop("end") except KeyError as e: raise ApiError("The parameter '{0}' is required".format(e.args[0])) diff --git a/datadog/api/monitors.py b/datadog/api/monitors.py index 5c4487905..a2d9e7478 100644 --- a/datadog/api/monitors.py +++ b/datadog/api/monitors.py @@ -1,14 +1,29 @@ -from datadog.api.base import GetableAPIResource, CreateableAPIResource, \ - UpdatableAPIResource, ListableAPIResource, DeletableAPIResource, \ - ActionAPIResource - - -class Monitor(GetableAPIResource, CreateableAPIResource, UpdatableAPIResource, - ListableAPIResource, DeletableAPIResource, ActionAPIResource): +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from datadog.api.resources import ( + GetableAPIResource, + CreateableAPIResource, + UpdatableAPIResource, + ListableAPIResource, + DeletableAPIResource, + ActionAPIResource, +) + + +class Monitor( + GetableAPIResource, + CreateableAPIResource, + UpdatableAPIResource, + ListableAPIResource, + DeletableAPIResource, + ActionAPIResource, +): """ A wrapper around Monitor HTTP API. 
""" - _class_url = '/monitor' + + _resource_name = "monitor" @classmethod def get(cls, id, **params): @@ -22,10 +37,10 @@ def get(cls, id, **params): :type group_states: string list, strings are chosen from one or more \ from 'all', 'alert', 'warn', or 'no data' - :returns: JSON response from HTTP request + :returns: Dictionary representing the API's JSON response """ - if 'group_states' in params and isinstance(params['group_states'], list): - params['group_states'] = ','.join(params['group_states']) + if "group_states" in params and isinstance(params["group_states"], list): + params["group_states"] = ",".join(params["group_states"]) return super(Monitor, cls).get(id, **params) @@ -38,18 +53,26 @@ def get_all(cls, **params): :type group_states: string list, strings are chosen from one or more \ from 'all', 'alert', 'warn', or 'no data' + :param name: name to filter the list of monitors by + :type name: string + :param tags: tags to filter the list of monitors by scope :type tags: string list - :returns: JSON response from HTTP request + :param monitor_tags: list indicating what service and/or custom tags, if any, \ + should be used to filter the list of monitors + :type monitor_tags: string list + + :returns: Dictionary representing the API's JSON response """ - if 'group_states' in params and isinstance(params['group_states'], list): - params['group_states'] = ','.join(params['group_states']) + for p in ["group_states", "tags", "monitor_tags"]: + if p in params and isinstance(params[p], list): + params[p] = ",".join(params[p]) return super(Monitor, cls).get_all(**params) @classmethod - def mute(cls, id, **params): + def mute(cls, id, **body): """ Mute a monitor. 
@@ -60,12 +83,12 @@ def mute(cls, id, **params): :type end: POSIX timestamp - :returns: JSON response from HTTP request + :returns: Dictionary representing the API's JSON response """ - return super(Monitor, cls)._trigger_class_action('POST', 'mute', id, **params) + return super(Monitor, cls)._trigger_class_action("POST", "mute", id, **body) @classmethod - def unmute(cls, id, **params): + def unmute(cls, id, **body): """ Unmute a monitor. @@ -75,24 +98,60 @@ def unmute(cls, id, **params): :param all_scopes: if True, clears mute settings for all scopes :type all_scopes: boolean - :returns: JSON response from HTTP request + :returns: Dictionary representing the API's JSON response """ - return super(Monitor, cls)._trigger_class_action('POST', 'unmute', id, **params) + return super(Monitor, cls)._trigger_class_action("POST", "unmute", id, **body) @classmethod def mute_all(cls): """ Globally mute monitors. - :returns: JSON response from HTTP request + :returns: Dictionary representing the API's JSON response """ - return super(Monitor, cls)._trigger_class_action('POST', 'mute_all') + return super(Monitor, cls)._trigger_class_action("POST", "mute_all") @classmethod def unmute_all(cls): """ Cancel global monitor mute setting (does not remove mute settings for individual monitors). - :returns: JSON response from HTTP request + :returns: Dictionary representing the API's JSON response + """ + return super(Monitor, cls)._trigger_class_action("POST", "unmute_all") + + @classmethod + def search(cls, **params): + """ + Search monitors. + + :returns: Dictionary representing the API's JSON response + """ + return super(Monitor, cls)._trigger_class_action("GET", "search", params=params) + + @classmethod + def search_groups(cls, **params): + """ + Search monitor groups. 
+ + :returns: Dictionary representing the API's JSON response + """ + return super(Monitor, cls)._trigger_class_action("GET", "groups/search", params=params) + + @classmethod + def can_delete(cls, **params): + """ + Checks if the monitors corresponding to the monitor ids can be deleted. + + :returns: Dictionary representing the API's JSON response + """ + return super(Monitor, cls)._trigger_class_action("GET", "can_delete", params=params) + + @classmethod + def validate(cls, **body): + """ + Checks if the monitors definition is valid. + + :returns: Dictionary representing the API's JSON response """ - return super(Monitor, cls)._trigger_class_action('POST', 'unmute_all') + return super(Monitor, cls)._trigger_class_action("POST", "validate", **body) diff --git a/datadog/api/permissions.py b/datadog/api/permissions.py new file mode 100644 index 000000000..f12dad7a1 --- /dev/null +++ b/datadog/api/permissions.py @@ -0,0 +1,27 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from datadog.api.resources import ( +    ActionAPIResource, +    CreateableAPIResource, +    CustomUpdatableAPIResource, +    DeletableAPIResource, +    GetableAPIResource, +    ListableAPIResource, +) + + +class Permissions( +    ActionAPIResource, +    CreateableAPIResource, +    CustomUpdatableAPIResource, +    GetableAPIResource, +    ListableAPIResource, +    DeletableAPIResource, +): +    """ +    A wrapper around Permissions HTTP API. +    """ + +    _resource_name = "permissions" +    _api_version = "v2" diff --git a/datadog/api/resources.py b/datadog/api/resources.py new file mode 100644 index 000000000..67bcc39bf --- /dev/null +++ b/datadog/api/resources.py @@ -0,0 +1,539 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. 
+# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +""" +Datadog API resources. +""" + +from datadog.api.api_client import APIClient + + +class CreateableAPIResource(object): + """ + Creatable API Resource + """ + + @classmethod + def create(cls, attach_host_name=False, method="POST", id=None, params=None, **body): + """ + Create a new API resource object + + :param attach_host_name: link the new resource object to the host name + :type attach_host_name: bool + + :param method: HTTP method to use to contact API endpoint + :type method: HTTP method string + + :param id: create a new resource object as a child of the given object + :type id: id + + :param params: new resource object source + :type params: dictionary + + :param body: new resource object attributes + :type body: dictionary + + :returns: Dictionary representing the API's JSON response + """ + if params is None: + params = {} + + path = cls._resource_name + api_version = getattr(cls, "_api_version", None) + + if method == "GET": + return APIClient.submit("GET", path, api_version, **body) + if id is None: + return APIClient.submit("POST", path, api_version, body, attach_host_name=attach_host_name, **params) + + path = "{resource_name}/{resource_id}".format(resource_name=cls._resource_name, resource_id=id) + return APIClient.submit("POST", path, api_version, body, attach_host_name=attach_host_name, **params) + + +class SendableAPIResource(object): + """ + Fork of CreateableAPIResource class with different method names + """ + + @classmethod + def send(cls, attach_host_name=False, id=None, compress_payload=False, **body): + """ + Create an API resource object + + :param attach_host_name: link the new resource object to the host name + :type attach_host_name: bool + + :param id: create a new resource object as a child of the given object + :type id: id + + :param compress_payload: compress the payload using zlib + :type 
compress_payload: bool + + :param body: new resource object attributes + :type body: dictionary + + :returns: Dictionary representing the API's JSON response + """ + api_version = getattr(cls, "_api_version", None) + + if id is None: + return APIClient.submit( + "POST", + cls._resource_name, + api_version, + body, + attach_host_name=attach_host_name, + compress_payload=compress_payload, + ) + + path = "{resource_name}/{resource_id}".format(resource_name=cls._resource_name, resource_id=id) + return APIClient.submit( + "POST", path, api_version, body, attach_host_name=attach_host_name, compress_payload=compress_payload + ) + + +class UpdatableAPIResource(object): + """ + Updatable API Resource + """ + + @classmethod + def update(cls, id, params=None, **body): + """ + Update an API resource object + + :param params: updated resource object source + :type params: dictionary + + :param body: updated resource object attributes + :type body: dictionary + + :returns: Dictionary representing the API's JSON response + """ + if params is None: + params = {} + + path = "{resource_name}/{resource_id}".format(resource_name=cls._resource_name, resource_id=id) + api_version = getattr(cls, "_api_version", None) + + return APIClient.submit("PUT", path, api_version, body, **params) + + +class CustomUpdatableAPIResource(object): + """ + Updatable API Resource with custom HTTP Verb + """ + + @classmethod + def update(cls, method=None, id=None, params=None, **body): + """ + Update an API resource object + + :param method: HTTP method, defaults to PUT + :type params: string + + :param params: updatable resource id + :type params: string + + :param params: updated resource object source + :type params: dictionary + + :param body: updated resource object attributes + :type body: dictionary + + :returns: Dictionary representing the API's JSON response + """ + + if method is None: + method = "PUT" + if params is None: + params = {} + + path = 
"{resource_name}/{resource_id}".format(resource_name=cls._resource_name, resource_id=id) + api_version = getattr(cls, "_api_version", None) + + return APIClient.submit(method, path, api_version, body, **params) + + +class DeletableAPIResource(object): + """ + Deletable API Resource + """ + + @classmethod + def delete(cls, id, **params): + """ + Delete an API resource object + + :param id: resource object to delete + :type id: id + + :returns: Dictionary representing the API's JSON response + """ + path = "{resource_name}/{resource_id}".format(resource_name=cls._resource_name, resource_id=id) + api_version = getattr(cls, "_api_version", None) + + return APIClient.submit("DELETE", path, api_version, **params) + + +class GetableAPIResource(object): + """ + Getable API Resource + """ + + @classmethod + def get(cls, id, **params): + """ + Get information about an API resource object + + :param id: resource object id to retrieve + :type id: id + + :param params: parameters to filter API resource stream + :type params: dictionary + + :returns: Dictionary representing the API's JSON response + """ + path = "{resource_name}/{resource_id}".format(resource_name=cls._resource_name, resource_id=id) + api_version = getattr(cls, "_api_version", None) + + return APIClient.submit("GET", path, api_version, **params) + + +class ListableAPIResource(object): + """ + Listable API Resource + """ + + @classmethod + def get_all(cls, **params): + """ + List API resource objects + + :param params: parameters to filter API resource stream + :type params: dictionary + + :returns: Dictionary representing the API's JSON response + """ + api_version = getattr(cls, "_api_version", None) + + return APIClient.submit("GET", cls._resource_name, api_version, **params) + + +class ListableAPISubResource(object): + """ + Listable API Sub-Resource + """ + + @classmethod + def get_items(cls, id, **params): + """ + List API sub-resource objects from a resource + + :param id: resource id to retrieve 
sub-resource objects from + :type id: id + + :param params: parameters to filter API sub-resource stream + :type params: dictionary + + :returns: Dictionary representing the API's JSON response + """ + + path = "{resource_name}/{resource_id}/{sub_resource_name}".format( + resource_name=cls._resource_name, resource_id=id, sub_resource_name=cls._sub_resource_name + ) + api_version = getattr(cls, "_api_version", None) + + return APIClient.submit("GET", path, api_version, **params) + + +class AddableAPISubResource(object): + """ + Addable API Sub-Resource + """ + + @classmethod + def add_items(cls, id, params=None, **body): + """ + Add new API sub-resource objects to a resource + + :param id: resource id to add sub-resource objects to + :type id: id + + :param params: request parameters + :type params: dictionary + + :param body: new sub-resource objects attributes + :type body: dictionary + + :returns: Dictionary representing the API's JSON response + """ + if params is None: + params = {} + + path = "{resource_name}/{resource_id}/{sub_resource_name}".format( + resource_name=cls._resource_name, resource_id=id, sub_resource_name=cls._sub_resource_name + ) + api_version = getattr(cls, "_api_version", None) + + return APIClient.submit("POST", path, api_version, body, **params) + + +class UpdatableAPISubResource(object): + """ + Updatable API Sub-Resource + """ + + @classmethod + def update_items(cls, id, params=None, **body): + """ + Update API sub-resource objects of a resource + + :param id: resource id to update sub-resource objects from + :type id: id + + :param params: request parameters + :type params: dictionary + + :param body: updated sub-resource objects attributes + :type body: dictionary + + :returns: Dictionary representing the API's JSON response + """ + if params is None: + params = {} + + path = "{resource_name}/{resource_id}/{sub_resource_name}".format( + resource_name=cls._resource_name, resource_id=id, sub_resource_name=cls._sub_resource_name + ) + 
api_version = getattr(cls, "_api_version", None) + + return APIClient.submit("PUT", path, api_version, body, **params) + + +class DeletableAPISubResource(object): + """ + Deletable API Sub-Resource + """ + + @classmethod + def delete_items(cls, id, params=None, **body): + """ + Delete API sub-resource objects from a resource + + :param id: resource id to delete sub-resource objects from + :type id: id + + :param params: request parameters + :type params: dictionary + + :param body: deleted sub-resource objects attributes + :type body: dictionary + + :returns: Dictionary representing the API's JSON response + """ + if params is None: + params = {} + + path = "{resource_name}/{resource_id}/{sub_resource_name}".format( + resource_name=cls._resource_name, resource_id=id, sub_resource_name=cls._sub_resource_name + ) + api_version = getattr(cls, "_api_version", None) + + return APIClient.submit("DELETE", path, api_version, body, **params) + + +class SearchableAPIResource(object): + """ + Fork of ListableAPIResource class with different method names + """ + + @classmethod + def _search(cls, **params): + """ + Query an API resource stream + + :param params: parameters to filter API resource stream + :type params: dictionary + + :returns: Dictionary representing the API's JSON response + """ + api_version = getattr(cls, "_api_version", None) + + return APIClient.submit("GET", cls._resource_name, api_version, **params) + + +class ActionAPIResource(object): + """ + Actionable API Resource + """ + + @classmethod + def _trigger_class_action(cls, method, action_name, id=None, params=None, **body): + """ + Trigger an action + + :param method: HTTP method to use to contact API endpoint + :type method: HTTP method string + + :param action_name: action name + :type action_name: string + + :param id: trigger the action for the specified resource object + :type id: id + + :param params: action parameters + :type params: dictionary + + :param body: action body + :type body: dictionary 
+ + :returns: Dictionary representing the API's JSON response + """ + if params is None: + params = {} + + api_version = getattr(cls, "_api_version", None) + + if id is None: + path = "{resource_name}/{action_name}".format(resource_name=cls._resource_name, action_name=action_name) + else: + path = "{resource_name}/{resource_id}/{action_name}".format( + resource_name=cls._resource_name, resource_id=id, action_name=action_name + ) + if method == "GET": + # Do not add body to GET requests, it causes 400 Bad request responses on EU site + body = None + return APIClient.submit(method, path, api_version, body, **params) + + @classmethod + def _trigger_action(cls, method, name, id=None, **body): + """ + Trigger an action + + :param method: HTTP method to use to contact API endpoint + :type method: HTTP method string + + :param name: action name + :type name: string + + :param id: trigger the action for the specified resource object + :type id: id + + :param body: action body + :type body: dictionary + + :returns: Dictionary representing the API's JSON response + """ + api_version = getattr(cls, "_api_version", None) + if id is None: + return APIClient.submit(method, name, api_version, body) + + path = "{action_name}/{resource_id}".format(action_name=name, resource_id=id) + if method == "GET": + # Do not add body to GET requests, it causes 400 Bad request responses on EU site + body = None + return APIClient.submit(method, path, api_version, body) + + +class UpdatableAPISyntheticsSubResource(object): + """ + Update Synthetics sub resource + """ + + @classmethod + def update_synthetics_items(cls, id, params=None, **body): + """ + Update API sub-resource objects of a resource + + :param id: resource id to update sub-resource objects from + :type id: id + + :param params: request parameters + :type params: dictionary + + :param body: updated sub-resource objects attributes + :type body: dictionary + + :returns: Dictionary representing the API's JSON response + """ + if params 
is None: + params = {} + + path = "{resource_name}/tests/{resource_id}/{sub_resource_name}".format( + resource_name=cls._resource_name, resource_id=id, sub_resource_name=cls._sub_resource_name + ) + api_version = getattr(cls, "_api_version", None) + + return APIClient.submit("PUT", path, api_version, body, **params) + + +class UpdatableAPISyntheticsResource(object): + """ + Update Synthetics resource + """ + + @classmethod + def update_synthetics(cls, id, params=None, **body): + """ + Update an API resource object + + :param params: updated resource object source + :type params: dictionary + + :param body: updated resource object attributes + :type body: dictionary + + :returns: Dictionary representing the API's JSON response + """ + if params is None: + params = {} + + path = "{resource_name}/tests/{resource_id}".format(resource_name=cls._resource_name, resource_id=id) + api_version = getattr(cls, "_api_version", None) + + return APIClient.submit("PUT", path, api_version, body, **params) + + +class ActionAPISyntheticsResource(object): + """ + Actionable Synthetics API Resource + """ + + @classmethod + def _trigger_synthetics_class_action(cls, method, name, id=None, params=None, **body): + """ + Trigger an action + + :param method: HTTP method to use to contact API endpoint + :type method: HTTP method string + + :param name: action name + :type name: string + + :param id: trigger the action for the specified resource object + :type id: id + + :param params: action parameters + :type params: dictionary + + :param body: action body + :type body: dictionary + + :returns: Dictionary representing the API's JSON response + """ + if params is None: + params = {} + + api_version = getattr(cls, "_api_version", None) + + if id is None: + path = "{resource_name}/{action_name}".format(resource_name=cls._resource_name, action_name=name) + else: + path = "{resource_name}/{action_name}/{resource_id}".format( + resource_name=cls._resource_name, resource_id=id, action_name=name + ) 
+        if method == "GET":
+            # Do not add body to GET requests, it causes 400 Bad request responses on EU site
+            body = None
+        return APIClient.submit(method, path, api_version, body, **params)
diff --git a/datadog/api/roles.py b/datadog/api/roles.py
new file mode 100644
index 000000000..2fce1dd2e
--- /dev/null
+++ b/datadog/api/roles.py
@@ -0,0 +1,71 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+from datadog.api.resources import (
+    ActionAPIResource,
+    CreateableAPIResource,
+    CustomUpdatableAPIResource,
+    DeletableAPIResource,
+    GetableAPIResource,
+    ListableAPIResource,
+)
+
+from datadog.api.api_client import APIClient
+
+
+class Roles(
+    ActionAPIResource,
+    CreateableAPIResource,
+    CustomUpdatableAPIResource,
+    GetableAPIResource,
+    ListableAPIResource,
+    DeletableAPIResource,
+):
+    """
+    A wrapper around Roles HTTP API.
+ """ + + _resource_name = "roles" + _api_version = "v2" + + @classmethod + def update(cls, id, **body): + """ + Update a role's attributes + + :param id: uuid of the role + :param body: dict with type of the input, role `id`, and modified attributes + :returns: Dictionary representing the API's JSON response + """ + params = {} + return super(Roles, cls).update("PATCH", id, params=params, **body) + + @classmethod + def assign_permission(cls, id, **body): + """ + Assign permission to a role + + :param id: uuid of the role to assign permission to + :param body: dict with "type": "permissions" and uuid of permission to assign + :returns: Dictionary representing the API's JSON response + """ + params = {} + path = "{resource_name}/{resource_id}/permissions".format(resource_name=cls._resource_name, resource_id=id) + api_version = getattr(cls, "_api_version", None) + + return APIClient.submit("POST", path, api_version, body, **params) + + @classmethod + def unassign_permission(cls, id, **body): + """ + Unassign permission from a role + + :param id: uuid of the role to unassign permission from + :param body: dict with "type": "permissions" and uuid of permission to unassign + :returns: Dictionary representing the API's JSON response + """ + params = {} + path = "{resource_name}/{resource_id}/permissions".format(resource_name=cls._resource_name, resource_id=id) + api_version = getattr(cls, "_api_version", None) + + return APIClient.submit("DELETE", path, api_version, body, **params) diff --git a/datadog/api/screenboards.py b/datadog/api/screenboards.py index c8d2df031..9367ab7cb 100644 --- a/datadog/api/screenboards.py +++ b/datadog/api/screenboards.py @@ -1,16 +1,29 @@ -from datadog.api.base import GetableAPIResource, CreateableAPIResource, \ - UpdatableAPIResource, DeletableAPIResource, ActionAPIResource, ListableAPIResource - - -class Screenboard(GetableAPIResource, CreateableAPIResource, - UpdatableAPIResource, DeletableAPIResource, - ActionAPIResource, 
ListableAPIResource): +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from datadog.api.resources import ( + GetableAPIResource, + CreateableAPIResource, + UpdatableAPIResource, + DeletableAPIResource, + ActionAPIResource, + ListableAPIResource, +) + + +class Screenboard( + GetableAPIResource, + CreateableAPIResource, + UpdatableAPIResource, + DeletableAPIResource, + ActionAPIResource, + ListableAPIResource, +): """ A wrapper around Screenboard HTTP API. """ - _class_name = 'screen' - _class_url = '/screen' - _json_name = 'board' + + _resource_name = "screen" @classmethod def share(cls, board_id): @@ -20,9 +33,9 @@ def share(cls, board_id): :param board_id: screenboard to share :type board_id: id - :returns: JSON response from HTTP request + :returns: Dictionary representing the API's JSON response """ - return super(Screenboard, cls)._trigger_action('GET', 'screen/share', board_id) + return super(Screenboard, cls)._trigger_action("POST", "screen/share", board_id) @classmethod def revoke(cls, board_id): @@ -32,6 +45,6 @@ def revoke(cls, board_id): :param board_id: screenboard to revoke :type board_id: id - :returns: JSON response from HTTP request + :returns: Dictionary representing the API's JSON response """ - return super(Screenboard, cls)._trigger_action('DELETE', 'screen/share', board_id) + return super(Screenboard, cls)._trigger_action("DELETE", "screen/share", board_id) diff --git a/datadog/api/service_checks.py b/datadog/api/service_checks.py index 6bb33c63e..72fcb9a61 100644 --- a/datadog/api/service_checks.py +++ b/datadog/api/service_checks.py @@ -1,14 +1,18 @@ -from datadog.api.base import ActionAPIResource -from datadog.api.exceptions import ApiError +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. 
+# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc from datadog.api.constants import CheckStatus +from datadog.api.exceptions import ApiError +from datadog.api.resources import ActionAPIResource class ServiceCheck(ActionAPIResource): """ A wrapper around ServiceCheck HTTP API. """ + @classmethod - def check(cls, **params): + def check(cls, **body): """ Post check statuses for use with monitors @@ -30,10 +34,12 @@ def check(cls, **params): :param tags: list of tags for this check :type tags: string list - :returns: JSON response from HTTP request + :returns: Dictionary representing the API's JSON response """ - if 'status' in params and params['status'] not in CheckStatus.ALL: - raise ApiError('Invalid status, expected one of: %s' - % ', '.join(str(v) for v in CheckStatus.ALL)) - return super(ServiceCheck, cls)._trigger_action('POST', 'check_run', **params) + # Validate checks, include only non-null values + for param, value in body.items(): + if param == "status" and body[param] not in CheckStatus.ALL: + raise ApiError("Invalid status, expected one of: %s" % ", ".join(str(v) for v in CheckStatus.ALL)) + + return super(ServiceCheck, cls)._trigger_action("POST", "check_run", **body) diff --git a/datadog/api/service_level_objectives.py b/datadog/api/service_level_objectives.py new file mode 100644 index 000000000..abb5a5da8 --- /dev/null +++ b/datadog/api/service_level_objectives.py @@ -0,0 +1,213 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +from datadog.util.format import force_to_epoch_seconds +from datadog.api.resources import ( + GetableAPIResource, + CreateableAPIResource, + UpdatableAPIResource, + ListableAPIResource, + DeletableAPIResource, + ActionAPIResource, +) + + +class ServiceLevelObjective( + GetableAPIResource, + CreateableAPIResource, + UpdatableAPIResource, + ListableAPIResource, + DeletableAPIResource, + ActionAPIResource, +): + """ + A wrapper around Service Level Objective HTTP API. + """ + + _resource_name = "slo" + + @classmethod + def create(cls, attach_host_name=False, method="POST", id=None, params=None, **body): + """ + Create a SLO + + :returns: created SLO details + """ + return super(ServiceLevelObjective, cls).create( + attach_host_name=False, method="POST", id=None, params=params, **body + ) + + @classmethod + def get(cls, id, **params): + """ + Get a specific SLO details. + + :param id: SLO id to get details for + :type id: str + + :returns: SLO details + """ + return super(ServiceLevelObjective, cls).get(id, **params) + + @classmethod + def get_all(cls, query=None, tags_query=None, metrics_query=None, ids=None, offset=0, limit=100, **params): + """ + Get all SLO details. + + :param query: optional search query to filter results for SLO name + :type query: str + + :param tags_query: optional search query to filter results for a single SLO tag + :type query: str + + :param metrics_query: optional search query to filter results based on SLO numerator and denominator + :type query: str + + :param ids: optional list of SLO ids to get many specific SLOs at once. 
+ :type ids: list(str) + + :param offset: offset of results to use (default 0) + :type offset: int + + :param limit: limit of results to return (default: 100) + :type limit: int + + :returns: SLOs matching the query + """ + search_terms = {} + if query: + search_terms["query"] = query + if ids: + search_terms["ids"] = ids + if tags_query: + search_terms["tags_query"] = tags_query + if metrics_query: + search_terms["metrics_query"] = metrics_query + search_terms["offset"] = offset + search_terms["limit"] = limit + + return super(ServiceLevelObjective, cls).get_all(**search_terms) + + @classmethod + def update(cls, id, params=None, **body): + """ + Update a specific SLO details. + + :param id: SLO id to update details for + :type id: str + + :returns: SLO details + """ + return super(ServiceLevelObjective, cls).update(id, params, **body) + + @classmethod + def delete(cls, id, **params): + """ + Delete a specific SLO. + + :param id: SLO id to delete + :type id: str + + :returns: SLO ids removed + """ + return super(ServiceLevelObjective, cls).delete(id, **params) + + @classmethod + def bulk_delete(cls, ops, **params): + """ + Bulk Delete Timeframes from multiple SLOs. + + :param ops: a dictionary mapping of SLO ID to timeframes to remove. 
+ :type ops: dict(str, list(str)) + + :returns: Dictionary representing the API's JSON response + `errors` - errors with operation + `data` - updates and deletions + """ + return super(ServiceLevelObjective, cls)._trigger_class_action( + "POST", + "bulk_delete", + body=ops, + params=params, + suppress_response_errors_on_codes=[200], + ) + + @classmethod + def delete_many(cls, ids, **params): + """ + Delete Multiple SLOs + + :param ids: a list of SLO IDs to remove + :type ids: list(str) + + :returns: Dictionary representing the API's JSON response see `data` list(slo ids) && `errors` + """ + return super(ServiceLevelObjective, cls)._trigger_class_action( + "DELETE", + "", + params=params, + body={"ids": ids}, + suppress_response_errors_on_codes=[200], + ) + + @classmethod + def can_delete(cls, ids, **params): + """ + Check if the following SLOs can be safely deleted. + + This is used to check if SLO has any references to it. + + :param ids: a list of SLO IDs to check + :type ids: list(str) + + :returns: Dictionary representing the API's JSON response + "data.ok" represents a list of SLO ids that have no known references. + "errors" contains a dictionary of SLO ID to known reference(s). + """ + params["ids"] = ids + return super(ServiceLevelObjective, cls)._trigger_class_action( + "GET", + "can_delete", + params=params, + body=None, + suppress_response_errors_on_codes=[200], + ) + + @classmethod + def history(cls, id, from_ts, to_ts, **params): + """ + Get the SLO's history from the given time range. + + :param id: SLO ID to query + :type id: str + + :param from_ts: `from` timestamp in epoch seconds to query + :type from_ts: int|datetime.datetime + + :param to_ts: `to` timestamp in epoch seconds to query, must be > `from_ts` + :type to_ts: int|datetime.datetime + + :returns: Dictionary representing the API's JSON response + "data.ok" represents a list of SLO ids that have no known references. + "errors" contains a dictionary of SLO ID to known reference(s). 
+        """
+        params["id"] = id
+        params["from_ts"] = force_to_epoch_seconds(from_ts)
+        params["to_ts"] = force_to_epoch_seconds(to_ts)
+        return super(ServiceLevelObjective, cls)._trigger_class_action(
+            "GET",
+            "history",
+            id=id,
+            params=params,
+            body=None,
+            suppress_response_errors_on_codes=[200],
+        )
+
+    @classmethod
+    def search(cls, **params):
+        """
+        Search SLOs.
+
+        :returns: Dictionary representing the API's JSON response
+        """
+        return super(ServiceLevelObjective, cls)._trigger_class_action("GET", "search", params=params)
diff --git a/datadog/api/synthetics.py b/datadog/api/synthetics.py
new file mode 100644
index 000000000..88c0e3add
--- /dev/null
+++ b/datadog/api/synthetics.py
@@ -0,0 +1,214 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+from datadog.api.exceptions import ApiError
+from datadog.api.resources import (
+    CreateableAPIResource,
+    GetableAPIResource,
+    ActionAPIResource,
+    UpdatableAPISyntheticsResource,
+    UpdatableAPISyntheticsSubResource,
+    ActionAPISyntheticsResource,
+)
+
+
+class Synthetics(
+    ActionAPIResource,
+    ActionAPISyntheticsResource,
+    CreateableAPIResource,
+    GetableAPIResource,
+    UpdatableAPISyntheticsResource,
+    UpdatableAPISyntheticsSubResource,
+):
+    """
+    A wrapper around Synthetics HTTP API.
+    """
+
+    _resource_name = "synthetics"
+    _sub_resource_name = "status"
+
+    @classmethod
+    def get_test(cls, id, **params):
+        """
+        Get test's details.
+
+        :param id: public id of the test to retrieve
+        :type id: string
+
+        :returns: Dictionary representing the API's JSON response
+        """
+
+        # API path = "synthetics/tests/
+
+        name = "tests"
+
+        return super(Synthetics, cls)._trigger_synthetics_class_action("GET", id=id, name=name, params=params)
+
+    @classmethod
+    def get_all_tests(cls, **params):
+        """
+        Get all tests' details.
+ + :returns: Dictionary representing the API's JSON response + """ + + for p in ["locations", "tags"]: + if p in params and isinstance(params[p], list): + params[p] = ",".join(params[p]) + + # API path = "synthetics/tests" + + return super(Synthetics, cls).get(id="tests", params=params) + + @classmethod + def get_devices(cls, **params): + """ + Get a list of devices for browser checks + + :returns: Dictionary representing the API's JSON response + """ + + # API path = "synthetics/browser/devices" + + name = "browser/devices" + + return super(Synthetics, cls)._trigger_synthetics_class_action("GET", name=name, params=params) + + @classmethod + def get_locations(cls, **params): + """ + Get a list of all available locations + + :return: Dictionary representing the API's JSON response + """ + + name = "locations" + + # API path = "synthetics/locations + + return super(Synthetics, cls)._trigger_synthetics_class_action("GET", name=name, params=params) + + @classmethod + def get_results(cls, id, **params): + """ + Get the most recent results for a test + + :param id: public id of the test to retrieve results for + :type id: id + + :return: Dictionary representing the API's JSON response + """ + + # API path = "synthetics/tests//results + + path = "tests/{}/results".format(id) + + return super(Synthetics, cls)._trigger_synthetics_class_action("GET", path, params=params) + + @classmethod + def get_result(cls, id, result_id, **params): + """ + Get a specific result for a given test. 
+
+        :param id: public ID of the test to retrieve the most recent result for
+        :type id: id
+
+        :param result_id: result ID of the test to retrieve the most recent result for
+        :type result_id: id
+
+        :returns: Dictionary representing the API's JSON response
+        """
+
+        # API path = "synthetics/tests/results/
+
+        path = "tests/{}/results/{}".format(id, result_id)
+
+        return super(Synthetics, cls)._trigger_synthetics_class_action("GET", path, params=params)
+
+    @classmethod
+    def create_test(cls, **params):
+        """
+        Create a test
+
+        :param name: A unique name for the test
+        :type name: string
+
+        :param type: The type of test. Valid values are api and browser
+        :type type: string
+
+        :param subtype: required for SSL test - For a SSL API test, specify ssl as the value.
+        :Otherwise, you should omit this argument.
+        :type subtype: string
+
+        :param config: The test configuration, contains the request specification and the assertions.
+        :type config: dict
+
+        :param options: List of options to customize the test
+        :type options: dict
+
+        :param message: A description of the test
+        :type message: string
+
+        :param locations: A list of the locations to send the tests from
+        :type locations: list
+
+        :param tags: A list of tags used to filter the test
+        :type tags: list
+
+        :return: Dictionary representing the API's JSON response
+        """
+
+        # API path = "synthetics/tests"
+
+        return super(Synthetics, cls).create(id="tests", **params)
+
+    @classmethod
+    def edit_test(cls, id, **params):
+        """
+        Edit a test
+
+        :param id: Public id of the test to edit
+        :type id: string
+
+        :return: Dictionary representing the API's JSON response
+        """
+
+        # API path = "synthetics/tests/"
+
+        return super(Synthetics, cls).update_synthetics(id=id, **params)
+
+    @classmethod
+    def start_or_pause_test(cls, id, **body):
+        """
+        Start or pause a given test
+
+        :param id: public id of the test to start or pause
+        :type id: string
+
+        :param new_status: new status for the test
+        :type new_status: string
+
+        :returns:
Dictionary representing the API's JSON response + """ + + # API path = "synthetics/tests//status" + + return super(Synthetics, cls).update_synthetics_items(id=id, **body) + + @classmethod + def delete_test(cls, **body): + """ + Delete a test + + :param public_ids: list of public IDs to delete corresponding tests + :type public_ids: list of strings + + :return: Dictionary representing the API's JSON response + """ + + if not isinstance(body["public_ids"], list): + raise ApiError("Parameter 'public_ids' must be a list") + + # API path = "synthetics/tests/delete + + return super(Synthetics, cls)._trigger_action("POST", name="synthetics", id="tests/delete", **body) diff --git a/datadog/api/tags.py b/datadog/api/tags.py index d03369f22..2226cdbee 100644 --- a/datadog/api/tags.py +++ b/datadog/api/tags.py @@ -1,15 +1,21 @@ -from datadog.api.base import CreateableAPIResource, UpdatableAPIResource,\ - DeletableAPIResource, GetableAPIResource, ListableAPIResource - - -class Tag(CreateableAPIResource, UpdatableAPIResource, GetableAPIResource, - ListableAPIResource, DeletableAPIResource): +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from datadog.api.resources import ( + CreateableAPIResource, + UpdatableAPIResource, + DeletableAPIResource, + GetableAPIResource, + ListableAPIResource, +) + + +class Tag(CreateableAPIResource, UpdatableAPIResource, GetableAPIResource, ListableAPIResource, DeletableAPIResource): """ A wrapper around Tag HTTP API. 
""" - _class_name = 'tags' - _class_url = '/tags/hosts' - _plural_class_name = 'tags' + + _resource_name = "tags/hosts" @classmethod def create(cls, host, **body): @@ -22,11 +28,11 @@ def create(cls, host, **body): :param source: source of the tags :type source: string - :return: JSON response from HTTP request + :returns: Dictionary representing the API's JSON response """ params = {} - if 'source' in body: - params['source'] = body['source'] + if "source" in body: + params["source"] = body["source"] return super(Tag, cls).create(id=host, params=params, **body) @classmethod @@ -40,9 +46,9 @@ def update(cls, host, **body): :param source: source of the tags :type source: string - :return: JSON response from HTTP request + :returns: Dictionary representing the API's JSON response """ params = {} - if 'source' in body: - params['source'] = body['source'] + if "source" in body: + params["source"] = body["source"] return super(Tag, cls).update(id=host, params=params, **body) diff --git a/datadog/api/timeboards.py b/datadog/api/timeboards.py index f8877abab..42d34daef 100644 --- a/datadog/api/timeboards.py +++ b/datadog/api/timeboards.py @@ -1,14 +1,20 @@ -from datadog.api.base import GetableAPIResource, CreateableAPIResource, \ - UpdatableAPIResource, ListableAPIResource, DeletableAPIResource +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +from datadog.api.resources import ( + GetableAPIResource, + CreateableAPIResource, + UpdatableAPIResource, + ListableAPIResource, + DeletableAPIResource, +) -class Timeboard(GetableAPIResource, CreateableAPIResource, - UpdatableAPIResource, ListableAPIResource, - DeletableAPIResource): +class Timeboard( + GetableAPIResource, CreateableAPIResource, UpdatableAPIResource, ListableAPIResource, DeletableAPIResource +): """ A wrapper around Timeboard HTTP API. """ - _class_name = 'dash' - _class_url = '/dash' - _plural_class_name = 'dashes' - _json_name = 'dash' + + _resource_name = "dash" diff --git a/datadog/api/users.py b/datadog/api/users.py index d22f2d52a..ff0b2f2b4 100644 --- a/datadog/api/users.py +++ b/datadog/api/users.py @@ -1,10 +1,31 @@ -from datadog.api.base import ActionAPIResource +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from datadog.api.resources import ( + ActionAPIResource, + GetableAPIResource, + CreateableAPIResource, + UpdatableAPIResource, + ListableAPIResource, + DeletableAPIResource, +) -class User(ActionAPIResource): +class User( + ActionAPIResource, + GetableAPIResource, + CreateableAPIResource, + UpdatableAPIResource, + ListableAPIResource, + DeletableAPIResource, +): + + _resource_name = "user" + """ A wrapper around User HTTP API. """ + @classmethod def invite(cls, emails): """ @@ -12,16 +33,18 @@ def invite(cls, emails): *emails* list. If *emails* is a string, it will be wrapped in a list and sent. Returns a list of email addresses for which an email was sent. 
- :param emails: emails adresses to invite to join datadog + :param emails: emails addresses to invite to join datadog :type emails: string list - :returns: JSON response from HTTP request + :returns: Dictionary representing the API's JSON response """ + print("[DEPRECATION] User.invite() is deprecated. Use `create` instead.") + if not isinstance(emails, list): emails = [emails] body = { - 'emails': emails, + "emails": emails, } - return super(User, cls)._trigger_action('POST', '/invite_users', **body) + return super(User, cls)._trigger_action("POST", "/invite_users", **body) diff --git a/datadog/dogshell/__init__.py b/datadog/dogshell/__init__.py index 4232139bc..cb4aab6f5 100644 --- a/datadog/dogshell/__init__.py +++ b/datadog/dogshell/__init__.py @@ -1,54 +1,88 @@ -import argparse +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +# stdlib import os -import pkg_resources as pkg +import warnings +import sys -import logging +# 3p +import argparse -from datadog.dogshell.common import DogshellConfig +# datadog +from datadog import initialize, __version__ from datadog.dogshell.comment import CommentClient -from datadog.dogshell.search import SearchClient -from datadog.dogshell.metric import MetricClient -from datadog.dogshell.tag import TagClient +from datadog.dogshell.common import DogshellConfig +from datadog.dogshell.dashboard_list import DashboardListClient +from datadog.dogshell.downtime import DowntimeClient from datadog.dogshell.event import EventClient +from datadog.dogshell.host import HostClient +from datadog.dogshell.metric import MetricClient from datadog.dogshell.monitor import MonitorClient -from datadog.dogshell.downtime import DowntimeClient from datadog.dogshell.screenboard import ScreenboardClient -from datadog.dogshell.timeboard import TimeboardClient -from 
datadog.dogshell.host import HostClient +from datadog.dogshell.search import SearchClient from datadog.dogshell.service_check import ServiceCheckClient -from datadog import initialize - -logging.getLogger('dd.datadogpy').setLevel(logging.CRITICAL) +from datadog.dogshell.service_level_objective import ServiceLevelObjectiveClient +from datadog.dogshell.tag import TagClient +from datadog.dogshell.timeboard import TimeboardClient +from datadog.dogshell.dashboard import DashboardClient def main(): + if sys.argv[0].endswith("dog"): + warnings.warn("dog is pending deprecation. Please use dogshell instead.", PendingDeprecationWarning) + + parser = argparse.ArgumentParser( + description="Interact with the Datadog API", formatter_class=argparse.ArgumentDefaultsHelpFormatter + ) + parser.add_argument( + "--config", help="location of your dogrc file (default ~/.dogrc)", default=os.path.expanduser("~/.dogrc") + ) + parser.add_argument( + "--api-key", + help="your API key, from " + "https://app.datadoghq.com/account/settings#api. " + "You can also set the environment variables DATADOG_API_KEY or DD_API_KEY", + dest="api_key", + default=os.environ.get("DATADOG_API_KEY", os.environ.get("DD_API_KEY")), + ) + parser.add_argument( + "--application-key", + help="your Application key, from " + "https://app.datadoghq.com/account/settings#api. 
" + "You can also set the environment variables DATADOG_APP_KEY or DD_APP_KEY", + dest="app_key", + default=os.environ.get("DATADOG_APP_KEY", os.environ.get("DD_APP_KEY")), + ) + parser.add_argument( + "--pretty", + help="pretty-print output (suitable for human consumption, " "less useful for scripting)", + dest="format", + action="store_const", + const="pretty", + ) + parser.add_argument( + "--raw", help="raw JSON as returned by the HTTP service", dest="format", action="store_const", const="raw" + ) + parser.add_argument( + "--timeout", help="time to wait in seconds before timing" " out an API call (default 10)", default=10, type=int + ) + parser.add_argument( + "-v", "--version", help="Dog API version", action="version", version="%(prog)s {0}".format(__version__) + ) - parser = argparse.ArgumentParser(description="Interact with the Datadog API", - formatter_class=argparse.ArgumentDefaultsHelpFormatter) - parser.add_argument('--config', help="location of your dogrc file (default ~/.dogrc)", - default=os.path.expanduser('~/.dogrc')) - parser.add_argument('--api-key', help="your API key, from " - "https://app.datadoghq.com/account/settings#api", - dest='api_key', default=None) - parser.add_argument('--application-key', help="your Application key, from " - "https://app.datadoghq.com/account/settings#api", - dest='app_key', default=None) - parser.add_argument('--pretty', help="pretty-print output (suitable for human consumption, " - "less useful for scripting)", dest='format', - action='store_const', const='pretty') - parser.add_argument('--raw', help="raw JSON as returned by the HTTP service", - dest='format', action='store_const', const='raw') - parser.add_argument('--timeout', help="time to wait in seconds before timing" - " out an API call (default 10)", default=10, type=int) - parser.add_argument('-v', '--version', help='Dog API version', action='version', - version='%(prog)s {version}' - .format(version=pkg.require("datadog")[0].version)) + parser.add_argument( 
+ "--api_host", + help="Datadog site to send data, us (datadoghq.com), eu (datadoghq.eu), us3 (us3.datadoghq.com), \ + us5 (us5.datadoghq.com), ap1 (ap1.datadoghq.com), gov (ddog-gov.com), or custom url. default: us", + dest="api_host", + ) config = DogshellConfig() # Set up subparsers for each service - - subparsers = parser.add_subparsers(title='Modes') + subparsers = parser.add_subparsers(title="Modes", dest="mode") + subparsers.required = True CommentClient.setup_parser(subparsers) SearchClient.setup_parser(subparsers) @@ -57,18 +91,23 @@ def main(): EventClient.setup_parser(subparsers) MonitorClient.setup_parser(subparsers) TimeboardClient.setup_parser(subparsers) + DashboardClient.setup_parser(subparsers) ScreenboardClient.setup_parser(subparsers) + DashboardListClient.setup_parser(subparsers) HostClient.setup_parser(subparsers) DowntimeClient.setup_parser(subparsers) ServiceCheckClient.setup_parser(subparsers) + ServiceLevelObjectiveClient.setup_parser(subparsers) args = parser.parse_args() - config.load(args.config, args.api_key, args.app_key) + + config.load(args.config, args.api_key, args.app_key, args.api_host) # Initialize datadog.api package initialize(**config) args.func(args) -if __name__ == '__main__': + +if __name__ == "__main__": main() diff --git a/datadog/dogshell/comment.py b/datadog/dogshell/comment.py index 8b9c61815..208d0093e 100644 --- a/datadog/dogshell/comment.py +++ b/datadog/dogshell/comment.py @@ -1,48 +1,44 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +# stdlib +import json import sys -from datadog.dogshell.common import report_errors, report_warnings +# datadog from datadog import api -from datadog.util.compat import json +from datadog.dogshell.common import report_errors, report_warnings class CommentClient(object): - @classmethod def setup_parser(cls, subparsers): - parser = subparsers.add_parser('comment', help="Post, update, and delete comments.") - verb_parsers = parser.add_subparsers(title='Verbs') + parser = subparsers.add_parser("comment", help="Post, update, and delete comments.") - post_parser = verb_parsers.add_parser('post', help="Post comments.") - post_parser.add_argument('--handle', help="handle to post as. if unset, posts as the owner" - " of the application key used to authenticate") - post_parser.add_argument('comment', help="comment message to post. if unset," - " reads from stdin.", nargs='?') + verb_parsers = parser.add_subparsers(title="Verbs", dest="verb") + verb_parsers.required = True + + post_parser = verb_parsers.add_parser("post", help="Post comments.") + post_parser.add_argument("handle", help="handle to post as.") + post_parser.add_argument("comment", help="comment message to post. if unset," " reads from stdin.", nargs="?") post_parser.set_defaults(func=cls._post) - update_parser = verb_parsers.add_parser('update', help="Update existing comments.") - update_parser.add_argument('comment_id', help="comment to update (by id)") - update_parser.add_argument('--handle', help="handle to post as. if unset, posts" - " as the owner of the application key used to authenticate") - update_parser.add_argument('comment', help="comment message to post." 
- " if unset, reads from stdin.", nargs="?") + update_parser = verb_parsers.add_parser("update", help="Update existing comments.") + update_parser.add_argument("comment_id", help="comment to update (by id)") + update_parser.add_argument("handle", help="handle to post as.") + update_parser.add_argument("comment", help="comment message to post." " if unset, reads from stdin.", nargs="?") update_parser.set_defaults(func=cls._update) - reply_parser = verb_parsers.add_parser('reply', help="Reply to existing comments.") - reply_parser.add_argument('comment_id', help="comment to reply to (by id)") - reply_parser.add_argument('--handle', help="handle to post as. if unset, " - "posts as the owner of the application key used to authenticate") - reply_parser.add_argument('comment', help="comment message to post." - " if unset, reads from stdin.", nargs="?") + reply_parser = verb_parsers.add_parser("reply", help="Reply to existing comments.") + reply_parser.add_argument("comment_id", help="comment to reply to (by id)") + reply_parser.add_argument("handle", help="handle to post as.") + reply_parser.add_argument("comment", help="comment message to post." 
" if unset, reads from stdin.", nargs="?") reply_parser.set_defaults(func=cls._reply) - show_parser = verb_parsers.add_parser('show', help="Show comment details.") - show_parser.add_argument('comment_id', help="comment to show") + show_parser = verb_parsers.add_parser("show", help="Show comment details.") + show_parser.add_argument("comment_id", help="comment to show") show_parser.set_defaults(func=cls._show) - delete_parser = verb_parsers.add_parser('delete', help="Delete comments.") - delete_parser.add_argument('comment_id', help="comment to delete (by id)") - delete_parser.set_defaults(func=cls._delete) - @classmethod def _post(cls, args): api._timeout = args.timeout @@ -54,23 +50,23 @@ def _post(cls, args): res = api.Comment.create(handle=handle, message=comment) report_warnings(res) report_errors(res) - if format == 'pretty': - message = res['comment']['message'] - lines = message.split('\n') - message = '\n'.join([' ' + line for line in lines]) - print('id\t\t' + str(res['comment']['id'])) - print('url\t\t' + res['comment']['url']) - print('resource\t' + res['comment']['resource']) - print('handle\t\t' + res['comment']['handle']) - print('message\n' + message) - elif format == 'raw': + if format == "pretty": + message = res["comment"]["message"] + lines = message.split("\n") + message = "\n".join([" " + line for line in lines]) + print("id\t\t" + str(res["comment"]["id"])) + print("url\t\t" + res["comment"]["url"]) + print("resource\t" + res["comment"]["resource"]) + print("handle\t\t" + res["comment"]["handle"]) + print("message\n" + message) + elif format == "raw": print(json.dumps(res)) else: - print('id\t\t' + str(res['comment']['id'])) - print('url\t\t' + res['comment']['url']) - print('resource\t' + res['comment']['resource']) - print('handle\t\t' + res['comment']['handle']) - print('message\t\t' + res['comment']['message'].__repr__()) + print("id\t\t" + str(res["comment"]["id"])) + print("url\t\t" + res["comment"]["url"]) + print("resource\t" + 
res["comment"]["resource"]) + print("handle\t\t" + res["comment"]["handle"]) + print("message\t\t" + res["comment"]["message"].__repr__()) @classmethod def _update(cls, args): @@ -83,23 +79,23 @@ def _update(cls, args): res = api.Comment.update(id, handle=handle, message=comment) report_warnings(res) report_errors(res) - if format == 'pretty': - message = res['comment']['message'] - lines = message.split('\n') - message = '\n'.join([' ' + line for line in lines]) - print('id\t\t' + str(res['comment']['id'])) - print('url\t\t' + res['comment']['url']) - print('resource\t' + res['comment']['resource']) - print('handle\t\t' + res['comment']['handle']) - print('message\n' + message) - elif format == 'raw': + if format == "pretty": + message = res["comment"]["message"] + lines = message.split("\n") + message = "\n".join([" " + line for line in lines]) + print("id\t\t" + str(res["comment"]["id"])) + print("url\t\t" + res["comment"]["url"]) + print("resource\t" + res["comment"]["resource"]) + print("handle\t\t" + res["comment"]["handle"]) + print("message\n" + message) + elif format == "raw": print(json.dumps(res)) else: - print('id\t\t' + str(res['comment']['id'])) - print('url\t\t' + res['comment']['url']) - print('resource\t' + res['comment']['resource']) - print('handle\t\t' + res['comment']['handle']) - print('message\t\t' + res['comment']['message'].__repr__()) + print("id\t\t" + str(res["comment"]["id"])) + print("url\t\t" + res["comment"]["url"]) + print("resource\t" + res["comment"]["resource"]) + print("handle\t\t" + res["comment"]["handle"]) + print("message\t\t" + res["comment"]["message"].__repr__()) @classmethod def _reply(cls, args): @@ -113,23 +109,23 @@ def _reply(cls, args): res = api.Comment.create(handle=handle, message=comment, related_event_id=id) report_warnings(res) report_errors(res) - if format == 'pretty': - message = res['comment']['message'] - lines = message.split('\n') - message = '\n'.join([' ' + line for line in lines]) - print('id\t\t' + 
str(res['comment']['id'])) - print('url\t\t' + res['comment']['url']) - print('resource\t' + res['comment']['resource']) - print('handle\t\t' + res['comment']['handle']) - print('message\n' + message) - elif format == 'raw': + if format == "pretty": + message = res["comment"]["message"] + lines = message.split("\n") + message = "\n".join([" " + line for line in lines]) + print("id\t\t" + str(res["comment"]["id"])) + print("url\t\t" + res["comment"]["url"]) + print("resource\t" + res["comment"]["resource"]) + print("handle\t\t" + res["comment"]["handle"]) + print("message\n" + message) + elif format == "raw": print(json.dumps(res)) else: - print('id\t\t' + str(res['comment']['id'])) - print('url\t\t' + res['comment']['url']) - print('resource\t' + res['comment']['resource']) - print('handle\t\t' + res['comment']['handle']) - print('message\t\t' + res['comment']['message'].__repr__()) + print("id\t\t" + str(res["comment"]["id"])) + print("url\t\t" + res["comment"]["url"]) + print("resource\t" + res["comment"]["resource"]) + print("handle\t\t" + res["comment"]["handle"]) + print("message\t\t" + res["comment"]["message"].__repr__()) @classmethod def _show(cls, args): @@ -139,27 +135,18 @@ def _show(cls, args): res = api.Event.get(id) report_warnings(res) report_errors(res) - if format == 'pretty': - message = res['event']['text'] - lines = message.split('\n') - message = '\n'.join([' ' + line for line in lines]) - print('id\t\t' + str(res['event']['id'])) - print('url\t\t' + res['event']['url']) - print('resource\t' + res['event']['resource']) - print('message\n' + message) - elif format == 'raw': + if format == "pretty": + message = res["event"]["text"] + lines = message.split("\n") + message = "\n".join([" " + line for line in lines]) + print("id\t\t" + str(res["event"]["id"])) + print("url\t\t" + res["event"]["url"]) + print("resource\t" + res["event"]["resource"]) + print("message\n" + message) + elif format == "raw": print(json.dumps(res)) else: - print('id\t\t' + 
str(res['event']['id'])) - print('url\t\t' + res['event']['url']) - print('resource\t' + res['event']['resource']) - print('message\t\t' + res['event']['text'].__repr__()) - - @classmethod - def _delete(cls, args): - api._timeout = args.timeout - id = args.comment_id - res = api.Comment.delete(id) - if res is not None: - report_warnings(res) - report_errors(res) + print("id\t\t" + str(res["event"]["id"])) + print("url\t\t" + res["event"]["url"]) + print("resource\t" + res["event"]["resource"]) + print("message\t\t" + res["event"]["text"].__repr__()) diff --git a/datadog/dogshell/common.py b/datadog/dogshell/common.py index 33fc3ad45..251e6582b 100644 --- a/datadog/dogshell/common.py +++ b/datadog/dogshell/common.py @@ -1,99 +1,122 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +# stdlib from __future__ import print_function import os import sys -import logging -import socket -from datadog.util.compat import is_p3k, configparser, IterableUserDict,\ - get_input - -log = logging.getLogger('dd.datadogpy') +# datadog +from datadog.util.compat import is_p3k, configparser, IterableUserDict, get_input def print_err(msg): if is_p3k(): - print('ERROR: ' + msg + '\n', file=sys.stderr) + print(msg + "\n", file=sys.stderr) else: - sys.stderr.write(msg + '\n') + sys.stderr.write(msg + "\n") + sys.stderr.flush() def report_errors(res): - if 'errors' in res: - for e in res['errors']: - print_err('ERROR: ' + e) + if "errors" in res: + errors = res["errors"] + if isinstance(errors, list): + for error in errors: + print_err("ERROR: {}".format(error)) + else: + print_err("ERROR: {}".format(errors)) sys.exit(1) return False def report_warnings(res): - if 'warnings' in res: - for e in res['warnings']: - print_err('WARNING: ' + e) + if "warnings" in res: + warnings = res["warnings"] + if 
isinstance(warnings, list): + for warning in warnings: + print_err("WARNING: {}".format(warning)) + else: + print_err("WARNING: {}".format(warnings)) return True return False -memoized_hostname = None - - -def find_localhost(): - try: - global memoized_hostname - if memoized_hostname is None: - memoized_hostname = socket.getfqdn() - return memoized_hostname - except Exception: - logging.exception("Cannot determine local hostname") - - class DogshellConfig(IterableUserDict): - - def load(self, config_file, api_key, app_key): + def load(self, config_file, api_key, app_key, api_host): config = configparser.ConfigParser() + if api_host is not None: + if api_host in ("datadoghq.com", "us"): + self["api_host"] = "https://api.datadoghq.com" + elif api_host in ("datadoghq.eu", "eu"): + self["api_host"] = "https://api.datadoghq.eu" + elif api_host in ("us3.datadoghq.com", "us3"): + self["api_host"] = "https://api.us3.datadoghq.com" + elif api_host in ("us5.datadoghq.com", "us5"): + self["api_host"] = "https://api.us5.datadoghq.com" + elif api_host in ("ap1.datadoghq.com", "ap1"): + self["api_host"] = "https://api.ap1.datadoghq.com" + elif api_host in ("ddog-gov.com", "gov"): + self["api_host"] = "https://api.ddog-gov.com" + else: + self["api_host"] = api_host if api_key is not None and app_key is not None: - self['api_key'] = api_key - self['app_key'] = app_key + self["api_key"] = api_key + self["app_key"] = app_key else: if os.access(config_file, os.F_OK): config.read(config_file) - if not config.has_section('Connection'): - report_errors({'errors': ['%s has no [Connection] section' % config_file]}) + if not config.has_section("Connection"): + report_errors({"errors": ["%s has no [Connection] section" % config_file]}) else: try: - response = '' - while response.strip().lower() not in ['y', 'n']: - response = get_input('%s does not exist. Would you like to' - ' create it? 
[Y/n] ' % config_file) - if response.strip().lower() in ['', 'y', 'yes']: + response = None + while response is None or response.strip().lower() not in ["", "y", "n"]: + response = get_input("%s does not exist. Would you like to" " create it? [Y/n] " % config_file) + if response.strip().lower() in ["", "y"]: # Read the api and app keys from stdin - api_key = get_input("What is your api key? (Get it here: " - "https://app.datadoghq.com/account/settings#api) ") - app_key = get_input("What is your application key? (Generate one here: " - "https://app.datadoghq.com/account/settings#api) ") + while True: + api_key = get_input( + "What is your api key? (Get it here: " + "https://app.datadoghq.com/account/settings#api) " + ) + if api_key.isalnum(): + break + print("Datadog api keys can only contain alphanumeric characters.") + while True: + app_key = get_input( + "What is your app key? (Get it here: " + "https://app.datadoghq.com/account/settings#api) " + ) + if app_key.isalnum(): + break + print("Datadog app keys can only contain alphanumeric characters.") # Write the config file - config.add_section('Connection') - config.set('Connection', 'apikey', api_key) - config.set('Connection', 'appkey', app_key) + config.add_section("Connection") + config.set("Connection", "apikey", api_key) + config.set("Connection", "appkey", app_key) - f = open(config_file, 'w') + f = open(config_file, "w") config.write(f) f.close() - print('Wrote %s' % config_file) - elif response.strip().lower() == 'n': + print("Wrote %s" % config_file) + elif response.strip().lower() == "n": # Abort - print_err('Exiting\n') + print_err("Exiting\n") sys.exit(1) - except KeyboardInterrupt: + except (KeyboardInterrupt, EOFError): # Abort - print_err('\nExiting') + print_err("\nExiting") sys.exit(1) - self['api_key'] = config.get('Connection', 'apikey') - self['app_key'] = config.get('Connection', 'appkey') - if config.has_option('Connection', 'host_name'): - self['host_name'] = config.get('Connection', 
'host_name') - if config.has_option('Connection', 'api_host'): - self['api_host'] = config.get('Connection', 'api_host') - assert self['api_key'] is not None and self['app_key'] is not None + self["api_key"] = config.get("Connection", "apikey") + self["app_key"] = config.get("Connection", "appkey") + if config.has_section("Proxy"): + self["proxies"] = dict(config.items("Proxy")) + if config.has_option("Connection", "host_name"): + self["host_name"] = config.get("Connection", "host_name") + if config.has_option("Connection", "api_host"): + self["api_host"] = config.get("Connection", "api_host") + assert self["api_key"] is not None and self["app_key"] is not None diff --git a/datadog/dogshell/dashboard.py b/datadog/dogshell/dashboard.py new file mode 100644 index 000000000..bc37bd6ae --- /dev/null +++ b/datadog/dogshell/dashboard.py @@ -0,0 +1,174 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +# stdlib +import json +import sys + +# 3p +import argparse + +# datadog +from datadog import api +from datadog.dogshell.common import report_errors, report_warnings +from datadog.util.format import pretty_json + + +class DashboardClient(object): + @classmethod + def setup_parser(cls, subparsers): + parser = subparsers.add_parser("dashboard", help="Create, edit, and delete dashboards") + + verb_parsers = parser.add_subparsers(title="Verbs", dest="verb") + verb_parsers.required = True + + post_parser = verb_parsers.add_parser("post", help="Create dashboards") + # Required arguments: + post_parser.add_argument("title", help="title for the new dashboard") + post_parser.add_argument( + "widgets", help="widget definitions as a JSON string. If unset," " reads from stdin.", nargs="?" 
+ ) + post_parser.add_argument("layout_type", choices=["ordered", "free"], help="Layout type of the dashboard.") + # Optional arguments: + post_parser.add_argument("--description", help="Short description of the dashboard") + post_parser.add_argument( + "--read_only", + help="Whether this dashboard is read-only. " "If True, only the author and admins can make changes to it.", + action="store_true", + ) + post_parser.add_argument( + "--notify_list", + type=_json_string, + help="A json list of user handles, e.g. " '\'["user1@domain.com", "user2@domain.com"]\'', + ) + post_parser.add_argument( + "--template_variables", + type=_json_string, + help="A json list of template variable dicts, e.g. " + '\'[{"name": "host", "prefix": "host", ' + '"default": "my-host"}]\'', + ) + post_parser.set_defaults(func=cls._post) + + update_parser = verb_parsers.add_parser("update", help="Update existing dashboards") + # Required arguments: + update_parser.add_argument("dashboard_id", help="Dashboard to replace" " with the new definition") + update_parser.add_argument("title", help="New title for the dashboard") + update_parser.add_argument( + "widgets", help="Widget definitions as a JSON string." " If unset, reads from stdin", nargs="?" + ) + update_parser.add_argument("layout_type", choices=["ordered", "free"], help="Layout type of the dashboard.") + # Optional arguments: + update_parser.add_argument("--description", help="Short description of the dashboard") + update_parser.add_argument( + "--read_only", + help="Whether this dashboard is read-only. " "If True, only the author and admins can make changes to it.", + action="store_true", + ) + update_parser.add_argument( + "--notify_list", + type=_json_string, + help="A json list of user handles, e.g. " '\'["user1@domain.com", "user2@domain.com"]\'', + ) + update_parser.add_argument( + "--template_variables", + type=_json_string, + help="A json list of template variable dicts, e.g. 
" + '\'[{"name": "host", "prefix": "host", ' + '"default": "my-host"}]\'', + ) + update_parser.set_defaults(func=cls._update) + + show_parser = verb_parsers.add_parser("show", help="Show a dashboard definition") + show_parser.add_argument("dashboard_id", help="Dashboard to show") + show_parser.set_defaults(func=cls._show) + + delete_parser = verb_parsers.add_parser("delete", help="Delete dashboards") + delete_parser.add_argument("dashboard_id", help="Dashboard to delete") + delete_parser.set_defaults(func=cls._delete) + + @classmethod + def _post(cls, args): + api._timeout = args.timeout + format = args.format + widgets = args.widgets + if args.widgets is None: + widgets = sys.stdin.read() + widgets = json.loads(widgets) + + # Required arguments + payload = {"title": args.title, "widgets": widgets, "layout_type": args.layout_type} + # Optional arguments + if args.description: + payload["description"] = args.description + if args.read_only: + payload["is_read_only"] = args.read_only + if args.notify_list: + payload["notify_list"] = args.notify_list + if args.template_variables: + payload["template_variables"] = args.template_variables + + res = api.Dashboard.create(**payload) + report_warnings(res) + report_errors(res) + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _update(cls, args): + api._timeout = args.timeout + format = args.format + widgets = args.widgets + if args.widgets is None: + widgets = sys.stdin.read() + widgets = json.loads(widgets) + + # Required arguments + payload = {"title": args.title, "widgets": widgets, "layout_type": args.layout_type} + # Optional arguments + if args.description: + payload["description"] = args.description + if args.read_only: + payload["is_read_only"] = args.read_only + if args.notify_list: + payload["notify_list"] = args.notify_list + if args.template_variables: + payload["template_variables"] = args.template_variables + + res = 
api.Dashboard.update(args.dashboard_id, **payload) + report_warnings(res) + report_errors(res) + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _show(cls, args): + api._timeout = args.timeout + format = args.format + res = api.Dashboard.get(args.dashboard_id) + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _delete(cls, args): + api._timeout = args.timeout + res = api.Dashboard.delete(args.dashboard_id) + if res is not None: + report_warnings(res) + report_errors(res) + + +def _json_string(str): + try: + return json.loads(str) + except Exception: + raise argparse.ArgumentTypeError("bad json parameter") diff --git a/datadog/dogshell/dashboard_list.py b/datadog/dogshell/dashboard_list.py new file mode 100644 index 000000000..9164ba70d --- /dev/null +++ b/datadog/dogshell/dashboard_list.py @@ -0,0 +1,339 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +# stdlib +import json + +# 3p +from datadog.util.format import pretty_json + +# datadog +from datadog import api +from datadog.dogshell.common import report_errors, report_warnings + + +class DashboardListClient(object): + @classmethod + def setup_parser(cls, subparsers): + parser = subparsers.add_parser("dashboard_list", help="Create, edit, and delete dashboard lists") + verb_parsers = parser.add_subparsers(title="Verbs", dest="verb") + verb_parsers.required = True + + # Create Dashboard List parser + post_parser = verb_parsers.add_parser("post", help="Create a dashboard list") + post_parser.add_argument("name", help="Name for the dashboard list") + post_parser.set_defaults(func=cls._post) + + # Update Dashboard List parser + update_parser = verb_parsers.add_parser("update", help="Update existing dashboard list") + update_parser.add_argument("dashboard_list_id", help="Dashboard list to replace with the new definition") + update_parser.add_argument("name", help="Name for the dashboard list") + update_parser.set_defaults(func=cls._update) + + # Show Dashboard List parser + show_parser = verb_parsers.add_parser("show", help="Show a dashboard list definition") + show_parser.add_argument("dashboard_list_id", help="Dashboard list to show") + show_parser.set_defaults(func=cls._show) + + # Show All Dashboard Lists parser + show_all_parser = verb_parsers.add_parser("show_all", help="Show a list of all dashboard lists") + show_all_parser.set_defaults(func=cls._show_all) + + # Delete Dashboard List parser + delete_parser = verb_parsers.add_parser("delete", help="Delete existing dashboard list") + delete_parser.add_argument("dashboard_list_id", help="Dashboard list to delete") + delete_parser.set_defaults(func=cls._delete) + + # Get Dashboards for Dashboard List parser + get_dashboards_parser = verb_parsers.add_parser( + "show_dashboards", help="Show a list of all dashboards for an existing dashboard list" + ) + 
get_dashboards_parser.add_argument("dashboard_list_id", help="Dashboard list to show dashboards from") + get_dashboards_parser.set_defaults(func=cls._show_dashboards) + + # Get Dashboards for Dashboard List parser (v2) + get_dashboards_v2_parser = verb_parsers.add_parser( + "show_dashboards_v2", help="Show a list of all dashboards for an existing dashboard list" + ) + get_dashboards_v2_parser.add_argument("dashboard_list_id", help="Dashboard list to show dashboards from") + get_dashboards_v2_parser.set_defaults(func=cls._show_dashboards_v2) + + # Add Dashboards to Dashboard List parser + add_dashboards_parser = verb_parsers.add_parser( + "add_dashboards", help="Add dashboards to an existing dashboard list" + ) + add_dashboards_parser.add_argument("dashboard_list_id", help="Dashboard list to add dashboards to") + + add_dashboards_parser.add_argument( + "dashboards", + help="A JSON list of dashboard dicts, e.g. " + + '[{"type": "custom_timeboard", "id": 1234}, ' + + '{"type": "custom_screenboard", "id": 123}]', + ) + add_dashboards_parser.set_defaults(func=cls._add_dashboards) + + # Add Dashboards to Dashboard List parser (v2) + add_dashboards_v2_parser = verb_parsers.add_parser( + "add_dashboards_v2", help="Add dashboards to an existing dashboard list" + ) + add_dashboards_v2_parser.add_argument("dashboard_list_id", help="Dashboard list to add dashboards to") + add_dashboards_v2_parser.add_argument( + "dashboards", + help="A JSON list of dashboard dicts, e.g. 
" + + '[{"type": "custom_timeboard", "id": "ewc-a4f-8ps"}, ' + + '{"type": "custom_screenboard", "id": "kwj-3t3-d3m"}]', + ) + add_dashboards_v2_parser.set_defaults(func=cls._add_dashboards_v2) + + # Update Dashboards of Dashboard List parser + update_dashboards_parser = verb_parsers.add_parser( + "update_dashboards", help="Update dashboards of an existing dashboard list" + ) + update_dashboards_parser.add_argument("dashboard_list_id", help="Dashboard list to update with dashboards") + update_dashboards_parser.add_argument( + "dashboards", + help="A JSON list of dashboard dicts, e.g. " + + '[{"type": "custom_timeboard", "id": 1234}, ' + + '{"type": "custom_screenboard", "id": 123}]', + ) + update_dashboards_parser.set_defaults(func=cls._update_dashboards) + + # Update Dashboards of Dashboard List parser (v2) + update_dashboards_v2_parser = verb_parsers.add_parser( + "update_dashboards_v2", help="Update dashboards of an existing dashboard list" + ) + update_dashboards_v2_parser.add_argument("dashboard_list_id", help="Dashboard list to update with dashboards") + update_dashboards_v2_parser.add_argument( + "dashboards", + help="A JSON list of dashboard dicts, e.g. " + + '[{"type": "custom_timeboard", "id": "ewc-a4f-8ps"}, ' + + '{"type": "custom_screenboard", "id": "kwj-3t3-d3m"}]', + ) + update_dashboards_v2_parser.set_defaults(func=cls._update_dashboards_v2) + + # Delete Dashboards from Dashboard List parser + delete_dashboards_parser = verb_parsers.add_parser( + "delete_dashboards", help="Delete dashboards from an existing dashboard list" + ) + delete_dashboards_parser.add_argument("dashboard_list_id", help="Dashboard list to delete dashboards from") + delete_dashboards_parser.add_argument( + "dashboards", + help="A JSON list of dashboard dicts, e.g. 
" + + '[{"type": "custom_timeboard", "id": 1234}, ' + + '{"type": "custom_screenboard", "id": 123}]', + ) + delete_dashboards_parser.set_defaults(func=cls._delete_dashboards) + + # Delete Dashboards from Dashboard List parser + delete_dashboards_v2_parser = verb_parsers.add_parser( + "delete_dashboards_v2", help="Delete dashboards from an existing dashboard list" + ) + delete_dashboards_v2_parser.add_argument("dashboard_list_id", help="Dashboard list to delete dashboards from") + delete_dashboards_v2_parser.add_argument( + "dashboards", + help="A JSON list of dashboard dicts, e.g. " + + '[{"type": "custom_timeboard", "id": "ewc-a4f-8ps"}, ' + + '{"type": "custom_screenboard", "id": "kwj-3t3-d3m"}]', + ) + delete_dashboards_v2_parser.set_defaults(func=cls._delete_dashboards_v2) + + @classmethod + def _post(cls, args): + api._timeout = args.timeout + format = args.format + name = args.name + + res = api.DashboardList.create(name=name) + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _update(cls, args): + api._timeout = args.timeout + format = args.format + dashboard_list_id = args.dashboard_list_id + name = args.name + + res = api.DashboardList.update(dashboard_list_id, name=name) + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _show(cls, args): + api._timeout = args.timeout + format = args.format + dashboard_list_id = args.dashboard_list_id + + res = api.DashboardList.get(dashboard_list_id) + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _show_all(cls, args): + api._timeout = args.timeout + format = args.format + + res = api.DashboardList.get_all() + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + 
print(json.dumps(res)) + + @classmethod + def _delete(cls, args): + api._timeout = args.timeout + format = args.format + dashboard_list_id = args.dashboard_list_id + + res = api.DashboardList.delete(dashboard_list_id) + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _show_dashboards(cls, args): + api._timeout = args.timeout + format = args.format + dashboard_list_id = args.dashboard_list_id + + res = api.DashboardList.get_items(dashboard_list_id) + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _show_dashboards_v2(cls, args): + api._timeout = args.timeout + format = args.format + dashboard_list_id = args.dashboard_list_id + + res = api.DashboardList.v2.get_items(dashboard_list_id) + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _add_dashboards(cls, args): + api._timeout = args.timeout + format = args.format + dashboard_list_id = args.dashboard_list_id + dashboards = json.loads(args.dashboards) + + res = api.DashboardList.add_items(dashboard_list_id, dashboards=dashboards) + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _add_dashboards_v2(cls, args): + api._timeout = args.timeout + format = args.format + dashboard_list_id = args.dashboard_list_id + dashboards = json.loads(args.dashboards) + + res = api.DashboardList.v2.add_items(dashboard_list_id, dashboards=dashboards) + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _update_dashboards(cls, args): + api._timeout = args.timeout + format = args.format + dashboard_list_id = args.dashboard_list_id + 
dashboards = json.loads(args.dashboards) + + res = api.DashboardList.update_items(dashboard_list_id, dashboards=dashboards) + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _update_dashboards_v2(cls, args): + api._timeout = args.timeout + format = args.format + dashboard_list_id = args.dashboard_list_id + dashboards = json.loads(args.dashboards) + + res = api.DashboardList.v2.update_items(dashboard_list_id, dashboards=dashboards) + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _delete_dashboards(cls, args): + api._timeout = args.timeout + format = args.format + dashboard_list_id = args.dashboard_list_id + dashboards = json.loads(args.dashboards) + + res = api.DashboardList.delete_items(dashboard_list_id, dashboards=dashboards) + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _delete_dashboards_v2(cls, args): + api._timeout = args.timeout + format = args.format + dashboard_list_id = args.dashboard_list_id + dashboards = json.loads(args.dashboards) + + res = api.DashboardList.v2.delete_items(dashboard_list_id, dashboards=dashboards) + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) diff --git a/datadog/dogshell/downtime.py b/datadog/dogshell/downtime.py index 335c242fd..1c53b4600 100644 --- a/datadog/dogshell/downtime.py +++ b/datadog/dogshell/downtime.py @@ -1,62 +1,77 @@ -from datadog.util.compat import json -from datadog.dogshell.common import report_errors, report_warnings +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +# stdlib +import json + +# 3p +from datadog.util.format import pretty_json +# datadog from datadog import api +from datadog.dogshell.common import report_errors, report_warnings class DowntimeClient(object): @classmethod def setup_parser(cls, subparsers): - parser = subparsers.add_parser('downtime', help="Create, edit, and delete downtimes") - parser.add_argument('--string_ids', action='store_true', dest='string_ids', - help="Represent downtime IDs as strings instead of ints in JSON") - - verb_parsers = parser.add_subparsers(title='Verbs') - - post_parser = verb_parsers.add_parser('post', help="Create a downtime") - post_parser.add_argument('scope', help="scope to apply downtime to") - post_parser.add_argument('start', help="POSIX timestamp to start the downtime", - default=None) - post_parser.add_argument('--end', help="POSIX timestamp to end the downtime", default=None) - post_parser.add_argument('--message', help="message to include with notifications" - " for this downtime", default=None) + parser = subparsers.add_parser("downtime", help="Create, edit, and delete downtimes") + parser.add_argument( + "--string_ids", + action="store_true", + dest="string_ids", + help="Represent downtime IDs as strings instead of ints in JSON", + ) + + verb_parsers = parser.add_subparsers(title="Verbs", dest="verb") + verb_parsers.required = True + + post_parser = verb_parsers.add_parser("post", help="Create a downtime") + post_parser.add_argument("scope", help="scope to apply downtime to") + post_parser.add_argument("start", help="POSIX timestamp to start the downtime", default=None) + post_parser.add_argument("--end", help="POSIX timestamp to end the downtime", default=None) + post_parser.add_argument( + "--message", help="message to include with notifications" " for this downtime", default=None + ) post_parser.set_defaults(func=cls._schedule_downtime) - update_parser = verb_parsers.add_parser('update', help="Update existing downtime") - 
update_parser.add_argument('downtime_id', help="downtime to replace" - " with the new definition") - update_parser.add_argument('--scope', help="scope to apply downtime to") - update_parser.add_argument('--start', help="POSIX timestamp to start" - " the downtime", default=None) - update_parser.add_argument('--end', help="POSIX timestamp to" - " end the downtime", default=None) - update_parser.add_argument('--message', help="message to include with notifications" - " for this downtime", default=None) + update_parser = verb_parsers.add_parser("update", help="Update existing downtime") + update_parser.add_argument("downtime_id", help="downtime to replace" " with the new definition") + update_parser.add_argument("--scope", help="scope to apply downtime to") + update_parser.add_argument("--start", help="POSIX timestamp to start" " the downtime", default=None) + update_parser.add_argument("--end", help="POSIX timestamp to" " end the downtime", default=None) + update_parser.add_argument( + "--message", help="message to include with notifications" " for this downtime", default=None + ) update_parser.set_defaults(func=cls._update_downtime) - show_parser = verb_parsers.add_parser('show', help="Show a downtime definition") - show_parser.add_argument('downtime_id', help="downtime to show") + show_parser = verb_parsers.add_parser("show", help="Show a downtime definition") + show_parser.add_argument("downtime_id", help="downtime to show") show_parser.set_defaults(func=cls._show_downtime) - show_all_parser = verb_parsers.add_parser('show_all', help="Show a list of all downtimes") - show_all_parser.add_argument('--current_only', help="only return downtimes that" - " are active when the request is made", default=None) + show_all_parser = verb_parsers.add_parser("show_all", help="Show a list of all downtimes") + show_all_parser.add_argument( + "--current_only", help="only return downtimes that" " are active when the request is made", default=None + ) 
show_all_parser.set_defaults(func=cls._show_all_downtime) - delete_parser = verb_parsers.add_parser('delete', help="Delete a downtime") - delete_parser.add_argument('downtime_id', help="downtime to delete") + delete_parser = verb_parsers.add_parser("delete", help="Delete a downtime") + delete_parser.add_argument("downtime_id", help="downtime to delete") delete_parser.set_defaults(func=cls._cancel_downtime) + cancel_parser = verb_parsers.add_parser("cancel_by_scope", help="Cancel all downtimes with a given scope") + cancel_parser.add_argument("scope", help="The scope of the downtimes to cancel") + cancel_parser.set_defaults(func=cls._cancel_downtime_by_scope) + @classmethod def _schedule_downtime(cls, args): api._timeout = args.timeout format = args.format - res = api.Downtime.create(scope=args.scope, start=args.start, - end=args.end, message=args.message) + res = api.Downtime.create(scope=args.scope, start=args.start, end=args.end, message=args.message) report_warnings(res) report_errors(res) - if format == 'pretty': - print(cls._pretty_json(res)) + if format == "pretty": + print(pretty_json(res)) else: print(json.dumps(res)) @@ -64,12 +79,13 @@ def _schedule_downtime(cls, args): def _update_downtime(cls, args): api._timeout = args.timeout format = args.format - res = api.Downtime.update(args.downtime_id, scope=args.scope, start=args.start, - end=args.end, message=args.message) + res = api.Downtime.update( + args.downtime_id, scope=args.scope, start=args.start, end=args.end, message=args.message + ) report_warnings(res) report_errors(res) - if format == 'pretty': - print(cls._pretty_json(res)) + if format == "pretty": + print(pretty_json(res)) else: print(json.dumps(res)) @@ -88,8 +104,8 @@ def _show_downtime(cls, args): res = api.Downtime.get(args.downtime_id) report_warnings(res) report_errors(res) - if format == 'pretty': - print(cls._pretty_json(res)) + if format == "pretty": + print(pretty_json(res)) else: print(json.dumps(res)) @@ -100,7 +116,17 @@ def 
_show_all_downtime(cls, args): res = api.Downtime.get_all(current_only=args.current_only) report_warnings(res) report_errors(res) - if format == 'pretty': - print(cls._pretty_json(res)) + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _cancel_downtime_by_scope(cls, args): + api._timeout = args.timeout + format = args.format + res = api.Downtime.cancel_downtime_by_scope(scope=args.scope) + if format == "pretty": + print(pretty_json(res)) else: print(json.dumps(res)) diff --git a/datadog/dogshell/event.py b/datadog/dogshell/event.py index e8480e8a0..89d68a60c 100644 --- a/datadog/dogshell/event.py +++ b/datadog/dogshell/event.py @@ -1,25 +1,32 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +# stdlib import datetime import time import re import sys +import json -from datadog.util.compat import json -from datadog.dogshell.common import report_errors, report_warnings +# datadog from datadog import api +from datadog.dogshell.common import report_errors, report_warnings + + +time_pat = re.compile(r"(?P[0-9]*\.?[0-9]+)(?P[mhd])") def prettyprint_event(event): - title = event['title'] or '' - text = event.get('text', '') or '' - handle = event.get('handle', '') or '' - date = event['date_happened'] + title = event["title"] or "" + text = event.get("text", "") or "" + handle = event.get("handle", "") or "" + date = event["date_happened"] dt = datetime.datetime.fromtimestamp(date) - link = event['url'] - # Encode UTF-8 - title = title.encode('utf8') - handle = handle.encode('utf8') - print((title + ' ' + text + ' ' + b' (' + handle + b')').strip()) - print(dt.isoformat(' ') + ' | ' + link) + link = event["url"] + + # Print + print((title + " " + text + " " + " (" + handle + ")").strip()) + print(dt.isoformat(" ") + " | " + link) 
def print_event(event): @@ -33,8 +40,6 @@ def prettyprint_event_details(event): def print_event_details(event): prettyprint_event(event) -time_pat = re.compile(r'(?P[0-9]*\.?[0-9]+)(?P[mhd])') - def parse_time(timestring): now = time.mktime(datetime.datetime.now().timetuple()) @@ -43,94 +48,113 @@ def parse_time(timestring): else: try: t = int(timestring) - except: + except Exception: match = time_pat.match(timestring) if match is None: raise Exception - delta = float(match.group('delta')) - unit = match.group('unit') - if unit == 'm': + delta = float(match.group("delta")) + unit = match.group("unit") + if unit == "m": delta = delta * 60 - if unit == 'h': + if unit == "h": delta = delta * 60 * 60 - if unit == 'd': + if unit == "d": delta = delta * 60 * 60 * 24 t = now - int(delta) return int(t) class EventClient(object): - @classmethod def setup_parser(cls, subparsers): - parser = subparsers.add_parser('event', help="Post events, get event details," - " and view the event stream.") - verb_parsers = parser.add_subparsers(title='Verbs') - - post_parser = verb_parsers.add_parser('post', help="Post events.") - post_parser.add_argument('title', help="event title") - post_parser.add_argument('--date_happened', help="POSIX timestamp" - " when the event occurred. if unset defaults to the current time.") - post_parser.add_argument('--handle', help="user to post as. if unset, submits " - "as the generic API user.") - post_parser.add_argument('--priority', help='"normal" or "low". defaults to "normal"') - post_parser.add_argument('--related_event_id', help="event to post as a child of." - " if unset, posts a top-level event") - post_parser.add_argument('--tags', help="comma separated list of tags") - post_parser.add_argument('--host', help="related host") - post_parser.add_argument('--device', help="related device (e.g. 
eth0, /dev/sda1)") - post_parser.add_argument('--aggregation_key', help="key to aggregate the event with") - post_parser.add_argument('--type', help="type of event, e.g. nagios, jenkins, etc.") - post_parser.add_argument('message', help="event message body. " - "if unset, reads from stdin.", nargs="?") + parser = subparsers.add_parser("event", help="Post events, get event details," " and view the event stream.") + verb_parsers = parser.add_subparsers(title="Verbs", dest="verb") + verb_parsers.required = True + + post_parser = verb_parsers.add_parser("post", help="Post events.") + post_parser.add_argument("title", help="event title") + post_parser.add_argument( + "--date_happened", + type=int, + help="POSIX timestamp" " when the event occurred. if unset defaults to the current time.", + ) + post_parser.add_argument("--handle", help="user to post as. if unset, submits " "as the generic API user.") + post_parser.add_argument("--priority", help='"normal" or "low". defaults to "normal"', default="normal") + post_parser.add_argument( + "--related_event_id", help="event to post as a child of." " if unset, posts a top-level event" + ) + post_parser.add_argument("--tags", help="comma separated list of tags") + post_parser.add_argument("--host", help="related host (default to the local host name)", default="") + post_parser.add_argument( + "--no_host", help="no host is associated with the event" " (overrides --host))", action="store_true" + ) + post_parser.add_argument("--device", help="related device (e.g. eth0, /dev/sda1)") + post_parser.add_argument("--aggregation_key", help="key to aggregate the event with") + post_parser.add_argument("--type", help="type of event, e.g. nagios, jenkins, etc.") + post_parser.add_argument("--alert_type", help='"error", "warning", "info" or "success". defaults to "info"') + post_parser.add_argument("message", help="event message body. 
" "if unset, reads from stdin.", nargs="?") post_parser.set_defaults(func=cls._post) - show_parser = verb_parsers.add_parser('show', help="Show event details.") - show_parser.add_argument('event_id', help="event to show") + show_parser = verb_parsers.add_parser("show", help="Show event details.") + show_parser.add_argument("event_id", help="event to show") show_parser.set_defaults(func=cls._show) stream_parser = verb_parsers.add_parser( - 'stream', - help="Delete comments.", + "stream", + help="Retrieve events from the Event Stream", description="Stream start and end times can be specified as either a POSIX" " timestamp (e.g. the output of `date +%s`) or as a period of" - " time in the past (e.g. '5m', '6h', '3d').") - stream_parser.add_argument('start', help="start date for the stream request") - stream_parser.add_argument('end', help="end date for the stream request " - "(defaults to 'now')", nargs='?') - stream_parser.add_argument('--priority', help="filter by priority." - " 'normal' or 'low'. defaults to 'normal'") - stream_parser.add_argument('--sources', help="comma separated list of sources to filter by") - stream_parser.add_argument('--tags', help="comma separated list of tags to filter by") + " time in the past (e.g. '5m', '6h', '3d').", + ) + stream_parser.add_argument("start", help="start date for the stream request") + stream_parser.add_argument("end", help="end date for the stream request " "(defaults to 'now')", nargs="?") + stream_parser.add_argument("--priority", help="filter by priority." " 'normal' or 'low'. defaults to 'normal'") + stream_parser.add_argument("--sources", help="comma separated list of sources to filter by") + stream_parser.add_argument("--tags", help="comma separated list of tags to filter by") stream_parser.set_defaults(func=cls._stream) @classmethod def _post(cls, args): + """ + Post an event. 
+ """ api._timeout = args.timeout format = args.format message = args.message if message is None: message = sys.stdin.read() if args.tags is not None: - tags = [t.strip() for t in args.tags.split(',')] + tags = [t.strip() for t in args.tags.split(",")] else: tags = None + + host = None if args.no_host else args.host + + # Submit event res = api.Event.create( - title=args.title, text=message, - # TODO FXIME - # date_happened=args.date_happened, - handle=args.handle, priority=args.priority, - related_event_id=args.related_event_id, tags=tags, host=args.host, - device=args.device, aggregation_key=args.aggregation_key, - source_type_name=args.type) + title=args.title, + text=message, + date_happened=args.date_happened, + handle=args.handle, + priority=args.priority, + related_event_id=args.related_event_id, + tags=tags, + host=host, + device=args.device, + aggregation_key=args.aggregation_key, + source_type_name=args.type, + alert_type=args.alert_type, + ) + + # Report report_warnings(res) report_errors(res) - if format == 'pretty': - prettyprint_event(res['event']) - elif format == 'raw': + if format == "pretty": + prettyprint_event(res["event"]) + elif format == "raw": print(json.dumps(res)) else: - print_event(res['event']) + print_event(res["event"]) @classmethod def _show(cls, args): @@ -139,40 +163,39 @@ def _show(cls, args): res = api.Event.get(args.event_id) report_warnings(res) report_errors(res) - if format == 'pretty': - prettyprint_event_details(res['event']) - elif format == 'raw': + if format == "pretty": + prettyprint_event_details(res["event"]) + elif format == "raw": print(json.dumps(res)) else: - print_event_details(res['event']) + print_event_details(res["event"]) @classmethod def _stream(cls, args): api._timeout = args.timeout format = args.format if args.sources is not None: - sources = [s.strip() for s in args.sources.split(',')] + sources = [s.strip() for s in args.sources.split(",")] else: sources = None if args.tags is not None: - tags = 
[t.strip() for t in args.tags.split(',')] + tags = [t.strip() for t in args.tags.split(",")] else: tags = None start = parse_time(args.start) end = parse_time(args.end) # res = api.Event.query(start=start, end=end) # TODO FIXME - res = api.Event.query(start=start, end=end, priority=args.priority, - sources=sources, tags=tags) + res = api.Event.query(start=start, end=end, priority=args.priority, sources=sources, tags=tags) report_warnings(res) report_errors(res) - if format == 'pretty': - for event in res['events']: + if format == "pretty": + for event in res["events"]: prettyprint_event(event) print() - elif format == 'raw': + elif format == "raw": print(json.dumps(res)) else: - for event in res['events']: + for event in res["events"]: print_event(event) print() diff --git a/datadog/dogshell/host.py b/datadog/dogshell/host.py index d0a33af88..1f93a78bb 100644 --- a/datadog/dogshell/host.py +++ b/datadog/dogshell/host.py @@ -1,39 +1,50 @@ -from datadog.dogshell.common import report_errors, report_warnings -from datadog.util.compat import json +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +# stdlib +import json + +# 3p +from datadog.util.format import pretty_json + +# datadog from datadog import api +from datadog.dogshell.common import report_errors, report_warnings class HostClient(object): @classmethod def setup_parser(cls, subparsers): - parser = subparsers.add_parser('host', help='Mute, unmute hosts') - verb_parsers = parser.add_subparsers(title='Verbs') - - mute_parser = verb_parsers.add_parser('mute', help='Mute a host') - mute_parser.add_argument('host_name', help='host to mute') - mute_parser.add_argument('--end', help="POSIX timestamp, if omitted," - " host will be muted until explicitly unmuted", default=None) - mute_parser.add_argument('--message', help="string to associate with the" - " muting of this host", default=None) - mute_parser.add_argument('--override', help="true/false, if true and the host is already" - " muted, will overwrite existing end on the host", - action='store_true') + parser = subparsers.add_parser("host", help="Mute, unmute hosts") + verb_parsers = parser.add_subparsers(title="Verbs", dest="verb") + verb_parsers.required = True + + mute_parser = verb_parsers.add_parser("mute", help="Mute a host") + mute_parser.add_argument("host_name", help="host to mute") + mute_parser.add_argument( + "--end", help="POSIX timestamp, if omitted," " host will be muted until explicitly unmuted", default=None + ) + mute_parser.add_argument("--message", help="string to associate with the" " muting of this host", default=None) + mute_parser.add_argument( + "--override", + help="true/false, if true and the host is already" " muted, will overwrite existing end on the host", + action="store_true", + ) mute_parser.set_defaults(func=cls._mute) - unmute_parser = verb_parsers.add_parser('unmute', help='Unmute a host') - unmute_parser.add_argument('host_name', help='host to mute') + unmute_parser = verb_parsers.add_parser("unmute", help="Unmute a host") + unmute_parser.add_argument("host_name", help="host 
to mute") unmute_parser.set_defaults(func=cls._unmute) @classmethod def _mute(cls, args): api._timeout = args.timeout format = args.format - res = api.Host.mute(args.host_name, end=args.end, message=args.message, - override=args.override) + res = api.Host.mute(args.host_name, end=args.end, message=args.message, override=args.override) report_warnings(res) report_errors(res) - if format == 'pretty': - print(cls._pretty_json(res)) + if format == "pretty": + print(pretty_json(res)) else: print(json.dumps(res)) @@ -44,7 +55,7 @@ def _unmute(cls, args): res = api.Host.unmute(args.host_name) report_warnings(res) report_errors(res) - if format == 'pretty': - print(cls._pretty_json(res)) + if format == "pretty": + print(pretty_json(res)) else: print(json.dumps(res)) diff --git a/datadog/dogshell/metric.py b/datadog/dogshell/metric.py index d44b2a023..135e4b93f 100644 --- a/datadog/dogshell/metric.py +++ b/datadog/dogshell/metric.py @@ -1,43 +1,72 @@ -from datadog.dogshell.common import report_errors, report_warnings, find_localhost +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +# stdlib +from collections import defaultdict + +# datadog from datadog import api +from datadog.dogshell.common import report_errors, report_warnings class MetricClient(object): - @classmethod def setup_parser(cls, subparsers): - parser = subparsers.add_parser('metric', help="Post metrics.") - verb_parsers = parser.add_subparsers(title='Verbs') - - post_parser = verb_parsers.add_parser('post', help="Post metrics") - post_parser.add_argument('name', help="metric name") - post_parser.add_argument('value', help="metric value (integer or decimal value)", - type=float) - post_parser.add_argument('--host', help="scopes your metric to a specific host", - default=None) - post_parser.add_argument('--device', help="scopes your metric to a specific device", - default=None) - post_parser.add_argument('--tags', help="comma-separated list of tags", default=None) - post_parser.add_argument('--localhostname', help="same as --host=`hostname`" - " (overrides --host)", action='store_true') - post_parser.add_argument('--type', help="type of the metric - gauge(32bit float)" - " or counter(64bit integer)", default=None) + parser = subparsers.add_parser("metric", help="Post metrics.") + verb_parsers = parser.add_subparsers(title="Verbs", dest="verb") + verb_parsers.required = True + + post_parser = verb_parsers.add_parser("post", help="Post metrics") + post_parser.add_argument("name", help="metric name") + post_parser.add_argument("value", help="metric value (integer or decimal value)", type=float) + post_parser.add_argument( + "--host", help="scopes your metric to a specific host " "(default to the local host name)", default="" + ) + post_parser.add_argument( + "--no_host", help="no host is associated with the metric" " (overrides --host))", action="store_true" + ) + post_parser.add_argument("--device", help="scopes your metric to a specific device", default=None) + post_parser.add_argument("--tags", help="comma-separated list of tags", 
default=None) + post_parser.add_argument( + "--localhostname", + help="deprecated, used to force `--host`" + " to the local hostname " + "(now default when no `--host` is specified)", + action="store_true", + ) + post_parser.add_argument( + "--type", help="type of the metric - gauge(32bit float)" " or counter(64bit integer)", default=None + ) parser.set_defaults(func=cls._post) @classmethod def _post(cls, args): + """ + Post a metric. + """ + # Format parameters api._timeout = args.timeout - if args.localhostname: - host = find_localhost() - else: - host = args.host + + host = None if args.no_host else args.host + if args.tags: - tags = sorted(set([t.strip() for t in - args.tags.split(',') if t])) + tags = sorted(set([t.strip() for t in args.tags.split(",") if t])) else: tags = None + + # Submit metric res = api.Metric.send( - metric=args.name, points=args.value, host=host, - device=args.device, tags=tags, metric_type=args.type) + metric=args.name, points=args.value, host=host, device=args.device, tags=tags, metric_type=args.type + ) + + # Report + res = defaultdict(list, res) + + if args.localhostname: + # Warn about`--localhostname` command line flag deprecation + res["warnings"].append( + u"`--localhostname` command line flag is deprecated, made default when no `--host` " + u"is specified. See the `--host` option for more information." + ) report_warnings(res) report_errors(res) diff --git a/datadog/dogshell/monitor.py b/datadog/dogshell/monitor.py index f2b9d8ff3..877e6cd06 100644 --- a/datadog/dogshell/monitor.py +++ b/datadog/dogshell/monitor.py @@ -1,91 +1,239 @@ -from datadog.util.compat import json -from datadog.dogshell.common import report_errors, report_warnings +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +# stdlib +import argparse +import json + +# 3p +from datadog.util.format import pretty_json + +# datadog from datadog import api +from datadog.dogshell.common import report_errors, report_warnings, print_err class MonitorClient(object): @classmethod def setup_parser(cls, subparsers): - parser = subparsers.add_parser('monitor', help="Create, edit, and delete monitors") - parser.add_argument('--string_ids', action='store_true', dest='string_ids', - help="Represent monitor IDs as strings instead of ints in JSON") - - verb_parsers = parser.add_subparsers(title='Verbs') - - post_parser = verb_parsers.add_parser('post', help="Create a monitor") - post_parser.add_argument('type', help="type of the monitor, e.g." - "'metric alert' 'service check'") - post_parser.add_argument('query', help="query to notify on with syntax varying " - "depending on what type of monitor you are creating") - post_parser.add_argument('--name', help="name of the alert", default=None) - post_parser.add_argument('--message', help="message to include with notifications" - " for this monitor", default=None) - post_parser.add_argument('--options', help="json options for the monitor", default=None) + parser = subparsers.add_parser("monitor", help="Create, edit, and delete monitors") + parser.add_argument( + "--string_ids", + action="store_true", + dest="string_ids", + help="Represent monitor IDs as strings instead of ints in JSON", + ) + + verb_parsers = parser.add_subparsers(title="Verbs", dest="verb") + verb_parsers.required = True + + post_parser = verb_parsers.add_parser("post", help="Create a monitor") + post_parser.add_argument("type", help="type of the monitor, e.g." 
"'metric alert' 'service check'") + post_parser.add_argument( + "query", help="query to notify on with syntax varying " "depending on what type of monitor you are creating" + ) + post_parser.add_argument("--name", help="name of the alert", default=None) + post_parser.add_argument( + "--message", help="message to include with notifications" " for this monitor", default=None + ) + post_parser.add_argument( + "--restricted_roles", help="comma-separated list of unique role identifiers allowed to edit the monitor", + default=None + ) + post_parser.add_argument("--tags", help="comma-separated list of tags", default=None) + post_parser.add_argument( + "--priority", + help="Integer from 1 (high) to 5 (low) indicating alert severity.", + default=None + ) + post_parser.add_argument("--options", help="json options for the monitor", default=None) post_parser.set_defaults(func=cls._post) - update_parser = verb_parsers.add_parser('update', help="Update existing monitor") - update_parser.add_argument('monitor_id', help="monitor to replace with the new definition") - update_parser.add_argument('type', help="type of the monitor, e.g. 
" - "'metric alert' 'service check'") - update_parser.add_argument('query', help="query to notify on with syntax varying" - " depending on what type of monitor you are creating") - update_parser.add_argument('--name', help="name of the alert", default=None) - update_parser.add_argument('--message', help="message to include with " - "notifications for this monitor", default=None) - update_parser.add_argument('--options', help="json options for the monitor", default=None) + file_post_parser = verb_parsers.add_parser("fpost", help="Create a monitor from file") + file_post_parser.add_argument("file", help="json file holding all details", type=argparse.FileType("r")) + file_post_parser.set_defaults(func=cls._file_post) + + update_parser = verb_parsers.add_parser("update", help="Update existing monitor") + update_parser.add_argument("monitor_id", help="monitor to replace with the new definition") + update_parser.add_argument( + "type", + nargs="?", + help="[Deprecated] optional argument preferred" "type of the monitor, e.g. 'metric alert' 'service check'", + default=None, + ) + update_parser.add_argument( + "query", + nargs="?", + help="[Deprecated] optional argument preferred" + "query to notify on with syntax varying depending on monitor type", + default=None, + ) + update_parser.add_argument( + "--type", help="type of the monitor, e.g. 
" "'metric alert' 'service check'", default=None, dest="type_opt" + ) + update_parser.add_argument( + "--query", + help="query to notify on with syntax varying" " depending on monitor type", + default=None, + dest="query_opt", + ) + update_parser.add_argument("--name", help="name of the alert", default=None) + update_parser.add_argument( + "--restricted_roles", help="comma-separated list of unique role identifiers allowed to edit the monitor", + default=None + ) + update_parser.add_argument("--tags", help="comma-separated list of tags", default=None) + update_parser.add_argument( + "--message", help="message to include with " "notifications for this monitor", default=None + ) + update_parser.add_argument( + "--priority", + help="Integer from 1 (high) to 5 (low) indicating alert severity.", + default=None + ) + update_parser.add_argument("--options", help="json options for the monitor", default=None) update_parser.set_defaults(func=cls._update) - show_parser = verb_parsers.add_parser('show', help="Show a monitor definition") - show_parser.add_argument('monitor_id', help="monitor to show") + file_update_parser = verb_parsers.add_parser("fupdate", help="Update existing" " monitor from file") + file_update_parser.add_argument("file", help="json file holding all details", type=argparse.FileType("r")) + file_update_parser.set_defaults(func=cls._file_update) + + show_parser = verb_parsers.add_parser("show", help="Show a monitor definition") + show_parser.add_argument("monitor_id", help="monitor to show") show_parser.set_defaults(func=cls._show) - show_all_parser = verb_parsers.add_parser('show_all', help="Show a list of all monitors") + show_all_parser = verb_parsers.add_parser("show_all", help="Show a list of all monitors") + show_all_parser.add_argument( + "--group_states", + help="comma separated list of group states to filter by" + "(choose one or more from 'all', 'alert', 'warn', or 'no data')", + ) + show_all_parser.add_argument("--name", help="string to filter 
monitors by name") + show_all_parser.add_argument( + "--tags", + help="comma separated list indicating what tags, if any, " + "should be used to filter the list of monitors by scope (e.g. 'host:host0')", + ) + show_all_parser.add_argument( + "--monitor_tags", + help="comma separated list indicating what service " + "and/or custom tags, if any, should be used to filter the list of monitors", + ) + show_all_parser.set_defaults(func=cls._show_all) - delete_parser = verb_parsers.add_parser('delete', help="Delete a monitor") - delete_parser.add_argument('monitor_id', help="monitor to delete") + delete_parser = verb_parsers.add_parser("delete", help="Delete a monitor") + delete_parser.add_argument("monitor_id", help="monitor to delete") delete_parser.set_defaults(func=cls._delete) - mute_all_parser = verb_parsers.add_parser('mute_all', help="Globally mute " - "monitors (downtime over *)") + mute_all_parser = verb_parsers.add_parser("mute_all", help="Globally mute " "monitors (downtime over *)") mute_all_parser.set_defaults(func=cls._mute_all) - unmute_all_parser = verb_parsers.add_parser('unmute_all', help="Globally unmute " - "monitors (cancel downtime over *)") + unmute_all_parser = verb_parsers.add_parser( + "unmute_all", help="Globally unmute " "monitors (cancel downtime over *)" + ) unmute_all_parser.set_defaults(func=cls._unmute_all) - mute_parser = verb_parsers.add_parser('mute', help="Mute a monitor") - mute_parser.add_argument('monitor_id', help="monitor to mute") - mute_parser.add_argument('--scope', help="scope to apply the mute to," - " e.g. role:db (optional)", default=[]) - mute_parser.add_argument('--end', help="POSIX timestamp for when" - " the mute should end (optional)", default=None) + mute_parser = verb_parsers.add_parser("mute", help="Mute a monitor") + mute_parser.add_argument("monitor_id", help="monitor to mute") + mute_parser.add_argument("--scope", help="scope to apply the mute to," " e.g. 
role:db (optional)", default=[]) + mute_parser.add_argument( + "--end", help="POSIX timestamp for when" " the mute should end (optional)", default=None + ) mute_parser.set_defaults(func=cls._mute) - unmute_parser = verb_parsers.add_parser('unmute', help="Unmute a monitor") - unmute_parser.add_argument('monitor_id', help="monitor to unmute") - unmute_parser.add_argument('--scope', help="scope to unmute (must be muted), " - "e.g. role:db", default=[]) - unmute_parser.add_argument('--all_scopes', help="clear muting across all scopes", - action='store_true') + unmute_parser = verb_parsers.add_parser("unmute", help="Unmute a monitor") + unmute_parser.add_argument("monitor_id", help="monitor to unmute") + unmute_parser.add_argument("--scope", help="scope to unmute (must be muted), " "e.g. role:db", default=[]) + unmute_parser.add_argument("--all_scopes", help="clear muting across all scopes", action="store_true") unmute_parser.set_defaults(func=cls._unmute) + can_delete_parser = verb_parsers.add_parser("can_delete", help="Check if you can delete some monitors") + can_delete_parser.add_argument("monitor_ids", help="monitors to check if they can be deleted") + can_delete_parser.set_defaults(func=cls._can_delete) + + validate_parser = verb_parsers.add_parser("validate", help="Validates if a monitor definition is correct") + validate_parser.add_argument("type", help="type of the monitor, e.g." 
"'metric alert' 'service check'") + validate_parser.add_argument("query", help="the monitor query") + validate_parser.add_argument("--name", help="name of the alert", default=None) + validate_parser.add_argument( + "--message", help="message to include with notifications" " for this monitor", default=None + ) + validate_parser.add_argument( + "--restricted_roles", help="comma-separated list of unique role identifiers allowed to edit the monitor", + default=None + ) + validate_parser.add_argument("--tags", help="comma-separated list of tags", default=None) + validate_parser.add_argument("--options", help="json options for the monitor", default=None) + validate_parser.set_defaults(func=cls._validate) + @classmethod def _post(cls, args): api._timeout = args.timeout format = args.format options = None if args.options is not None: - try: - options = json.loads(args.options) - except: - raise Exception('bad json parameter') - res = api.Monitor.create(type=args.type, query=args.query, name=args.name, - message=args.message, options=options) + options = json.loads(args.options) + + if args.tags: + tags = sorted(set([t.strip() for t in args.tags.split(",") if t.strip()])) + else: + tags = None + + if args.restricted_roles: + restricted_roles = sorted(set([rr.strip() for rr in args.restricted_roles.split(",") if rr.strip()])) + else: + restricted_roles = None + + body = { + "type": args.type, + "query": args.query, + "name": args.name, + "message": args.message, + "options": options + } + if tags: + body["tags"] = tags + if restricted_roles: + body["restricted_roles"] = restricted_roles + if args.priority: + body["priority"] = args.priority + + res = api.Monitor.create(**body) report_warnings(res) report_errors(res) - if format == 'pretty': - print(cls._pretty_json(res)) + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _file_post(cls, args): + api._timeout = args.timeout + format = args.format + monitor = 
json.load(args.file) + body = { + "type": monitor["type"], + "query": monitor["query"], + "name": monitor["name"], + "message": monitor["message"], + "options": monitor["options"] + } + restricted_roles = monitor.get("restricted_roles", None) + if restricted_roles: + body["restricted_roles"] = restricted_roles + tags = monitor.get("tags", None) + if tags: + body["tags"] = tags + priority = monitor.get("priority", None) + if priority: + body["priority"] = priority + + res = api.Monitor.create(**body) + report_warnings(res) + report_errors(res) + if format == "pretty": + print(pretty_json(res)) else: print(json.dumps(res)) @@ -93,18 +241,84 @@ def _post(cls, args): def _update(cls, args): api._timeout = args.timeout format = args.format - options = None + + to_update = {} + if args.type: + if args.type_opt: + msg = "Duplicate arguments for `type`. Using optional value --type" + print_err("WARNING: {}".format(msg)) + else: + to_update["type"] = args.type + msg = "[DEPRECATION] `type` is no longer required to `update` and may be omitted" + print_err("WARNING: {}".format(msg)) + if args.query: + if args.query_opt: + msg = "Duplicate arguments for `query`. 
Using optional value --query" + print_err("WARNING: {}".format(msg)) + else: + to_update["query"] = args.query + msg = "[DEPRECATION] `query` is no longer required to `update` and may be omitted" + print_err("WARNING: {}".format(msg)) + if args.name: + to_update["name"] = args.name + if args.message: + to_update["message"] = args.message + if args.type_opt: + to_update["type"] = args.type_opt + if args.query_opt: + to_update["query"] = args.query_opt + if args.restricted_roles is not None: + if args.restricted_roles == "": + to_update["restricted_roles"] = None + else: + to_update["restricted_roles"] = sorted( + set([rr.strip() for rr in args.restricted_roles.split(",") if rr.strip()])) + if args.tags: + to_update["tags"] = sorted(set([t.strip() for t in args.tags.split(",") if t.strip()])) + if args.priority: + to_update["priority"] = args.priority + if args.options is not None: - try: - options = json.loads(args.options) - except: - raise Exception('bad json parameter') - res = api.Monitor.update(args.monitor_id, type=args.type, query=args.query, - name=args.name, message=args.message, options=options) + to_update["options"] = json.loads(args.options) + + res = api.Monitor.update(args.monitor_id, **to_update) + report_warnings(res) report_errors(res) - if format == 'pretty': - print(cls._pretty_json(res)) + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _file_update(cls, args): + api._timeout = args.timeout + format = args.format + monitor = json.load(args.file) + body = { + "type": monitor["type"], + "query": monitor["query"], + "name": monitor["name"], + "message": monitor["message"], + "options": monitor["options"] + } + # Default value is False to defferentiate between explicit None and not set + restricted_roles = monitor.get("restricted_roles", False) + if restricted_roles is not False: + body["restricted_roles"] = restricted_roles + tags = monitor.get("tags", None) + if tags: + body["tags"] = tags + 
priority = monitor.get("priority", None) + if priority: + body["priority"] = priority + + res = api.Monitor.update(monitor["id"], **body) + + report_warnings(res) + report_errors(res) + if format == "pretty": + print(pretty_json(res)) else: print(json.dumps(res)) @@ -119,8 +333,8 @@ def _show(cls, args): if args.string_ids: res["id"] = str(res["id"]) - if format == 'pretty': - print(cls._pretty_json(res)) + if format == "pretty": + print(pretty_json(res)) else: print(json.dumps(res)) @@ -128,7 +342,10 @@ def _show(cls, args): def _show_all(cls, args): api._timeout = args.timeout format = args.format - res = api.Monitor.get_all() + + res = api.Monitor.get_all( + group_states=args.group_states, name=args.name, tags=args.tags, monitor_tags=args.monitor_tags + ) report_warnings(res) report_errors(res) @@ -136,19 +353,25 @@ def _show_all(cls, args): for d in res: d["id"] = str(d["id"]) - if format == 'pretty': - print(cls._pretty_json(res)) - elif format == 'raw': + if format == "pretty": + print(pretty_json(res)) + elif format == "raw": print(json.dumps(res)) else: for d in res: - print("\t".join([(str(d["id"])), - (cls._escape(d["message"])), - (cls._escape(d["name"])), - (str(d["options"])), - (str(d["org_id"])), - (d["query"]), - (d["type"])])) + print( + "\t".join( + [ + (str(d["id"])), + (cls._escape(d["message"])), + (cls._escape(d["name"])), + (str(d["options"])), + (str(d["org_id"])), + (d["query"]), + (d["type"]), + ] + ) + ) @classmethod def _delete(cls, args): @@ -170,8 +393,8 @@ def _mute_all(cls, args): res = api.Monitor.mute_all() report_warnings(res) report_errors(res) - if format == 'pretty': - print(cls._pretty_json(res)) + if format == "pretty": + print(pretty_json(res)) else: print(json.dumps(res)) @@ -190,8 +413,8 @@ def _mute(cls, args): res = api.Monitor.mute(args.monitor_id, scope=args.scope, end=args.end) report_warnings(res) report_errors(res) - if format == 'pretty': - print(cls._pretty_json(res)) + if format == "pretty": + 
print(pretty_json(res)) else: print(json.dumps(res)) @@ -201,7 +424,51 @@ def _unmute(cls, args): res = api.Monitor.unmute(args.monitor_id, scope=args.scope, all_scopes=args.all_scopes) report_warnings(res) report_errors(res) - if format == 'pretty': - print(cls._pretty_json(res)) + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _can_delete(cls, args): + api._timeout = args.timeout + monitor_ids = [i.strip() for i in args.monitor_ids.split(",") if i.strip()] + res = api.Monitor.can_delete(monitor_ids=monitor_ids) + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _validate(cls, args): + api._timeout = args.timeout + format = args.format + options = None + if args.options is not None: + options = json.loads(args.options) + + if args.tags: + tags = sorted(set([t.strip() for t in args.tags.split(",") if t.strip()])) + else: + tags = None + + if args.restricted_roles: + restricted_roles = sorted(set([rr.strip() for rr in args.restricted_roles.split(",") if rr.strip()])) + else: + restricted_roles = None + + res = api.Monitor.validate( + type=args.type, + query=args.query, + name=args.name, + message=args.message, + tags=tags, + restricted_roles=restricted_roles, + options=options + ) + # report_warnings(res) + # report_errors(res) + if format == "pretty": + print(pretty_json(res)) else: print(json.dumps(res)) diff --git a/datadog/dogshell/screenboard.py b/datadog/dogshell/screenboard.py index d8345cca0..093a3e19d 100644 --- a/datadog/dogshell/screenboard.py +++ b/datadog/dogshell/screenboard.py @@ -1,142 +1,158 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +# stdlib import argparse -import sys +import json import platform +import sys import webbrowser -from datetime import datetime -from datadog.util.compat import json -from datadog.dogshell.common import report_errors, report_warnings, print_err +# 3p +from datadog.util.format import pretty_json + +# datadog from datadog import api +from datadog.dogshell.common import report_errors, report_warnings, print_err +from datetime import datetime class ScreenboardClient(object): - @classmethod def setup_parser(cls, subparsers): - parser = subparsers.add_parser('screenboard', help="Create, edit, and delete screenboards.") - parser.add_argument('--string_ids', action='store_true', dest='string_ids', - help="Represent screenboard IDs as strings instead of ints in JSON") - - verb_parsers = parser.add_subparsers(title='Verbs') - - post_parser = verb_parsers.add_parser('post', help="Create screenboards.") - post_parser.add_argument('title', help="title for the new screenboard") - post_parser.add_argument('description', help="short description of the screenboard") - post_parser.add_argument('graphs', help="graph definitions as a JSON string." - " if unset, reads from stdin.", nargs="?") - post_parser.add_argument('--template_variables', type=_template_variables, default=[], - help="a json list of template variable dicts, e.g. 
" - "[{'name': 'host', 'prefix': 'host', 'default': 'host:my-host'}]") - post_parser.add_argument('--width', type=int, default=None, - help="screenboard width in pixels") - post_parser.add_argument('--height', type=int, default=None, - help="screenboard height in pixels") + parser = subparsers.add_parser("screenboard", help="Create, edit, and delete screenboards.") + parser.add_argument( + "--string_ids", + action="store_true", + dest="string_ids", + help="Represent screenboard IDs as strings instead of ints in JSON", + ) + + verb_parsers = parser.add_subparsers(title="Verbs", dest="verb") + verb_parsers.required = True + + post_parser = verb_parsers.add_parser("post", help="Create screenboards.") + post_parser.add_argument("title", help="title for the new screenboard") + post_parser.add_argument("description", help="short description of the screenboard") + post_parser.add_argument( + "graphs", help="graph definitions as a JSON string." " if unset, reads from stdin.", nargs="?" + ) + post_parser.add_argument( + "--template_variables", + type=_template_variables, + default=[], + help="a json list of template variable dicts, e.g. " + "[{'name': 'host', 'prefix': 'host', 'default': 'host:my-host'}]", + ) + post_parser.add_argument("--width", type=int, default=None, help="screenboard width in pixels") + post_parser.add_argument("--height", type=int, default=None, help="screenboard height in pixels") post_parser.set_defaults(func=cls._post) - update_parser = verb_parsers.add_parser('update', help="Update existing screenboards.") - update_parser.add_argument('screenboard_id', help="screenboard to replace " - " with the new definition") - update_parser.add_argument('title', help="title for the new screenboard") - update_parser.add_argument('description', help="short description of the screenboard") - update_parser.add_argument('graphs', help="graph definitions as a JSON string." 
- " if unset, reads from stdin.", nargs="?") - update_parser.add_argument('--template_variables', type=_template_variables, default=[], - help="a json list of template variable dicts, e.g. " - "[{'name': 'host', 'prefix': 'host', 'default': " - "'host:my-host'}]") - update_parser.add_argument('--width', type=int, default=None, - help="screenboard width in pixels") - update_parser.add_argument('--height', type=int, default=None, - help="screenboard height in pixels") + update_parser = verb_parsers.add_parser("update", help="Update existing screenboards.") + update_parser.add_argument("screenboard_id", help="screenboard to replace " " with the new definition") + update_parser.add_argument("title", help="title for the new screenboard") + update_parser.add_argument("description", help="short description of the screenboard") + update_parser.add_argument( + "graphs", help="graph definitions as a JSON string." " if unset, reads from stdin.", nargs="?" + ) + update_parser.add_argument( + "--template_variables", + type=_template_variables, + default=[], + help="a json list of template variable dicts, e.g. 
" + "[{'name': 'host', 'prefix': 'host', 'default': " + "'host:my-host'}]", + ) + update_parser.add_argument("--width", type=int, default=None, help="screenboard width in pixels") + update_parser.add_argument("--height", type=int, default=None, help="screenboard height in pixels") update_parser.set_defaults(func=cls._update) - show_parser = verb_parsers.add_parser('show', help="Show a screenboard definition.") - show_parser.add_argument('screenboard_id', help="screenboard to show") + show_parser = verb_parsers.add_parser("show", help="Show a screenboard definition.") + show_parser.add_argument("screenboard_id", help="screenboard to show") show_parser.set_defaults(func=cls._show) - delete_parser = verb_parsers.add_parser('delete', help="Delete a screenboard.") - delete_parser.add_argument('screenboard_id', help="screenboard to delete") + delete_parser = verb_parsers.add_parser("delete", help="Delete a screenboard.") + delete_parser.add_argument("screenboard_id", help="screenboard to delete") delete_parser.set_defaults(func=cls._delete) - share_parser = verb_parsers.add_parser('share', help="Share an existing screenboard's" - " with a public URL.") - share_parser.add_argument('screenboard_id', help="screenboard to share") + share_parser = verb_parsers.add_parser("share", help="Share an existing screenboard's" " with a public URL.") + share_parser.add_argument("screenboard_id", help="screenboard to share") share_parser.set_defaults(func=cls._share) - revoke_parser = verb_parsers.add_parser('revoke', help="Revoke an existing screenboard's" - " with a public URL.") - revoke_parser.add_argument('screenboard_id', help="screenboard to revoke") + revoke_parser = verb_parsers.add_parser("revoke", help="Revoke an existing screenboard's" " with a public URL.") + revoke_parser.add_argument("screenboard_id", help="screenboard to revoke") revoke_parser.set_defaults(func=cls._revoke) - pull_parser = verb_parsers.add_parser('pull', help="Pull a screenboard on the server" - " into a 
local file") - pull_parser.add_argument('screenboard_id', help="ID of screenboard to pull") - pull_parser.add_argument('filename', help="file to pull screenboard into") + pull_parser = verb_parsers.add_parser("pull", help="Pull a screenboard on the server" " into a local file") + pull_parser.add_argument("screenboard_id", help="ID of screenboard to pull") + pull_parser.add_argument("filename", help="file to pull screenboard into") pull_parser.set_defaults(func=cls._pull) - push_parser = verb_parsers.add_parser('push', help="Push updates to screenboards" - " from local files to the server") - push_parser.add_argument('--append_auto_text', action='store_true', dest='append_auto_text', - help="When pushing to the server, appends filename and" - " timestamp to the end of the screenboard description") - push_parser.add_argument('file', help="screenboard files to push to the server", - nargs='+', type=argparse.FileType('r')) + push_parser = verb_parsers.add_parser( + "push", help="Push updates to screenboards" " from local files to the server" + ) + push_parser.add_argument( + "--append_auto_text", + action="store_true", + dest="append_auto_text", + help="When pushing to the server, appends filename and" + " timestamp to the end of the screenboard description", + ) + push_parser.add_argument( + "file", help="screenboard files to push to the server", nargs="+", type=argparse.FileType("r") + ) push_parser.set_defaults(func=cls._push) - new_file_parser = verb_parsers.add_parser('new_file', help="Create a new screenboard" - " and put its contents in a file") - new_file_parser.add_argument('filename', help="name of file to create with" - " empty screenboard") - new_file_parser.add_argument('graphs', help="graph definitions as a JSON string." 
- " if unset, reads from stdin.", nargs="?") + new_file_parser = verb_parsers.add_parser( + "new_file", help="Create a new screenboard" " and put its contents in a file" + ) + new_file_parser.add_argument("filename", help="name of file to create with" " empty screenboard") + new_file_parser.add_argument( + "graphs", help="graph definitions as a JSON string." " if unset, reads from stdin.", nargs="?" + ) new_file_parser.set_defaults(func=cls._new_file) @classmethod def _pull(cls, args): - cls._write_screen_to_file(args.screenboard_id, args.filename, args.timeout, - args.format, args.string_ids) + cls._write_screen_to_file(args.screenboard_id, args.filename, args.timeout, args.format, args.string_ids) # TODO Is there a test for this one ? @classmethod def _push(cls, args): api._timeout = args.timeout for f in args.file: - try: - screen_obj = json.load(f) - except Exception as err: - raise Exception("Could not parse {0}: {1}".format(f.name, err)) + screen_obj = json.load(f) if args.append_auto_text: - datetime_str = datetime.now().strftime('%x %X') - auto_text = ("
\nUpdated at {0} from {1} ({2}) on {3}" - .format(datetime_str, f.name, screen_obj["id"], platform.node())) + datetime_str = datetime.now().strftime("%x %X") + auto_text = "
\nUpdated at {0} from {1} ({2}) on {3}".format( + datetime_str, f.name, screen_obj["id"], platform.node() + ) screen_obj["description"] += auto_text - if 'id' in screen_obj: + if "id" in screen_obj: # Always convert to int, in case it was originally a string. screen_obj["id"] = int(screen_obj["id"]) res = api.Screenboard.update(**screen_obj) else: res = api.Screenboard.create(**screen_obj) - if 'errors' in res: - print_err('Upload of screenboard {0} from file {1} failed.' - .format(screen_obj["id"], f.name)) + if "errors" in res: + print_err("Upload of screenboard {0} from file {1} failed.".format(screen_obj["id"], f.name)) report_warnings(res) report_errors(res) - if format == 'pretty': - print(cls._pretty_json(res)) + if format == "pretty": + print(pretty_json(res)) else: print(json.dumps(res)) - if args.format == 'pretty': + if args.format == "pretty": print("Uploaded file {0} (screenboard {1})".format(f.name, screen_obj["id"])) @classmethod - def _write_screen_to_file(cls, screenboard_id, filename, timeout, - format='raw', string_ids=False): + def _write_screen_to_file(cls, screenboard_id, filename, timeout, format="raw", string_ids=False): with open(filename, "w") as f: res = api.Screenboard.get(screenboard_id) report_warnings(res) @@ -153,7 +169,7 @@ def _write_screen_to_file(cls, screenboard_id, filename, timeout, json.dump(screen_obj, f, indent=2) - if format == 'pretty': + if format == "pretty": print("Downloaded screenboard {0} to file {1}".format(screenboard_id, filename)) else: print("{0} {1}".format(screenboard_id, filename)) @@ -166,17 +182,19 @@ def _post(cls, args): graphs = args.graphs if args.graphs is None: graphs = sys.stdin.read() - try: - graphs = json.loads(graphs) - except: - raise Exception('bad json parameter') + graphs = json.loads(graphs) res = api.Screenboard.create( - title=args.title, description=args.description, graphs=[graphs], - template_variables=args.template_variables, width=args.width, height=args.height) + title=args.title, + 
description=args.description, + graphs=[graphs], + template_variables=args.template_variables, + width=args.width, + height=args.height, + ) report_warnings(res) report_errors(res) - if format == 'pretty': - print(cls._pretty_json(res)) + if format == "pretty": + print(pretty_json(res)) else: print(json.dumps(res)) @@ -187,25 +205,27 @@ def _update(cls, args): graphs = args.graphs if args.graphs is None: graphs = sys.stdin.read() - try: - graphs = json.loads(graphs) - except: - raise Exception('bad json parameter') + graphs = json.loads(graphs) res = api.Screenboard.update( - args.screenboard_id, title=args.title, description=args.description, - graphs=graphs, template_variables=args.template_variables, - width=args.width, height=args.height) + args.screenboard_id, + board_title=args.title, + description=args.description, + widgets=graphs, + template_variables=args.template_variables, + width=args.width, + height=args.height, + ) report_warnings(res) report_errors(res) - if format == 'pretty': - print(cls._pretty_json(res)) + if format == "pretty": + print(pretty_json(res)) else: print(json.dumps(res)) @classmethod def _web_view(cls, args): - dash_id = json.load(args.file)['id'] + dash_id = json.load(args.file)["id"] url = api._api_host + "/dash/dash/{0}".format(dash_id) webbrowser.open(url) @@ -220,8 +240,8 @@ def _show(cls, args): if args.string_ids: res["id"] = str(res["id"]) - if format == 'pretty': - print(cls._pretty_json(res)) + if format == "pretty": + print(pretty_json(res)) else: print(json.dumps(res)) @@ -240,8 +260,8 @@ def _share(cls, args): format = args.format res = api.Screenboard.share(args.screenboard_id) - if format == 'pretty': - print(cls._pretty_json(res)) + if format == "pretty": + print(pretty_json(res)) else: print(json.dumps(res)) @@ -251,8 +271,8 @@ def _revoke(cls, args): format = args.format res = api.Screenboard.revoke(args.screenboard_id) - if format == 'pretty': - print(cls._pretty_json(res)) + if format == "pretty": + 
print(pretty_json(res)) else: print(json.dumps(res)) @@ -263,29 +283,26 @@ def _new_file(cls, args): graphs = args.graphs if args.graphs is None: graphs = sys.stdin.read() - try: - graphs = json.loads(graphs) - except: - raise Exception('bad json parameter') - res = api.Screenboard.create(title=args.filename, - description="Description for {0}".format(args.filename), - graphs=[graphs]) + graphs = json.loads(graphs) + res = api.Screenboard.create( + board_title=args.filename, description="Description for {0}".format(args.filename), widgets=[graphs] + ) report_warnings(res) report_errors(res) - cls._write_screen_to_file(res['id'], args.filename, args.timeout, format, args.string_ids) + cls._write_screen_to_file(res["id"], args.filename, args.timeout, format, args.string_ids) - if format == 'pretty': - print(cls._pretty_json(res)) + if format == "pretty": + print(pretty_json(res)) else: print(json.dumps(res)) def _template_variables(tpl_var_input): - if '[' not in tpl_var_input: - return [v.strip() for v in tpl_var_input.split(',')] + if "[" not in tpl_var_input: + return [v.strip() for v in tpl_var_input.split(",")] else: try: return json.loads(tpl_var_input) except Exception: - raise argparse.ArgumentTypeError('bad template_variable json parameter') + raise argparse.ArgumentTypeError("bad template_variable json parameter") diff --git a/datadog/dogshell/search.py b/datadog/dogshell/search.py index a4ec4a106..9c1cb4703 100644 --- a/datadog/dogshell/search.py +++ b/datadog/dogshell/search.py @@ -1,18 +1,24 @@ -from datadog.util.compat import json -from datadog.dogshell.common import report_errors, report_warnings +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +# stdlib +import json + +# datadog from datadog import api +from datadog.dogshell.common import report_errors, report_warnings # TODO IS there a test ? class SearchClient(object): - @classmethod def setup_parser(cls, subparsers): - parser = subparsers.add_parser('search', help="search datadog") - verb_parsers = parser.add_subparsers(title='Verbs') + parser = subparsers.add_parser("search", help="search datadog") + verb_parsers = parser.add_subparsers(title="Verbs", dest="verb") + verb_parsers.required = True - query_parser = verb_parsers.add_parser('query', help="Search datadog.") - query_parser.add_argument('query', help="optionally faceted search query") + query_parser = verb_parsers.add_parser("query", help="Search datadog.") + query_parser.add_argument("query", help="optionally faceted search query") query_parser.set_defaults(func=cls._query) @classmethod @@ -21,17 +27,17 @@ def _query(cls, args): res = api.Infrastructure.search(q=args.query) report_warnings(res) report_errors(res) - if format == 'pretty': - for facet, results in list(res['results'].items()): + if format == "pretty": + for facet, results in list(res["results"].items()): for idx, result in enumerate(results): if idx == 0: - print('\n') + print("\n") print("%s\t%s" % (facet, result)) else: - print("%s\t%s" % (' ' * len(facet), result)) - elif format == 'raw': + print("%s\t%s" % (" " * len(facet), result)) + elif format == "raw": print(json.dumps(res)) else: - for facet, results in list(res['results'].items()): + for facet, results in list(res["results"].items()): for result in results: print("%s\t%s" % (facet, result)) diff --git a/datadog/dogshell/service_check.py b/datadog/dogshell/service_check.py index c5cb844e6..b30f33c58 100644 --- a/datadog/dogshell/service_check.py +++ b/datadog/dogshell/service_check.py @@ -1,35 +1,55 @@ -from datadog.util.compat import json -from datadog.dogshell.common import report_errors, report_warnings +# Unless explicitly 
stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +# stdlib +import json + +# 3p +from datadog.util.format import pretty_json + +# datadog from datadog import api +from datadog.dogshell.common import report_errors, report_warnings class ServiceCheckClient(object): @classmethod def setup_parser(cls, subparsers): - parser = subparsers.add_parser('service_check', help="Perform service checks") - verb_parsers = parser.add_subparsers(title='Verbs') + parser = subparsers.add_parser("service_check", help="Perform service checks") + verb_parsers = parser.add_subparsers(title="Verbs", dest="verb") + verb_parsers.required = True - check_parser = verb_parsers.add_parser('check', help="text for the message") - check_parser.add_argument('check', help="text for the message") - check_parser.add_argument('host_name', help="name of the host submitting the check") - check_parser.add_argument('status', help="integer for the status of the check." - " i.e: '0': OK, '1': WARNING, '2': CRITICAL, '3': UNKNOWN") - check_parser.add_argument('--timestamp', help="POSIX timestamp of the event", default=None) - check_parser.add_argument('--message', help="description of why this status occurred", - default=None) - check_parser.add_argument('--tags', help="comma separated list of tags", default=None) + check_parser = verb_parsers.add_parser("check", help="text for the message") + check_parser.add_argument("check", help="text for the message") + check_parser.add_argument("host_name", help="name of the host submitting the check") + check_parser.add_argument( + "status", + help="integer for the status of the check." 
" i.e: '0': OK, '1': WARNING, '2': CRITICAL, '3': UNKNOWN", + ) + check_parser.add_argument("--timestamp", help="POSIX timestamp of the event", default=None) + check_parser.add_argument("--message", help="description of why this status occurred", default=None) + check_parser.add_argument("--tags", help="comma separated list of tags", default=None) check_parser.set_defaults(func=cls._check) @classmethod def _check(cls, args): api._timeout = args.timeout format = args.format + if args.tags: + tags = sorted(set([t.strip() for t in args.tags.split(",") if t.strip()])) + else: + tags = None res = api.ServiceCheck.check( - check=args.check, host_name=args.host_name, status=int(args.status), - timestamp=args.timestamp, message=args.message, tags=args.tags) + check=args.check, + host_name=args.host_name, + status=int(args.status), + timestamp=args.timestamp, + message=args.message, + tags=tags, + ) report_warnings(res) report_errors(res) - if format == 'pretty': - print(cls._pretty_json(res)) + if format == "pretty": + print(pretty_json(res)) else: print(json.dumps(res)) diff --git a/datadog/dogshell/service_level_objective.py b/datadog/dogshell/service_level_objective.py new file mode 100644 index 000000000..13ec92818 --- /dev/null +++ b/datadog/dogshell/service_level_objective.py @@ -0,0 +1,426 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +# stdlib +import argparse +import json + +# 3p +from datadog.util.cli import ( + set_of_ints, + comma_set, + comma_list_or_empty, + parse_date_as_epoch_timestamp, +) +from datadog.util.format import pretty_json + +# datadog +from datadog import api +from datadog.dogshell.common import report_errors, report_warnings + + +class ServiceLevelObjectiveClient(object): + @classmethod + def setup_parser(cls, subparsers): + parser = subparsers.add_parser( + "service_level_objective", + help="Create, edit, and delete service level objectives", + ) + + verb_parsers = parser.add_subparsers(title="Verbs", dest="verb") + verb_parsers.required = True + + create_parser = verb_parsers.add_parser("create", help="Create a SLO") + create_parser.add_argument( + "--type", + help="type of the SLO, e.g.", + choices=["metric", "monitor"], + ) + create_parser.add_argument("--name", help="name of the SLO", default=None) + create_parser.add_argument("--description", help="description of the SLO", default=None) + create_parser.add_argument( + "--tags", + help="comma-separated list of tags", + default=None, + type=comma_list_or_empty, + ) + create_parser.add_argument( + "--thresholds", + help="comma separated list of :[:[:[:]]", + ) + create_parser.add_argument( + "--numerator", + help="numerator metric query (sum of good events)", + default=None, + ) + create_parser.add_argument( + "--denominator", + help="denominator metric query (sum of total events)", + default=None, + ) + create_parser.add_argument( + "--monitor_ids", + help="explicit monitor_ids to use (CSV)", + default=None, + type=set_of_ints, + ) + create_parser.add_argument("--monitor_search", help="monitor search terms to use", default=None) + create_parser.add_argument( + "--groups", + help="for a single monitor you can specify the specific groups as a pipe (|) delimited string", + default=None, + type=comma_list_or_empty, + ) + create_parser.set_defaults(func=cls._create) + + 
file_create_parser = verb_parsers.add_parser("fcreate", help="Create a SLO from file") + file_create_parser.add_argument("file", help="json file holding all details", type=argparse.FileType("r")) + file_create_parser.set_defaults(func=cls._file_create) + + update_parser = verb_parsers.add_parser("update", help="Update existing SLO") + update_parser.add_argument("slo_id", help="SLO to replace with the new definition") + update_parser.add_argument( + "--type", + help="type of the SLO (must specify it's original type)", + choices=["metric", "monitor"], + ) + update_parser.add_argument("--name", help="name of the SLO", default=None) + update_parser.add_argument("--description", help="description of the SLO", default=None) + update_parser.add_argument( + "--thresholds", + help="comma separated list of :[:[:[:]]", + ) + update_parser.add_argument( + "--tags", + help="comma-separated list of tags", + default=None, + type=comma_list_or_empty, + ) + update_parser.add_argument( + "--numerator", + help="numerator metric query (sum of good events)", + default=None, + ) + update_parser.add_argument( + "--denominator", + help="denominator metric query (sum of total events)", + default=None, + ) + update_parser.add_argument( + "--monitor_ids", + help="explicit monitor_ids to use (CSV)", + default=[], + type=list, + ) + update_parser.add_argument("--monitor_search", help="monitor search terms to use", default=None) + update_parser.add_argument( + "--groups", + help="for a single monitor you can specify the specific groups as a pipe (|) delimited string", + default=None, + ) + update_parser.set_defaults(func=cls._update) + + file_update_parser = verb_parsers.add_parser("fupdate", help="Update existing SLO from file") + file_update_parser.add_argument("file", help="json file holding all details", type=argparse.FileType("r")) + file_update_parser.set_defaults(func=cls._file_update) + + show_parser = verb_parsers.add_parser("show", help="Show a SLO definition") + 
show_parser.add_argument("slo_id", help="SLO to show") + show_parser.set_defaults(func=cls._show) + + show_all_parser = verb_parsers.add_parser("show_all", help="Show a list of all SLOs") + show_all_parser.add_argument("--query", help="string to filter SLOs by query (see UI or documentation)") + show_all_parser.add_argument( + "--slo_ids", + help="comma separated list indicating what SLO IDs to get at once", + type=comma_set, + ) + show_all_parser.add_argument("--offset", help="offset of query pagination", default=0) + show_all_parser.add_argument("--limit", help="limit of query pagination", default=100) + show_all_parser.set_defaults(func=cls._show_all) + + delete_parser = verb_parsers.add_parser("delete", help="Delete a SLO") + delete_parser.add_argument("slo_id", help="SLO to delete") + delete_parser.set_defaults(func=cls._delete) + + delete_many_parser = verb_parsers.add_parser("delete_many", help="Delete a SLO") + delete_many_parser.add_argument("slo_ids", help="comma separated list of SLO IDs to delete", type=comma_set) + delete_many_parser.set_defaults(func=cls._delete_many) + + delete_timeframe_parser = verb_parsers.add_parser("delete_many_timeframe", help="Delete a SLO timeframe") + delete_timeframe_parser.add_argument("slo_id", help="SLO ID to update") + delete_timeframe_parser.add_argument( + "timeframes", + help="CSV of timeframes to delete, e.g. 
7d,30d,90d", + type=comma_set, + ) + delete_timeframe_parser.set_defaults(func=cls._delete_timeframe) + + can_delete_parser = verb_parsers.add_parser("can_delete", help="Check if can delete SLOs") + can_delete_parser.add_argument("slo_ids", help="comma separated list of SLO IDs to delete", type=comma_set) + can_delete_parser.set_defaults(func=cls._can_delete) + + history_parser = verb_parsers.add_parser("history", help="Get the SLO history") + history_parser.add_argument("slo_id", help="SLO to query the history") + history_parser.add_argument( + "from_ts", + type=parse_date_as_epoch_timestamp, + help="`from` date or timestamp", + ) + history_parser.add_argument( + "to_ts", + type=parse_date_as_epoch_timestamp, + help="`to` date or timestamp", + ) + history_parser.set_defaults(func=cls._history) + + @classmethod + def _create(cls, args): + api._timeout = args.timeout + format = args.format + + params = {"type": args.type, "name": args.name} + + if args.tags: + tags = sorted(set([t.strip() for t in args.tags.split(",") if t.strip()])) + params["tags"] = tags + + thresholds = [] + for threshold_str in args.thresholds.split(","): + parts = threshold_str.split(":") + timeframe = parts[0] + target = float(parts[1]) + + threshold = {"timeframe": timeframe, "target": target} + + if len(parts) > 2: + threshold["warning"] = float(parts[2]) + + if len(parts) > 3 and parts[3]: + threshold["target_display"] = parts[3] + + if len(parts) > 4 and parts[4]: + threshold["warning_display"] = parts[4] + + thresholds.append(threshold) + params["thresholds"] = thresholds + + if args.description: + params["description"] = args.description + + if args.type == "metric": + params["query"] = { + "numerator": args.numerator, + "denominator": args.denominator, + } + elif args.monitor_search: + params["monitor_search"] = args.monitor_search + else: + params["monitor_ids"] = list(args.monitor_ids) + if args.groups and len(args.monitor_ids) == 1: + groups = args.groups.split("|") + 
params["groups"] = groups + + if args.tags: + params["tags"] = args.tags + + res = api.ServiceLevelObjective.create(**params) + report_warnings(res) + report_errors(res) + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _file_create(cls, args): + api._timeout = args.timeout + format = args.format + slo = json.load(args.file) + res = api.ServiceLevelObjective.create(return_raw=True, **slo) + report_warnings(res) + report_errors(res) + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _update(cls, args): + api._timeout = args.timeout + format = args.format + + params = {"type": args.type} + + if args.thresholds: + thresholds = [] + for threshold_str in args.thresholds.split(","): + parts = threshold_str.split(":") + timeframe = parts[0] + target = parts[1] + + threshold = {"timeframe": timeframe, "target": target} + + if len(parts) > 2: + threshold["warning"] = float(parts[2]) + + if len(parts) > 3 and parts[3]: + threshold["target_display"] = parts[3] + + if len(parts) > 4 and parts[4]: + threshold["warning_display"] = parts[4] + + thresholds.append(threshold) + params["thresholds"] = thresholds + + if args.description: + params["description"] = args.description + + if args.type == "metric": + if args.numerator and args.denominator: + params["query"] = { + "numerator": args.numerator, + "denominator": args.denominator, + } + elif args.monitor_search: + params["monitor_search"] = args.monitor_search + else: + params["monitor_ids"] = args.monitor_ids + if args.groups and len(args.monitor_ids) == 1: + groups = args.groups.split("|") + params["groups"] = groups + + if args.tags: + tags = sorted(set([t.strip() for t in args.tags if t.strip()])) + params["tags"] = tags + res = api.ServiceLevelObjective.update(args.slo_id, return_raw=True, **params) + report_warnings(res) + report_errors(res) + if format == "pretty": + print(pretty_json(res)) + else: + 
print(json.dumps(res)) + + @classmethod + def _file_update(cls, args): + api._timeout = args.timeout + format = args.format + slo = json.load(args.file) + + res = api.ServiceLevelObjective.update(slo["id"], return_raw=True, **slo) + report_warnings(res) + report_errors(res) + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _show(cls, args): + api._timeout = args.timeout + format = args.format + res = api.ServiceLevelObjective.get(args.slo_id, return_raw=True) + report_warnings(res) + report_errors(res) + + if args.string_ids: + res["id"] = str(res["id"]) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _show_all(cls, args): + api._timeout = args.timeout + format = args.format + + params = {"offset": args.offset, "limit": args.limit} + if args.query: + params["query"] = args.query + else: + params["ids"] = args.slo_ids + + res = api.ServiceLevelObjective.get_all(return_raw=True, **params) + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _delete(cls, args): + api._timeout = args.timeout + res = api.ServiceLevelObjective.delete(args.slo_id, return_raw=True) + if res is not None: + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _delete_many(cls, args): + api._timeout = args.timeout + res = api.ServiceLevelObjective.delete_many(args.slo_ids) + if res is not None: + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _delete_timeframe(cls, args): + api._timeout = args.timeout + + ops = {args.slo_id: args.timeframes} + + res = api.ServiceLevelObjective.bulk_delete(ops) + if res is not None: + report_warnings(res) + report_errors(res) + + if format == 
"pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _can_delete(cls, args): + api._timeout = args.timeout + + res = api.ServiceLevelObjective.can_delete(args.slo_ids) + if res is not None: + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _history(cls, args): + api._timeout = args.timeout + + res = api.ServiceLevelObjective.history(args.slo_id) + if res is not None: + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _escape(cls, s): + return s.replace("\r", "\\r").replace("\n", "\\n").replace("\t", "\\t") diff --git a/datadog/dogshell/tag.py b/datadog/dogshell/tag.py index 1b78faec3..3d4d2b9e4 100644 --- a/datadog/dogshell/tag.py +++ b/datadog/dogshell/tag.py @@ -1,37 +1,47 @@ -from datadog.util.compat import json -from datadog.dogshell.common import report_errors, report_warnings +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +# stdlib +import json + +# datadog from datadog import api +from datadog.dogshell.common import report_errors, report_warnings class TagClient(object): - @classmethod def setup_parser(cls, subparsers): - parser = subparsers.add_parser('tag', help="View and modify host tags.") - verb_parsers = parser.add_subparsers(title='Verbs') + parser = subparsers.add_parser("tag", help="View and modify host tags.") + verb_parsers = parser.add_subparsers(title="Verbs", dest="verb") + verb_parsers.required = True - add_parser = verb_parsers.add_parser('add', help="Add a host to one or more tags.", - description='Hosts can be specified by name or id.') - add_parser.add_argument('host', help="host to add") - add_parser.add_argument('tag', help="tag to add host to (one or more, space separated)", - nargs='+') + add_parser = verb_parsers.add_parser( + "add", help="Add a host to one or more tags.", description="Hosts can be specified by name or id." + ) + add_parser.add_argument("host", help="host to add") + add_parser.add_argument("tag", help="tag to add host to (one or more, space separated)", nargs="+") add_parser.set_defaults(func=cls._add) replace_parser = verb_parsers.add_parser( - 'replace', help="Replace all tags with one or more new tags.", - description='Hosts can be specified by name or id.') - replace_parser.add_argument('host', help="host to modify") - replace_parser.add_argument('tag', help="list of tags to add host to", nargs='+') + "replace", + help="Replace all tags with one or more new tags.", + description="Hosts can be specified by name or id.", + ) + replace_parser.add_argument("host", help="host to modify") + replace_parser.add_argument("tag", help="list of tags to add host to", nargs="+") replace_parser.set_defaults(func=cls._replace) - show_parser = verb_parsers.add_parser('show', help="Show host tags.", - description='Hosts can be specified by name or id.') - show_parser.add_argument('host', help="host to show (or 'all' 
to show all tags)") + show_parser = verb_parsers.add_parser( + "show", help="Show host tags.", description="Hosts can be specified by name or id." + ) + show_parser.add_argument("host", help="host to show (or 'all' to show all tags)") show_parser.set_defaults(func=cls._show) - detach_parser = verb_parsers.add_parser('detach', help="Remove a host from all tags.", - description='Hosts can be specified by name or id.') - detach_parser.add_argument('host', help="host to detach") + detach_parser = verb_parsers.add_parser( + "detach", help="Remove a host from all tags.", description="Hosts can be specified by name or id." + ) + detach_parser.add_argument("host", help="host to detach") detach_parser.set_defaults(func=cls._detach) @classmethod @@ -41,14 +51,14 @@ def _add(cls, args): res = api.Tag.create(args.host, tags=args.tag) report_warnings(res) report_errors(res) - if format == 'pretty': - print("Tags for '%s':" % res['host']) - for c in res['tags']: - print(' ' + c) - elif format == 'raw': + if format == "pretty": + print("Tags for '%s':" % res["host"]) + for c in res["tags"]: + print(" " + c) + elif format == "raw": print(json.dumps(res)) else: - for c in res['tags']: + for c in res["tags"]: print(c) @classmethod @@ -58,47 +68,47 @@ def _replace(cls, args): res = api.Tag.update(args.host, tags=args.tag) report_warnings(res) report_errors(res) - if format == 'pretty': - print("Tags for '%s':" % res['host']) - for c in res['tags']: - print(' ' + c) - elif format == 'raw': + if format == "pretty": + print("Tags for '%s':" % res["host"]) + for c in res["tags"]: + print(" " + c) + elif format == "raw": print(json.dumps(res)) else: - for c in res['tags']: + for c in res["tags"]: print(c) @classmethod def _show(cls, args): api._timeout = args.timeout format = args.format - if args.host == 'all': + if args.host == "all": res = api.Tag.get_all() else: res = api.Tag.get(args.host) report_warnings(res) report_errors(res) - if args.host == 'all': - if format == 'pretty': - for 
tag, hosts in list(res['tags'].items()): + if args.host == "all": + if format == "pretty": + for tag, hosts in list(res["tags"].items()): for host in hosts: print(tag) - print(' ' + host) + print(" " + host) print() - elif format == 'raw': + elif format == "raw": print(json.dumps(res)) else: - for tag, hosts in list(res['tags'].items()): + for tag, hosts in list(res["tags"].items()): for host in hosts: - print(tag + '\t' + host) + print(tag + "\t" + host) else: - if format == 'pretty': - for tag in res['tags']: + if format == "pretty": + for tag in res["tags"]: print(tag) - elif format == 'raw': + elif format == "raw": print(json.dumps(res)) else: - for tag in res['tags']: + for tag in res["tags"]: print(tag) @classmethod diff --git a/datadog/dogshell/timeboard.py b/datadog/dogshell/timeboard.py index 596c3589e..477a1b669 100644 --- a/datadog/dogshell/timeboard.py +++ b/datadog/dogshell/timeboard.py @@ -1,98 +1,122 @@ -import argparse +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +# stdlib +import json import os.path import platform import sys import webbrowser -from datetime import datetime -from datadog.util.compat import json -from datadog.dogshell.common import report_errors, report_warnings, print_err +# 3p +import argparse + +# datadog from datadog import api +from datadog.dogshell.common import report_errors, report_warnings, print_err +from datadog.util.format import pretty_json +from datetime import datetime class TimeboardClient(object): - @classmethod def setup_parser(cls, subparsers): - parser = subparsers.add_parser('timeboard', help="Create, edit, and delete timeboards") - parser.add_argument('--string_ids', action='store_true', dest='string_ids', - help="Represent timeboard IDs as strings instead of ints in JSON") - - verb_parsers = parser.add_subparsers(title='Verbs') - - post_parser = verb_parsers.add_parser('post', help="Create timeboards") - post_parser.add_argument('title', help="title for the new timeboard") - post_parser.add_argument('description', help="short description of the timeboard") - post_parser.add_argument('graphs', help="graph definitions as a JSON string. if unset," - " reads from stdin.", nargs="?") - post_parser.add_argument('--template_variables', type=_template_variables, default=[], - help="a json list of template variable dicts, e.g. 
" - "[{'name': 'host', 'prefix': 'host', " - "'default': 'host:my-host'}]\'") + parser = subparsers.add_parser("timeboard", help="Create, edit, and delete timeboards") + parser.add_argument( + "--string_ids", + action="store_true", + dest="string_ids", + help="Represent timeboard IDs as strings instead of ints in JSON", + ) + + verb_parsers = parser.add_subparsers(title="Verbs", dest="verb") + verb_parsers.required = True + + post_parser = verb_parsers.add_parser("post", help="Create timeboards") + post_parser.add_argument("title", help="title for the new timeboard") + post_parser.add_argument("description", help="short description of the timeboard") + post_parser.add_argument( + "graphs", help="graph definitions as a JSON string. if unset," " reads from stdin.", nargs="?" + ) + post_parser.add_argument( + "--template_variables", + type=_template_variables, + default=[], + help="a json list of template variable dicts, e.g. " + "[{'name': 'host', 'prefix': 'host', " + "'default': 'host:my-host'}]'", + ) post_parser.set_defaults(func=cls._post) - update_parser = verb_parsers.add_parser('update', help="Update existing timeboards") - update_parser.add_argument('timeboard_id', help="timeboard to replace" - " with the new definition") - update_parser.add_argument('title', help="new title for the timeboard") - update_parser.add_argument('description', help="short description of the timeboard") - update_parser.add_argument('graphs', help="graph definitions as a JSON string." - " if unset, reads from stdin", nargs="?") - update_parser.add_argument('--template_variables', type=_template_variables, default=[], - help="a json list of template variable dicts, e.g. 
" - "[{'name': 'host', 'prefix': 'host', " - "'default': 'host:my-host'}]\'") + update_parser = verb_parsers.add_parser("update", help="Update existing timeboards") + update_parser.add_argument("timeboard_id", help="timeboard to replace" " with the new definition") + update_parser.add_argument("title", help="new title for the timeboard") + update_parser.add_argument("description", help="short description of the timeboard") + update_parser.add_argument( + "graphs", help="graph definitions as a JSON string." " if unset, reads from stdin", nargs="?" + ) + update_parser.add_argument( + "--template_variables", + type=_template_variables, + default=[], + help="a json list of template variable dicts, e.g. " + "[{'name': 'host', 'prefix': 'host', " + "'default': 'host:my-host'}]'", + ) update_parser.set_defaults(func=cls._update) - show_parser = verb_parsers.add_parser('show', help="Show a timeboard definition") - show_parser.add_argument('timeboard_id', help="timeboard to show") + show_parser = verb_parsers.add_parser("show", help="Show a timeboard definition") + show_parser.add_argument("timeboard_id", help="timeboard to show") show_parser.set_defaults(func=cls._show) - show_all_parser = verb_parsers.add_parser('show_all', help="Show a list of all timeboards") + show_all_parser = verb_parsers.add_parser("show_all", help="Show a list of all timeboards") show_all_parser.set_defaults(func=cls._show_all) - pull_parser = verb_parsers.add_parser('pull', help="Pull a timeboard on the server" - " into a local file") - pull_parser.add_argument('timeboard_id', help="ID of timeboard to pull") - pull_parser.add_argument('filename', help="file to pull timeboard into") + pull_parser = verb_parsers.add_parser("pull", help="Pull a timeboard on the server" " into a local file") + pull_parser.add_argument("timeboard_id", help="ID of timeboard to pull") + pull_parser.add_argument("filename", help="file to pull timeboard into") pull_parser.set_defaults(func=cls._pull) - pull_all_parser = 
verb_parsers.add_parser('pull_all', help="Pull all timeboards" - " into files in a directory") - pull_all_parser.add_argument('pull_dir', help="directory to pull timeboards into") + pull_all_parser = verb_parsers.add_parser("pull_all", help="Pull all timeboards" " into files in a directory") + pull_all_parser.add_argument("pull_dir", help="directory to pull timeboards into") pull_all_parser.set_defaults(func=cls._pull_all) - push_parser = verb_parsers.add_parser('push', help="Push updates to timeboards" - " from local files to the server") - push_parser.add_argument('--append_auto_text', action='store_true', dest='append_auto_text', - help="When pushing to the server, appends filename" - " and timestamp to the end of the timeboard description") - push_parser.add_argument('file', help="timeboard files to push to the server", - nargs='+', type=argparse.FileType('r')) + push_parser = verb_parsers.add_parser( + "push", help="Push updates to timeboards" " from local files to the server" + ) + push_parser.add_argument( + "--append_auto_text", + action="store_true", + dest="append_auto_text", + help="When pushing to the server, appends filename" + " and timestamp to the end of the timeboard description", + ) + push_parser.add_argument( + "file", help="timeboard files to push to the server", nargs="+", type=argparse.FileType("r") + ) push_parser.set_defaults(func=cls._push) - new_file_parser = verb_parsers.add_parser('new_file', help="Create a new timeboard" - " and put its contents in a file") - new_file_parser.add_argument('filename', help="name of file to create with empty timeboard") - new_file_parser.add_argument('graphs', help="graph definitions as a JSON string." 
- " if unset, reads from stdin.", nargs="?") + new_file_parser = verb_parsers.add_parser( + "new_file", help="Create a new timeboard" " and put its contents in a file" + ) + new_file_parser.add_argument("filename", help="name of file to create with empty timeboard") + new_file_parser.add_argument( + "graphs", help="graph definitions as a JSON string." " if unset, reads from stdin.", nargs="?" + ) new_file_parser.set_defaults(func=cls._new_file) - web_view_parser = verb_parsers.add_parser('web_view', - help="View the timeboard in a web browser") - web_view_parser.add_argument('file', help="timeboard file", type=argparse.FileType('r')) + web_view_parser = verb_parsers.add_parser("web_view", help="View the timeboard in a web browser") + web_view_parser.add_argument("file", help="timeboard file", type=argparse.FileType("r")) web_view_parser.set_defaults(func=cls._web_view) - delete_parser = verb_parsers.add_parser('delete', help="Delete timeboards") - delete_parser.add_argument('timeboard_id', help="timeboard to delete") + delete_parser = verb_parsers.add_parser("delete", help="Delete timeboards") + delete_parser.add_argument("timeboard_id", help="timeboard to delete") delete_parser.set_defaults(func=cls._delete) @classmethod def _pull(cls, args): - cls._write_dash_to_file( - args.timeboard_id, args.filename, - args.timeout, args.format, args.string_ids) + cls._write_dash_to_file(args.timeboard_id, args.filename, args.timeout, args.format, args.string_ids) @classmethod def _pull_all(cls, args): @@ -100,12 +124,12 @@ def _pull_all(cls, args): def _title_to_filename(title): # Get a lowercased version with most punctuation stripped out... 
- no_punct = ''.join([c for c in title.lower() if c.isalnum() or c in [" ", "_", "-"]]) + no_punct = "".join([c for c in title.lower() if c.isalnum() or c in [" ", "_", "-"]]) # Now replace all -'s, _'s and spaces with "_", and strip trailing _ return no_punct.replace(" ", "_").replace("-", "_").strip("_") format = args.format - res = api.Timeboard.get_all()() + res = api.Timeboard.get_all() report_warnings(res) report_errors(res) @@ -113,18 +137,23 @@ def _title_to_filename(title): os.mkdir(args.pull_dir, 0o755) used_filenames = set() - for dash_summary in res['dashes']: - filename = _title_to_filename(dash_summary['title']) + for dash_summary in res["dashes"]: + filename = _title_to_filename(dash_summary["title"]) if filename in used_filenames: - filename = filename + "-" + dash_summary['id'] + filename = filename + "-" + dash_summary["id"] used_filenames.add(filename) cls._write_dash_to_file( - dash_summary['id'], os.path.join(args.pull_dir, filename + ".json"), - args.timeout, format, args.string_ids) - if format == 'pretty': - print(("\n### Total: {0} dashboards to {1} ###" - .format(len(used_filenames), os.path.realpath(args.pull_dir)))) + dash_summary["id"], + os.path.join(args.pull_dir, filename + ".json"), + args.timeout, + format, + args.string_ids, + ) + if format == "pretty": + print( + ("\n### Total: {0} dashboards to {1} ###".format(len(used_filenames), os.path.realpath(args.pull_dir))) + ) @classmethod def _new_file(cls, args): @@ -133,27 +162,23 @@ def _new_file(cls, args): graphs = args.graphs if args.graphs is None: graphs = sys.stdin.read() - try: - graphs = json.loads(graphs) - except: - raise Exception('bad json parameter') + graphs = json.loads(graphs) res = api.Timeboard.create( - title=args.filename, - description="Description for {0}".format(args.filename), - graphs=[graphs]) + title=args.filename, description="Description for {0}".format(args.filename), graphs=[graphs] + ) + report_warnings(res) report_errors(res) - 
cls._write_dash_to_file(res['dash']['id'], args.filename, - args.timeout, format, args.string_ids) + cls._write_dash_to_file(res["dash"]["id"], args.filename, args.timeout, format, args.string_ids) - if format == 'pretty': - print(cls._pretty_json(res)) + if format == "pretty": + print(pretty_json(res)) else: print(json.dumps(res)) @classmethod - def _write_dash_to_file(cls, dash_id, filename, timeout, format='raw', string_ids=False): + def _write_dash_to_file(cls, dash_id, filename, timeout, format="raw", string_ids=False): with open(filename, "w") as f: res = api.Timeboard.get(dash_id) report_warnings(res) @@ -168,12 +193,15 @@ def _write_dash_to_file(cls, dash_id, filename, timeout, format='raw', string_id if string_ids: dash_obj["id"] = str(dash_obj["id"]) + if not dash_obj.get("template_variables"): + dash_obj.pop("template_variables", None) + json.dump(dash_obj, f, indent=2) - if format == 'pretty': - print("Downloaded dashboard {0} to file {1}".format(dash_id, filename)) + if format == "pretty": + print(u"Downloaded dashboard {0} to file {1}".format(dash_id, filename)) else: - print("{0} {1}".format(dash_id, filename)) + print(u"{0} {1}".format(dash_id, filename)) @classmethod def _push(cls, args): @@ -185,36 +213,43 @@ def _push(cls, args): raise Exception("Could not parse {0}: {1}".format(f.name, err)) if args.append_auto_text: - datetime_str = datetime.now().strftime('%x %X') - auto_text = ("
\nUpdated at {0} from {1} ({2}) on {3}" - .format(datetime_str, f.name, dash_obj["id"], platform.node())) + datetime_str = datetime.now().strftime("%x %X") + auto_text = "
\nUpdated at {0} from {1} ({2}) on {3}".format( + datetime_str, f.name, dash_obj["id"], platform.node() + ) dash_obj["description"] += auto_text tpl_vars = dash_obj.get("template_variables", []) - if 'id' in dash_obj: + if "id" in dash_obj: # Always convert to int, in case it was originally a string. dash_obj["id"] = int(dash_obj["id"]) - res = api.Timeboard.update(dash_obj["id"], title=dash_obj["title"], - description=dash_obj["description"], - graphs=dash_obj["graphs"], template_variables=tpl_vars) + res = api.Timeboard.update( + dash_obj["id"], + title=dash_obj["title"], + description=dash_obj["description"], + graphs=dash_obj["graphs"], + template_variables=tpl_vars, + ) else: - res = api.Timeboard.create(title=dash_obj["title"], - description=dash_obj["description"], - graphs=dash_obj["graphs"], template_variables=tpl_vars) + res = api.Timeboard.create( + title=dash_obj["title"], + description=dash_obj["description"], + graphs=dash_obj["graphs"], + template_variables=tpl_vars, + ) - if 'errors' in res: - print_err('Upload of dashboard {0} from file {1} failed.' 
- .format(dash_obj["id"], f.name)) + if "errors" in res: + print_err("Upload of dashboard {0} from file {1} failed.".format(dash_obj["id"], f.name)) report_warnings(res) report_errors(res) - if format == 'pretty': - print(cls._pretty_json(res)) + if format == "pretty": + print(pretty_json(res)) else: print(json.dumps(res)) - if args.format == 'pretty': + if args.format == "pretty": print("Uploaded file {0} (dashboard {1})".format(f.name, dash_obj["id"])) @classmethod @@ -224,16 +259,14 @@ def _post(cls, args): graphs = args.graphs if args.graphs is None: graphs = sys.stdin.read() - try: - graphs = json.loads(graphs) - except: - raise Exception('bad json parameter') - res = api.Timeboard.create(title=args.title, description=args.description, graphs=[graphs], - template_variables=args.template_variables) + graphs = json.loads(graphs) + res = api.Timeboard.create( + title=args.title, description=args.description, graphs=[graphs], template_variables=args.template_variables + ) report_warnings(res) report_errors(res) - if format == 'pretty': - print(cls._pretty_json(res)) + if format == "pretty": + print(pretty_json(res)) else: print(json.dumps(res)) @@ -244,18 +277,19 @@ def _update(cls, args): graphs = args.graphs if args.graphs is None: graphs = sys.stdin.read() - try: - graphs = json.loads(graphs) - except: - raise Exception('bad json parameter') - - res = api.Timeboard.update(args.timeboard_id, title=args.title, - description=args.description, graphs=graphs, - template_variables=args.template_variables) + graphs = json.loads(graphs) + + res = api.Timeboard.update( + args.timeboard_id, + title=args.title, + description=args.description, + graphs=graphs, + template_variables=args.template_variables, + ) report_warnings(res) report_errors(res) - if format == 'pretty': - print(cls._pretty_json(res)) + if format == "pretty": + print(pretty_json(res)) else: print(json.dumps(res)) @@ -270,8 +304,8 @@ def _show(cls, args): if args.string_ids: res["dash"]["id"] = 
str(res["dash"]["id"]) - if format == 'pretty': - print(cls._pretty_json(res)) + if format == "pretty": + print(pretty_json(res)) else: print(json.dumps(res)) @@ -287,16 +321,13 @@ def _show_all(cls, args): for d in res["dashes"]: d["id"] = str(d["id"]) - if format == 'pretty': - print(cls._pretty_json(res)) - elif format == 'raw': + if format == "pretty": + print(pretty_json(res)) + elif format == "raw": print(json.dumps(res)) else: for d in res["dashes"]: - print("\t".join([(d["id"]), - (d["resource"]), - (d["title"]), - cls._escape(d["description"])])) + print("\t".join([(d["id"]), (d["resource"]), (d["title"]), cls._escape(d["description"])])) @classmethod def _delete(cls, args): @@ -308,24 +339,20 @@ def _delete(cls, args): @classmethod def _web_view(cls, args): - dash_id = json.load(args.file)['id'] + dash_id = json.load(args.file)["id"] url = api._api_host + "/dash/dash/{0}".format(dash_id) webbrowser.open(url) @classmethod def _escape(cls, s): - return s.replace("\r", "\\r").replace("\n", "\\n").replace("\t", "\\t") - - @classmethod - def _pretty_json(cls, obj): - return json.dumps(obj, sort_keys=True, indent=2) + return s.replace("\r", "\\r").replace("\n", "\\n").replace("\t", "\\t") if s else "" def _template_variables(tpl_var_input): - if '[' not in tpl_var_input: - return [v.strip() for v in tpl_var_input.split(',')] + if "[" not in tpl_var_input: + return [v.strip() for v in tpl_var_input.split(",")] else: try: return json.loads(tpl_var_input) except Exception: - raise argparse.ArgumentTypeError('bad template_variable json parameter') + raise argparse.ArgumentTypeError("bad template_variable json parameter") diff --git a/datadog/dogshell/wrap.py b/datadog/dogshell/wrap.py index 31f37517e..25df6d961 100644 --- a/datadog/dogshell/wrap.py +++ b/datadog/dogshell/wrap.py @@ -1,4 +1,7 @@ -''' +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. 
+# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +""" Wraps shell commands and sends the result to Datadog as events. Ex: @@ -16,20 +19,29 @@ dogwrap -n test-job -k $API_KEY --timeout=1 "sleep 3" -''' +""" +# stdlib +from __future__ import print_function -import sys -import time +import os +from copy import copy import optparse -import threading import subprocess -import pkg_resources as pkg +import sys +import threading +import time +import warnings + +# datadog +from datadog import initialize, api, __version__ +from datadog.util.compat import is_p3k -from datadog import initialize, api +SUCCESS = "success" +ERROR = "error" +WARNING = "warning" -SUCCESS = 'success' -ERROR = 'error' +MAX_EVENT_BODY_LENGTH = 3000 class Timeout(Exception): @@ -37,48 +49,48 @@ class Timeout(Exception): class OutputReader(threading.Thread): - ''' + """ Thread collecting the output of a subprocess, optionally forwarding it to a given file descriptor and storing it for further retrieval. - ''' + """ + def __init__(self, proc_out, fwd_out=None): - ''' + """ Instantiates an OutputReader. :param proc_out: the output to read :type proc_out: file descriptor :param fwd_out: the output to forward to (None to disable forwarding) :type fwd_out: file descriptor or None - ''' + """ threading.Thread.__init__(self) self.daemon = True - self._out_content = '' + self._out_content = b"" self._out = proc_out self._fwd_out = fwd_out def run(self): - ''' + """ Thread's main loop: collects the output optionnally forwarding it to the file descriptor passed in the constructor. - ''' - for line in iter(self._out.readline, b''): + """ + for line in iter(self._out.readline, b""): if self._fwd_out is not None: self._fwd_out.write(line) - self._out_content += line self._out.close() @property def content(self): - ''' + """ The content stored in out so far. 
(Not threadsafe, wait with .join()) - ''' + """ return self._out_content def poll_proc(proc, sleep_interval, timeout): - ''' + """ Polls the process until it returns or a given timeout has been reached - ''' + """ start_time = time.time() returncode = None while returncode is None: @@ -90,173 +102,419 @@ def poll_proc(proc, sleep_interval, timeout): return returncode -def execute(cmd, cmd_timeout, sigterm_timeout, sigkill_timeout, - proc_poll_interval, buffer_outs): - ''' +def execute(cmd, cmd_timeout, sigterm_timeout, sigkill_timeout, proc_poll_interval, buffer_outs): + """ Launches the process and monitors its outputs - ''' + """ start_time = time.time() returncode = -1 - stdout = '' - stderr = '' + stdout = b"" + stderr = b"" try: - proc = subprocess.Popen(u' '.join(cmd), stdout=subprocess.PIPE, - stderr=subprocess.PIPE, shell=True) + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) except Exception: - print >> sys.stderr, u"Failed to execute %s" % (repr(cmd)) + print(u"Failed to execute %s" % (repr(cmd)), file=sys.stderr) raise try: # Let's that the threads collecting the output from the command in the # background - out_reader = OutputReader(proc.stdout, sys.stdout if not buffer_outs else None) - err_reader = OutputReader(proc.stderr, sys.stderr if not buffer_outs else None) + stdout_buffer = sys.stdout.buffer if is_p3k() else sys.stdout + stderr_buffer = sys.stderr.buffer if is_p3k() else sys.stderr + out_reader = OutputReader(proc.stdout, stdout_buffer if not buffer_outs else None) + err_reader = OutputReader(proc.stderr, stderr_buffer if not buffer_outs else None) out_reader.start() err_reader.start() - # Let's quietly wait from the program's completion here et get the exit + # Let's quietly wait from the program's completion here to get the exit # code when it finishes returncode = poll_proc(proc, proc_poll_interval, cmd_timeout) - - # Let's harvest the outputs collected by our background threads after - # making 
sure they're done reading it. - out_reader.join() - err_reader.join() - stdout = out_reader.content - stderr = err_reader.content - - duration = time.time() - start_time except Timeout: - duration = time.time() - start_time + returncode = Timeout + sigterm_start = time.time() + print("Command timed out after %.2fs, killing with SIGTERM" % (time.time() - start_time), file=sys.stderr) try: proc.terminate() - sigterm_start = time.time() try: - print >> sys.stderr, "Command timed out after %.2fs, killing with SIGTERM" \ - % (time.time() - start_time) poll_proc(proc, proc_poll_interval, sigterm_timeout) - returncode = Timeout except Timeout: - print >> sys.stderr, "SIGTERM timeout failed after %.2fs, killing with SIGKILL" \ - % (time.time() - sigterm_start) + print( + "SIGTERM timeout failed after %.2fs, killing with SIGKILL" % (time.time() - sigterm_start), + file=sys.stderr, + ) + sigkill_start = time.time() proc.kill() - poll_proc(proc, proc_poll_interval, sigkill_timeout) - returncode = Timeout + try: + poll_proc(proc, proc_poll_interval, sigkill_timeout) + except Timeout: + print( + "SIGKILL timeout failed after %.2fs, exiting" % (time.time() - sigkill_start), file=sys.stderr + ) except OSError as e: # Ignore OSError 3: no process found. if e.errno != 3: raise + + # Let's harvest the outputs collected by our background threads + # after making sure they're done reading it. + out_reader.join() + err_reader.join() + stdout = out_reader.content + stderr = err_reader.content + + duration = time.time() - start_time + return returncode, stdout, stderr, duration -def main(): - parser = optparse.OptionParser(usage="%prog -n [event_name] -k [api_key] --submit_mode i\ -[ all | errors ] [options] \"command\". \n\nNote that you need to enclose your command in \ -quotes to prevent python as soon as there is a space in your command. 
\n \nNOTICE: In normal \ -mode, the whole stderr is printed before stdout, in flush_live mode they will be mixed but there \ -is not guarantee that messages sent by the command on both stderr and stdout are printed in the \ -order they were sent.", version="%prog {0}".format(pkg.require("datadog")[0].version)) - - parser.add_option('-n', '--name', action='store', type='string', help="the name of the event \ -as it should appear on your Datadog stream") - parser.add_option('-k', '--api_key', action='store', type='string', - help="your DataDog API Key") - parser.add_option('-m', '--submit_mode', action='store', type='choice', - default='errors', choices=['errors', 'all'], help="[ all | errors ] if set \ +def trim_text(text, max_len): + """ + Trim input text to fit the `max_len` condition. + + If trim is needed: keep the first 1/3rd of the budget on the top, + and the other 2 thirds on the bottom. + """ + if len(text) <= max_len: + return text + + trimmed_text = ( + u"{top_third}\n" + u"```\n" + u"*...trimmed...*\n" + u"```\n" + u"{bottom_two_third}\n".format( + top_third=text[: max_len // 3], bottom_two_third=text[len(text) - (2 * max_len) // 3 :] + ) + ) + + return trimmed_text + + +def build_event_body(cmd, returncode, stdout, stderr, notifications): + """ + Format and return an event body. + + Note: do not exceed MAX_EVENT_BODY_LENGTH length. 
+ """ + fmt_stdout = u"" + fmt_stderr = u"" + fmt_notifications = u"" + + max_length = MAX_EVENT_BODY_LENGTH // 2 if stdout and stderr else MAX_EVENT_BODY_LENGTH + + if stdout: + fmt_stdout = u"**>>>> STDOUT <<<<**\n```\n{stdout} \n```\n".format( + stdout=trim_text(stdout.decode("utf-8", "replace"), max_length) + ) + + if stderr: + fmt_stderr = u"**>>>> STDERR <<<<**\n```\n{stderr} \n```\n".format( + stderr=trim_text(stderr.decode("utf-8", "replace"), max_length) + ) + + if notifications: + notifications = notifications.decode("utf-8", "replace") if isinstance(notifications, bytes) else notifications + fmt_notifications = u"**>>>> NOTIFICATIONS <<<<**\n\n {notifications}\n".format(notifications=notifications) + + return ( + u"%%%\n" + u"**>>>> CMD <<<<**\n```\n{command} \n```\n" + u"**>>>> EXIT CODE <<<<**\n\n {returncode}\n\n\n" + u"{stdout}" + u"{stderr}" + u"{notifications}" + u"%%%\n".format( + command=cmd, + returncode=returncode, + stdout=fmt_stdout, + stderr=fmt_stderr, + notifications=fmt_notifications, + ) + ) + + +def generate_warning_codes(option, opt, options_warning): + try: + # options_warning is a string e.g.: --warning_codes 123,456,789 + # we need to create a list from it + warning_codes = options_warning.split(",") + return warning_codes + except ValueError: + raise optparse.OptionValueError("option %s: invalid warning codes value(s): %r" % (opt, options_warning)) + + +class DogwrapOption(optparse.Option): + # https://docs.python.org/3.7/library/optparse.html#adding-new-types + TYPES = optparse.Option.TYPES + ("warning_codes",) + TYPE_CHECKER = copy(optparse.Option.TYPE_CHECKER) + TYPE_CHECKER["warning_codes"] = generate_warning_codes + + +def parse_options(raw_args=None): + """ + Parse the raw command line options into an options object and the remaining command string + """ + parser = optparse.OptionParser( + usage='%prog -n [event_name] -k [api_key] --submit_mode \ +[ all | errors | warnings] [options] "command". 
\n\nNote that you need to enclose your command in \ +quotes to prevent python executing as soon as there is a space in your command. \n \nNOTICE: In \ +normal mode, the whole stderr is printed before stdout, in flush_live mode they will be mixed but \ +there is not guarantee that messages sent by the command on both stderr and stdout are printed in \ +the order they were sent.', + version="%prog {0}".format(__version__), + option_class=DogwrapOption, + ) + + parser.add_option( + "-n", + "--name", + action="store", + type="string", + help="the name of the event \ +as it should appear on your Datadog stream", + ) + parser.add_option( + "-k", + "--api_key", + action="store", + type="string", + help="your DataDog API Key", + default=os.environ.get("DD_API_KEY"), + ) + parser.add_option( + "-s", + "--site", + action="store", + type="string", + default="datadoghq.com", + help="The site to send data. Accepts us (datadoghq.com), eu (datadoghq.eu), \ +us3 (us3.datadoghq.com), us5 (us5.datadoghq.com), or ap1 (ap1.datadoghq.com), \ +gov (ddog-gov.com), or custom url. default: us", + ) + parser.add_option( + "-m", + "--submit_mode", + action="store", + type="choice", + default="errors", + choices=["errors", "warnings", "all"], + help="[ all | errors | warnings ] if set \ to error, an event will be sent only of the command exits with a non zero exit status or if it \ -times out.") - parser.add_option('-p', '--priority', action='store', type='choice', choices=['normal', 'low'], - help="the priority of the event (default: 'normal')") - parser.add_option('-t', '--timeout', action='store', type='int', default=60 * 60 * 24, - help="(in seconds) a timeout after which your command must be aborted. An \ -event will be sent to your DataDog stream (default: 24hours)") - parser.add_option('--sigterm_timeout', action='store', type='int', default=60 * 2, - help="(in seconds) When your command times out, the \ +times out. 
If set to warning, a list of exit codes need to be provided", + ) + parser.add_option( + "--warning_codes", + action="store", + type="warning_codes", + dest="warning_codes", + help="comma separated list of warning codes, e.g: 127,255", + ) + parser.add_option( + "-p", + "--priority", + action="store", + type="choice", + choices=["normal", "low"], + help="the priority of the event (default: 'normal')", + ) + parser.add_option( + "-t", + "--timeout", + action="store", + type="int", + default=60 * 60 * 24, + help="(in seconds) a timeout after which your command must be aborted. An \ +event will be sent to your DataDog stream (default: 24hours)", + ) + parser.add_option( + "--sigterm_timeout", + action="store", + type="int", + default=60 * 2, + help="(in seconds) When your command times out, the \ process it triggers is sent a SIGTERM. If this sigterm_timeout is reached, it will be sent a \ -SIGKILL signal. (default: 2m)") - parser.add_option('--sigkill_timeout', action='store', type='int', default=60, - help="(in seconds) how long to wait at most after SIGKILL \ - has been sent (default: 60s)") - parser.add_option('--proc_poll_interval', action='store', type='float', default=0.5, - help="(in seconds). 
interval at which your command will be polled \ -(default: 500ms)") - parser.add_option('--notify_success', action='store', type='string', default='', - help="a message string and @people directives to send notifications in \ -case of success.") - parser.add_option('--notify_error', action='store', type='string', default='', - help="a message string and @people directives to send notifications in \ -case of error.") - parser.add_option('-b', '--buffer_outs', action='store_true', dest='buffer_outs', default=False, - help="displays the stderr and stdout of the command only once it has \ -returned (the command outputs remains buffered in dogwrap meanwhile)") - - options, args = parser.parse_args() - - cmd = [] - for part in args: - cmd.extend(part.split(' ')) +SIGKILL signal. (default: 2m)", + ) + parser.add_option( + "--sigkill_timeout", + action="store", + type="int", + default=60, + help="(in seconds) how long to wait at most after SIGKILL \ + has been sent (default: 60s)", + ) + parser.add_option( + "--proc_poll_interval", + action="store", + type="float", + default=0.5, + help="(in seconds). 
interval at which your command will be polled \ +(default: 500ms)", + ) + parser.add_option( + "--notify_success", + action="store", + type="string", + default="", + help="a message string and @people directives to send notifications in \ +case of success.", + ) + parser.add_option( + "--notify_error", + action="store", + type="string", + default="", + help="a message string and @people directives to send notifications in \ +case of error.", + ) + parser.add_option( + "--notify_warning", + action="store", + type="string", + default="", + help="a message string and @people directives to send notifications in \ + case of warning.", + ) + parser.add_option( + "-b", + "--buffer_outs", + action="store_true", + dest="buffer_outs", + default=False, + help="displays the stderr and stdout of the command only once it has \ +returned (the command outputs remains buffered in dogwrap meanwhile)", + ) + parser.add_option( + "--send_metric", + action="store_true", + dest="send_metric", + default=False, + help="sends a metric for event duration", + ) + parser.add_option( + "--tags", action="store", type="string", dest="tags", default="", help="comma separated list of tags" + ) + + options, args = parser.parse_args(args=raw_args) + + if is_p3k(): + cmd = " ".join(args) + else: + cmd = b" ".join(args).decode("utf-8") + + return options, cmd + + +def main(): + options, cmd = parse_options() + # If silent is checked we force the outputs to be buffered (and therefore # not forwarded to the Terminal streams) and we just avoid printing the # buffers at the end returncode, stdout, stderr, duration = execute( - cmd, options.timeout, - options.sigterm_timeout, options.sigkill_timeout, - options.proc_poll_interval, options.buffer_outs) + cmd, + options.timeout, + options.sigterm_timeout, + options.sigkill_timeout, + options.proc_poll_interval, + options.buffer_outs, + ) + + if options.site in ("datadoghq.com", "us"): + api_host = "https://api.datadoghq.com" + elif options.site in 
("datadoghq.eu", "eu"): + api_host = "https://api.datadoghq.eu" + elif options.site in ("us3.datadoghq.com", "us3"): + api_host = "https://api.us3.datadoghq.com" + elif options.site in ("us5.datadoghq.com", "us5"): + api_host = "https://api.us5.datadoghq.com" + elif options.site in ("ap1.datadoghq.com", "ap1"): + api_host = "https://api.ap1.datadoghq.com" + elif options.site in ("ddog-gov.com", "gov"): + api_host = "https://api.ddog-gov.com" + else: + api_host = options.site - initialize(api_key=options.api_key) + initialize(api_key=options.api_key, api_host=api_host) host = api._host_name + warning_codes = None + + if options.warning_codes: + # Convert warning codes from string to int since return codes will evaluate the latter + warning_codes = list(map(int, options.warning_codes)) + if returncode == 0: alert_type = SUCCESS - event_priority = 'low' - event_title = u'[%s] %s succeeded in %.2fs' % (host, options.name, - duration) + event_priority = "low" + event_title = u"[%s] %s succeeded in %.2fs" % (host, options.name, duration) + elif returncode != 0 and options.submit_mode == "warnings": + if not warning_codes: + # the list of warning codes is empty - the option was not specified + print("A comma separated list of exit codes need to be provided") + sys.exit() + elif returncode in warning_codes: + alert_type = WARNING + event_priority = "normal" + event_title = u"[%s] %s failed in %.2fs" % (host, options.name, duration) + else: + print("Command exited with a different exit code that the one(s) provided") + sys.exit() else: alert_type = ERROR - event_priority = 'normal' + event_priority = "normal" if returncode is Timeout: - event_title = u'[%s] %s timed out after %.2fs' % (host, options.name, duration) + event_title = u"[%s] %s timed out after %.2fs" % (host, options.name, duration) returncode = -1 else: - event_title = u'[%s] %s failed in %.2fs' % (host, options.name, duration) - - event_body = [u'%%%\n', - u'commmand:\n```\n', u' '.join(cmd), u'\n```\n', - 
u'exit code: %s\n\n' % returncode, ] - - if stdout: - event_body.extend([u'stdout:\n```\n', stdout, u'\n```\n']) - if stderr: - event_body.extend([u'stderr:\n```\n', stderr, u'\n```\n']) + event_title = u"[%s] %s failed in %.2fs" % (host, options.name, duration) notifications = "" + if alert_type == SUCCESS and options.notify_success: notifications = options.notify_success elif alert_type == ERROR and options.notify_error: notifications = options.notify_error + elif alert_type == WARNING and options.notify_warning: + notifications = options.notify_warning - if notifications: - event_body.extend([u'notifications: %s\n' % (notifications)]) + if options.tags: + tags = [t.strip() for t in options.tags.split(",")] + else: + tags = None + + event_body = build_event_body(cmd, returncode, stdout, stderr, notifications) - event_body.append(u'%%%\n') - # ensure all strings are parsed as utf-8 - event_body = [x.decode('utf-8') for x in event_body] - event_body = u''.join(event_body) event = { - 'alert_type': alert_type, - 'aggregation_key': options.name, - 'host': host, - 'priority': options.priority or event_priority, + "alert_type": alert_type, + "aggregation_key": options.name, + "host": host, + "priority": options.priority or event_priority, + "tags": tags, } if options.buffer_outs: - print >> sys.stderr, stderr.strip() - print >> sys.stdout, stdout.strip() - - if options.submit_mode == 'all' or returncode != 0: + if is_p3k(): + stderr = stderr.decode("utf-8") + stdout = stdout.decode("utf-8") + + print(stderr.strip(), file=sys.stderr) + print(stdout.strip(), file=sys.stdout) + + if options.submit_mode == "all" or returncode != 0: + if options.send_metric: + event_name_tag = "event_name:{}".format(options.name) + if tags: + duration_tags = tags + [event_name_tag] + else: + duration_tags = [event_name_tag] + api.Metric.send(metric="dogwrap.duration", points=duration, tags=duration_tags, type="gauge") api.Event.create(title=event_title, text=event_body, **event) 
sys.exit(returncode) -if __name__ == '__main__': + +if __name__ == "__main__": + if sys.argv[0].endswith("dogwrap"): + warnings.warn("dogwrap is pending deprecation. Please use dogshellwrap instead.", PendingDeprecationWarning) main() diff --git a/datadog/dogstatsd/__init__.py b/datadog/dogstatsd/__init__.py index dbbe8ad3f..d6fa52772 100644 --- a/datadog/dogstatsd/__init__.py +++ b/datadog/dogstatsd/__init__.py @@ -1 +1,4 @@ -from datadog.dogstatsd.base import statsd # noqa +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from datadog.dogstatsd.base import DogStatsd, statsd # noqa diff --git a/datadog/dogstatsd/aggregator.py b/datadog/dogstatsd/aggregator.py new file mode 100644 index 000000000..4a805b75e --- /dev/null +++ b/datadog/dogstatsd/aggregator.py @@ -0,0 +1,62 @@ +import threading +from datadog.dogstatsd.metrics import ( + CountMetric, + GaugeMetric, + SetMetric, +) +from datadog.dogstatsd.metric_types import MetricType + + +class Aggregator(object): + def __init__(self): + self.metrics_map = { + MetricType.COUNT: {}, + MetricType.GAUGE: {}, + MetricType.SET: {}, + } + self._locks = { + MetricType.COUNT: threading.RLock(), + MetricType.GAUGE: threading.RLock(), + MetricType.SET: threading.RLock(), + } + + def flush_aggregated_metrics(self): + metrics = [] + for metric_type in self.metrics_map.keys(): + with self._locks[metric_type]: + current_metrics = self.metrics_map[metric_type] + self.metrics_map[metric_type] = {} + for metric in current_metrics.values(): + metrics.extend(metric.get_data() if isinstance(metric, SetMetric) else [metric]) + return metrics + + def get_context(self, name, tags): + tags_str = ",".join(tags) if tags is not None else "" + return "{}:{}".format(name, tags_str) + + def count(self, name, value, tags, rate, timestamp=0): + return 
self.add_metric( + MetricType.COUNT, CountMetric, name, value, tags, rate, timestamp + ) + + def gauge(self, name, value, tags, rate, timestamp=0): + return self.add_metric( + MetricType.GAUGE, GaugeMetric, name, value, tags, rate, timestamp + ) + + def set(self, name, value, tags, rate, timestamp=0): + return self.add_metric( + MetricType.SET, SetMetric, name, value, tags, rate, timestamp + ) + + def add_metric( + self, metric_type, metric_class, name, value, tags, rate, timestamp=0 + ): + context = self.get_context(name, tags) + with self._locks[metric_type]: + if context in self.metrics_map[metric_type]: + self.metrics_map[metric_type][context].aggregate(value) + else: + self.metrics_map[metric_type][context] = metric_class( + name, value, tags, rate, timestamp + ) diff --git a/datadog/dogstatsd/base.py b/datadog/dogstatsd/base.py index f48e50e11..da9ece563 100644 --- a/datadog/dogstatsd/base.py +++ b/datadog/dogstatsd/base.py @@ -1,174 +1,1040 @@ #!/usr/bin/env python + +# Unless explicitly stated otherwise all files in this repository are licensed under +# the BSD-3-Clause License. This product includes software developed at Datadog +# (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc """ DogStatsd is a Python client for DogStatsd, a Statsd fork for Datadog. """ - -import logging +# Standard libraries from random import random -from time import time +import logging +import os import socket -from functools import wraps +import errno +import threading +import time +from threading import Lock, RLock +import weakref try: - from itertools import imap + import queue except ImportError: - imap = map + # pypy has the same module, but capitalized. 
+ import Queue as queue # type: ignore[no-redef] + + +# pylint: disable=unused-import +from typing import Optional, List, Text, Union +# pylint: enable=unused-import + +# Datadog libraries +from datadog.dogstatsd.aggregator import Aggregator +from datadog.dogstatsd.metric_types import MetricType +from datadog.dogstatsd.context import ( + TimedContextManagerDecorator, + DistributedContextManagerDecorator, +) +from datadog.dogstatsd.route import get_default_route +from datadog.dogstatsd.container import Cgroup +from datadog.util.compat import is_p3k, text +from datadog.util.format import normalize_tags +from datadog.version import __version__ + +# Logging +log = logging.getLogger("datadog.dogstatsd") + +# Default config +DEFAULT_HOST = "localhost" +DEFAULT_PORT = 8125 + +# Buffering-related values (in seconds) +DEFAULT_BUFFERING_FLUSH_INTERVAL = 0.3 +MIN_FLUSH_INTERVAL = 0.0001 + +# Env var to enable/disable sending the container ID field +ORIGIN_DETECTION_ENABLED = "DD_ORIGIN_DETECTION_ENABLED" + +# Environment variable containing external data used for Origin Detection. +EXTERNAL_DATA_ENV_VAR = "DD_EXTERNAL_ENV" + +# Default buffer settings based on socket type +UDP_OPTIMAL_PAYLOAD_LENGTH = 1432 +UDS_OPTIMAL_PAYLOAD_LENGTH = 8192 + +# Socket options +MIN_SEND_BUFFER_SIZE = 32 * 1024 + +# Mapping of each "DD_" prefixed environment variable to a specific tag name +DD_ENV_TAGS_MAPPING = { + "DD_ENTITY_ID": "dd.internal.entity_id", + "DD_ENV": "env", + "DD_SERVICE": "service", + "DD_VERSION": "version", +} + +# Telemetry minimum flush interval in seconds +DEFAULT_TELEMETRY_MIN_FLUSH_INTERVAL = 10 + +# Telemetry pre-computed formatting string. Pre-computation +# increases throughput of composing the result by 2-15% from basic +# '%'-based formatting with a `join`. 
+TELEMETRY_FORMATTING_STR = "\n".join( + [ + "datadog.dogstatsd.client.metrics:%s|c|#%s", + "datadog.dogstatsd.client.events:%s|c|#%s", + "datadog.dogstatsd.client.service_checks:%s|c|#%s", + "datadog.dogstatsd.client.bytes_sent:%s|c|#%s", + "datadog.dogstatsd.client.bytes_dropped:%s|c|#%s", + "datadog.dogstatsd.client.bytes_dropped_queue:%s|c|#%s", + "datadog.dogstatsd.client.bytes_dropped_writer:%s|c|#%s", + "datadog.dogstatsd.client.packets_sent:%s|c|#%s", + "datadog.dogstatsd.client.packets_dropped:%s|c|#%s", + "datadog.dogstatsd.client.packets_dropped_queue:%s|c|#%s", + "datadog.dogstatsd.client.packets_dropped_writer:%s|c|#%s", + ] +) + "\n" + +Stop = object() +SUPPORTS_FORKING = hasattr(os, "register_at_fork") and not os.environ.get("DD_DOGSTATSD_DISABLE_FORK_SUPPORT", None) +TRACK_INSTANCES = not os.environ.get("DD_DOGSTATSD_DISABLE_INSTANCE_TRACKING", None) -log = logging.getLogger('dogstatsd') +_instances = weakref.WeakSet() # type: weakref.WeakSet +def pre_fork(): + """Prepare all client instances for a process fork. + + If SUPPORTS_FORKING is true, this will be called automatically before os.fork(). + """ + for c in _instances: + c.pre_fork() + + +def post_fork_parent(): + """Restore all client instances after a fork. + + If SUPPORTS_FORKING is true, this will be called automatically after os.fork(). 
+ """ + for c in _instances: + c.post_fork_parent() + + +def post_fork_child(): + for c in _instances: + c.post_fork_child() + + +if SUPPORTS_FORKING: + os.register_at_fork( # type: ignore + before=pre_fork, + after_in_child=post_fork_child, + after_in_parent=post_fork_parent, + ) + + +# pylint: disable=useless-object-inheritance,too-many-instance-attributes +# pylint: disable=too-many-arguments,too-many-locals class DogStatsd(object): OK, WARNING, CRITICAL, UNKNOWN = (0, 1, 2, 3) - def __init__(self, host='localhost', port=8125, max_buffer_size=50): + def __init__( + self, + host=DEFAULT_HOST, # type: Text + port=DEFAULT_PORT, # type: int + max_buffer_size=None, # type: None + flush_interval=DEFAULT_BUFFERING_FLUSH_INTERVAL, # type: float + disable_aggregation=True, # type: bool + disable_buffering=True, # type: bool + namespace=None, # type: Optional[Text] + constant_tags=None, # type: Optional[List[str]] + use_ms=False, # type: bool + use_default_route=False, # type: bool + socket_path=None, # type: Optional[Text] + default_sample_rate=1, # type: float + disable_telemetry=False, # type: bool + telemetry_min_flush_interval=(DEFAULT_TELEMETRY_MIN_FLUSH_INTERVAL), # type: int + telemetry_host=None, # type: Text + telemetry_port=None, # type: Union[str, int] + telemetry_socket_path=None, # type: Text + max_buffer_len=0, # type: int + container_id=None, # type: Optional[Text] + origin_detection_enabled=True, # type: bool + socket_timeout=0, # type: Optional[float] + telemetry_socket_timeout=0, # type: Optional[float] + disable_background_sender=True, # type: bool + sender_queue_size=0, # type: int + sender_queue_timeout=0, # type: Optional[float] + track_instance=True, # type: bool + ): # type: (...) -> None """ Initialize a DogStatsd object. >>> statsd = DogStatsd() + :envvar DD_AGENT_HOST: the host of the DogStatsd server. + If set, it overrides default value. + :type DD_AGENT_HOST: string + + :envvar DD_DOGSTATSD_PORT: the port of the DogStatsd server. 
+ If set, it overrides default value. + :type DD_DOGSTATSD_PORT: integer + + :envvar DATADOG_TAGS: Tags to attach to every metric reported by dogstatsd client. + :type DATADOG_TAGS: comma-delimited string + + :envvar DD_ENTITY_ID: Tag to identify the client entity. + :type DD_ENTITY_ID: string + + :envvar DD_ENV: the env of the service running the dogstatsd client. + If set, it is appended to the constant (global) tags of the statsd client. + :type DD_ENV: string + + :envvar DD_SERVICE: the name of the service running the dogstatsd client. + If set, it is appended to the constant (global) tags of the statsd client. + :type DD_SERVICE: string + + :envvar DD_VERSION: the version of the service running the dogstatsd client. + If set, it is appended to the constant (global) tags of the statsd client. + :type DD_VERSION: string + + :envvar DD_DOGSTATSD_DISABLE: Disable any statsd metric collection (default False) + :type DD_DOGSTATSD_DISABLE: boolean + + :envvar DD_TELEMETRY_HOST: the host for the dogstatsd server we wish to submit + telemetry stats to. If set, it overrides default value. + :type DD_TELEMETRY_HOST: string + + :envvar DD_TELEMETRY_PORT: the port for the dogstatsd server we wish to submit + telemetry stats to. If set, it overrides default value. + :type DD_TELEMETRY_PORT: integer + + :envvar DD_ORIGIN_DETECTION_ENABLED: Enable/disable sending the container ID field + for origin detection. + :type DD_ORIGIN_DETECTION_ENABLED: boolean + + :envvar DD_DOGSTATSD_DISABLE_FORK_SUPPORT: Don't install global fork hooks with os.register_at_fork. + Global fork hooks then need to be called manually before and after calling os.fork. + :type DD_DOGSTATSD_DISABLE_FORK_SUPPORT: boolean + + :envvar DD_DOGSTATSD_DISABLE_INSTANCE_TRACKING: Don't register instances of this class with global fork hooks. + :type DD_DOGSTATSD_DISABLE_INSTANCE_TRACKING: boolean + :param host: the host of the DogStatsd server. + :type host: string + :param port: the port of the DogStatsd server. 
- :param max_buffer_size: Maximum number of metric to buffer before sending to the server - if sending metrics in batch + :type port: integer + + :max_buffer_size: Deprecated option, do not use it anymore. + :type max_buffer_type: None + + :flush_interval: Amount of time in seconds that the flush thread will + wait before trying to flush the buffered metrics to the server. If set, + it overrides the default value. + :type flush_interval: float + + :disable_aggregation: If true, metrics (Count, Gauge, Set) are no longered aggregated by the client + :type disable_aggregation: bool + + :disable_buffering: If set, metrics are no longered buffered by the client and + all data is sent synchronously to the server + :type disable_buffering: bool + + :param namespace: Namespace to prefix all metric names + :type namespace: string + + :param constant_tags: Tags to attach to all metrics + :type constant_tags: list of strings + + :param use_ms: Report timed values in milliseconds instead of seconds (default False) + :type use_ms: boolean + + :param use_default_route: Dynamically set the DogStatsd host to the default route + (Useful when running the client in a container) (Linux only) + :type use_default_route: boolean + + :param socket_path: Communicate with dogstatsd through a UNIX socket instead of + UDP. If set, disables UDP transmission (Linux only) + :type socket_path: string + + :param default_sample_rate: Sample rate to use by default for all metrics + :type default_sample_rate: float + + :param max_buffer_len: Maximum number of bytes to buffer before sending to the server + if sending metrics in batch. If not specified it will be adjusted to a optimal value + depending on the connection type. 
+ :type max_buffer_len: integer + + :param disable_telemetry: Should client telemetry be disabled + :type disable_telemetry: boolean + + :param telemetry_min_flush_interval: Minimum flush interval for telemetry in seconds + :type telemetry_min_flush_interval: integer + + :param telemetry_host: the host for the dogstatsd server we wish to submit + telemetry stats to. Optional. If telemetry is enabled and this is not specified + the default host will be used. + :type host: string + + :param telemetry_port: the port for the dogstatsd server we wish to submit + telemetry stats to. Optional. If telemetry is enabled and this is not specified + the default host will be used. + :type port: integer + + :param telemetry_socket_path: Submit client telemetry to dogstatsd through a UNIX + socket instead of UDP. If set, disables UDP transmission (Linux only) + :type telemetry_socket_path: string + + :param container_id: Allows passing the container ID, this will be used by the Agent to enrich + metrics with container tags. + This feature requires Datadog Agent version >=6.35.0 && <7.0.0 or Agent versions >=7.35.0. + When configured, the provided container ID is prioritized over the container ID discovered + via Origin Detection. + Default: None. + :type container_id: string + + :param origin_detection_enabled: Enable/disable the client origin detection. + This feature requires Datadog Agent version >=6.35.0 && <7.0.0 or Agent versions >=7.35.0. + When enabled, the client tries to discover its container ID and sends it to the Agent + to enrich the metrics with container tags. + Origin detection can be disabled by configuring the environment variabe DD_ORIGIN_DETECTION_ENABLED=false + The client tries to read the container ID by parsing the file /proc/self/cgroup. + This is not supported on Windows. + Default: True. 
+ More on this: https://docs.datadoghq.com/developers/dogstatsd/?tab=kubernetes#origin-detection-over-udp + :type origin_detection_enabled: boolean + + :param socket_timeout: Set timeout for socket operations, in seconds. Optional. + If sets to zero, never wait if operation can not be completed immediately. If set to None, wait forever. + This option does not affect hostname resolution when using UDP. + :type socket_timeout: float + + :param telemetry_socket_timeout: Set timeout for the telemetry socket operations. Optional. + Effective only if either telemetry_host or telemetry_socket_path are set. + If sets to zero, never wait if operation can not be completed immediately. If set to None, wait forever. + This option does not affect hostname resolution when using UDP. + :type telemetry_socket_timeout: float + + :param disable_background_sender: Use a background thread to communicate with the dogstatsd server. Optional. + When enabled, a background thread will be used to send metric payloads to the Agent. + Applications should call stop() before exiting to make sure all pending payloads are sent. + Default: True. + :type disable_background_sender: boolean + + :param sender_queue_size: Set the maximum number of packets to queue for the sender. Optional + How may packets to queue before blocking or dropping the packet if the packet queue is already full. + Default: 0 (unlimited). + :type sender_queue_size: integer + + :param sender_queue_timeout: Set timeout for packet queue operations, in seconds. Optional. + How long the application thread is willing to wait for the queue clear up before dropping the metric packet. + If set to None, wait forever. + If set to zero drop the packet immediately if the queue is full. + Default: 0 (no wait) + :type sender_queue_timeout: float + + :param track_instance: Keep track of this instance and automatically handle cleanup when os.fork() is called, + if supported. + Default: True. 
+ :type track_instance: boolean """ - self.host = host - self.port = int(port) + + self._socket_lock = Lock() + + # Check for deprecated option + if max_buffer_size is not None: + log.warning("The parameter max_buffer_size is now deprecated and is not used anymore") + # Check host and port env vars + agent_host = os.environ.get("DD_AGENT_HOST") + if agent_host and host == DEFAULT_HOST: + host = agent_host + + dogstatsd_port = os.environ.get("DD_DOGSTATSD_PORT") + if dogstatsd_port and port == DEFAULT_PORT: + try: + port = int(dogstatsd_port) + except ValueError: + log.warning( + "Port number provided in DD_DOGSTATSD_PORT env var is not an integer: \ + %s, using %s as port number", + dogstatsd_port, + port, + ) + + # Assuming environment variables always override + telemetry_host = os.environ.get("DD_TELEMETRY_HOST", telemetry_host) + telemetry_port = os.environ.get("DD_TELEMETRY_PORT", telemetry_port) or port + + # Check enabled + if os.environ.get("DD_DOGSTATSD_DISABLE") not in {"True", "true", "yes", "1"}: + self._enabled = True + else: + self._enabled = False + + # Connection + self._max_buffer_len = max_buffer_len + self.socket_timeout = socket_timeout + if socket_path is not None: + self.socket_path = socket_path # type: Optional[text] + self.host = None + self.port = None + else: + self.socket_path = None + self.host = self.resolve_host(host, use_default_route) + self.port = int(port) + + self.telemetry_socket_path = telemetry_socket_path + self.telemetry_host = None + self.telemetry_port = None + self.telemetry_socket_timeout = telemetry_socket_timeout + if not telemetry_socket_path and telemetry_host: + self.telemetry_socket_path = None + self.telemetry_host = self.resolve_host(telemetry_host, use_default_route) + self.telemetry_port = int(telemetry_port) + + # Socket self.socket = None - self.max_buffer_size = max_buffer_size - self._send = self._send_to_server - self.encoding = 'utf-8' + self.telemetry_socket = None + self.encoding = "utf-8" + + # Options 
+ env_tags = [tag for tag in os.environ.get("DATADOG_TAGS", "").split(",") if tag] + # Inject values of DD_* environment variables as global tags. + for var, tag_name in DD_ENV_TAGS_MAPPING.items(): + value = os.environ.get(var, "") + if value: + env_tags.append("{name}:{value}".format(name=tag_name, value=value)) + if constant_tags is None: + constant_tags = [] + self.constant_tags = constant_tags + env_tags + if namespace is not None: + namespace = text(namespace) + self.namespace = namespace + self.use_ms = use_ms + self.default_sample_rate = default_sample_rate + + # Origin detection + self._container_id = None + origin_detection_enabled = self._is_origin_detection_enabled( + container_id, origin_detection_enabled + ) + self._set_container_id(container_id, origin_detection_enabled) + self._external_data = os.environ.get(EXTERNAL_DATA_ENV_VAR, None) + + # init telemetry version + self._client_tags = [ + "client:py", + "client_version:{}".format(__version__), + ] + self._reset_telemetry() + self._telemetry_flush_interval = telemetry_min_flush_interval + self._telemetry = not disable_telemetry + self._last_flush_time = time.time() + + self._current_buffer_total_size = 0 + self._buffer = [] # type: List[Text] + self._buffer_lock = RLock() + + self._reset_buffer() + + # This lock is used for all cases where client configuration is being changed: buffering, + # aggregation, sender mode. + self._config_lock = RLock() + + self._disable_buffering = disable_buffering + self._disable_aggregation = disable_aggregation + + self._flush_interval = flush_interval + self._flush_thread = None + self._flush_thread_stop = threading.Event() + self.aggregator = Aggregator() + # Indicates if the process is about to fork, so we shouldn't start any new threads yet. 
+ self._forking = False + + if not self._disable_buffering: + self._send = self._send_to_buffer + else: + self._send = self._send_to_server + + if not self._disable_aggregation or not self._disable_buffering: + self._start_flush_thread() + else: + log.debug("Statsd buffering and aggregation is disabled") + + self._queue = None + self._sender_thread = None + self._sender_enabled = False + + if not disable_background_sender: + self.enable_background_sender(sender_queue_size, sender_queue_timeout) + + if TRACK_INSTANCES and track_instance: + _instances.add(self) + + @property + def socket_path(self): + return self._socket_path + + @socket_path.setter + def socket_path(self, path): + with self._socket_lock: + self._socket_path = path + if path is None: + self._transport = "udp" + self._max_payload_size = self._max_buffer_len or UDP_OPTIMAL_PAYLOAD_LENGTH + else: + self._transport = "uds" + self._max_payload_size = self._max_buffer_len or UDS_OPTIMAL_PAYLOAD_LENGTH + + def enable_background_sender(self, sender_queue_size=0, sender_queue_timeout=0): + """ + Use a background thread to communicate with the dogstatsd server. + When enabled, a background thread will be used to send metric payloads to the Agent. + + Applications should call stop() before exiting to make sure all pending payloads are sent. + + Compatible with os.fork() starting with Python 3.7. On earlier versions, compatible if applications + arrange to call pre_fork(), post_fork_parent() and post_fork_child() module functions around calls + to os.fork(). + + :param sender_queue_size: Set the maximum number of packets to queue for the sender. + How many packets to queue before blocking or dropping the packet if the packet queue is already full. + Default: 0 (unlimited). + :type sender_queue_size: integer, optional + :param sender_queue_timeout: Set timeout for packet queue operations, in seconds. + How long the application thread is willing to wait for the queue clear up before dropping the metric packet. 
+ If set to None, wait forever. If set to zero drop the packet immediately if the queue is full. + Default: 0 (no wait). + :type sender_queue_timeout: float, optional + """ + + with self._config_lock: + self._sender_enabled = True + self._sender_queue_size = sender_queue_size + if sender_queue_timeout is None: + self._queue_blocking = True + self._queue_timeout = None + else: + self._queue_blocking = sender_queue_timeout > 0 + self._queue_timeout = max(0, sender_queue_timeout) + + self._start_sender_thread() + + def disable_background_sender(self): + """Disable background sender mode. + + This call will block until all previously queued payloads are sent. + """ + with self._config_lock: + self._sender_enabled = False + self._stop_sender_thread() + + def disable_telemetry(self): + self._telemetry = False + + def enable_telemetry(self): + self._telemetry = True + + # Note: Invocations of this method should be thread-safe + def _start_flush_thread(self): + if self._disable_aggregation and self.disable_buffering: + log.debug("Statsd periodic buffer and aggregation flush is disabled") + return + if self._flush_interval <= MIN_FLUSH_INTERVAL: + log.debug( + "the set flush interval is less then the minimum" + ) + return + + if self._forking: + return + + if self._flush_thread is not None: + return + + def _flush_thread_loop(self, flush_interval): + while not self._flush_thread_stop.is_set(): + time.sleep(flush_interval) + if not self._disable_aggregation: + self.flush_aggregated_metrics() + if not self._disable_buffering: + self.flush_buffered_metrics() + self._flush_thread = threading.Thread( + name="{}_flush_thread".format(self.__class__.__name__), + target=_flush_thread_loop, + args=(self, self._flush_interval,), + ) + self._flush_thread.daemon = True + self._flush_thread.start() + log.debug( + "Statsd flush thread registered with period of %s", + self._flush_interval, + ) + + # Note: Invocations of this method should be thread-safe + def _stop_flush_thread(self): + if 
not self._flush_thread: + return + try: + if not self._disable_aggregation: + self.flush_aggregated_metrics() + if not self.disable_buffering: + self.flush_buffered_metrics() + finally: + pass + + self._flush_thread_stop.set() + self._flush_thread.join() + self._flush_thread = None + self._flush_thread_stop.clear() + + def _dedicated_telemetry_destination(self): + return bool(self.telemetry_socket_path or self.telemetry_host) + + # Context manager helper def __enter__(self): - self.open_buffer(self.max_buffer_size) + self.open_buffer() return self - def __exit__(self, type, value, traceback): + # Context manager helper + def __exit__(self, exc_type, value, traceback): self.close_buffer() - def get_socket(self): + @property + def disable_buffering(self): + with self._config_lock: + return self._disable_buffering + + @disable_buffering.setter + def disable_buffering(self, is_disabled): + with self._config_lock: + # If the toggle didn't change anything, this method is a noop + if self._disable_buffering == is_disabled: + return + + self._disable_buffering = is_disabled + + # If buffering (and aggregation) has been disabled, flush and kill the background thread + # otherwise start up the flushing thread and enable the buffering. + if is_disabled: + self._send = self._send_to_server + if self._disable_aggregation and self.disable_buffering: + self._stop_flush_thread() + log.debug("Statsd buffering is disabled") + else: + self._send = self._send_to_buffer + self._start_flush_thread() + + def disable_aggregation(self): + with self._config_lock: + # If the toggle didn't change anything, this method is a noop + if self._disable_aggregation: + return + + self._disable_aggregation = True + + # If aggregation and buffering has been disabled, flush and kill the background thread + # otherwise start up the flushing thread and enable aggregation. 
+ if self._disable_aggregation and self.disable_buffering: + self._stop_flush_thread() + log.debug("Statsd aggregation is disabled") + + def enable_aggregation(self, flush_interval=DEFAULT_BUFFERING_FLUSH_INTERVAL): + with self._config_lock: + if not self._disable_aggregation: + return + self._disable_aggregation = False + self._flush_interval = flush_interval + if self._disable_buffering: + self._send = self._send_to_server + self._start_flush_thread() + + @staticmethod + def resolve_host(host, use_default_route): + """ + Resolve the DogStatsd host. + + :param host: host + :type host: string + :param use_default_route: Use the system default route as host (overrides `host` parameter) + :type use_default_route: bool + """ + if not use_default_route: + return host + + return get_default_route() + + def get_socket(self, telemetry=False): """ Return a connected socket. Note: connect the socket before assigning it to the class instance to avoid bad thread race conditions. """ - if not self.socket: - sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - sock.connect((self.host, self.port)) - self.socket = sock - return self.socket + with self._socket_lock: + if telemetry and self._dedicated_telemetry_destination(): + if not self.telemetry_socket: + if self.telemetry_socket_path is not None: + self.telemetry_socket = self._get_uds_socket( + self.telemetry_socket_path, + self.telemetry_socket_timeout, + ) + else: + self.telemetry_socket = self._get_udp_socket( + self.telemetry_host, + self.telemetry_port, + self.telemetry_socket_timeout, + ) + + return self.telemetry_socket - def open_buffer(self, max_buffer_size=50): + if not self.socket: + if self.socket_path is not None: + self.socket = self._get_uds_socket(self.socket_path, self.socket_timeout) + else: + self.socket = self._get_udp_socket( + self.host, + self.port, + self.socket_timeout, + ) + + return self.socket + + def set_socket_timeout(self, timeout): + """ + Set timeout for socket operations, in seconds. 
+ + If set to zero, never wait if operation can not be completed immediately. If set to None, wait forever. + This option does not affect hostname resolution when using UDP. """ - Open a buffer to send a batch of metrics in one packet. + with self._socket_lock: + self.socket_timeout = timeout + if self.socket: + self.socket.settimeout(timeout) + + @classmethod + def _ensure_min_send_buffer_size(cls, sock, min_size=MIN_SEND_BUFFER_SIZE): + # Increase the receiving buffer size where needed (e.g. MacOS has 4k RX + # buffers which is half of the max packet size that the client will send. + if os.name == 'posix': + try: + recv_buff_size = sock.getsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF) + if recv_buff_size <= min_size: + sock.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, min_size) + log.debug("Socket send buffer increased to %dkb", min_size / 1024) + finally: + pass - You can also use this as a context manager. + @classmethod + def _get_uds_socket(cls, socket_path, timeout): + sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) + sock.settimeout(timeout) + cls._ensure_min_send_buffer_size(sock) + sock.connect(socket_path) + return sock + + @classmethod + def _get_udp_socket(cls, host, port, timeout): + log.debug("Connecting to %s:%s", host, port) + addrinfo = socket.getaddrinfo(host, port, 0, socket.SOCK_DGRAM) + # Override gai.conf order for backwrads compatibility: prefer + # v4, so that a v4-only service on hosts with both addresses + # still works. 
+ addrinfo.sort(key=lambda v: v[0] == socket.AF_INET, reverse=True) + lastaddr = len(addrinfo) - 1 + for i, (af, ty, proto, _, addr) in enumerate(addrinfo): + sock = None + try: + sock = socket.socket(af, ty, proto) + sock.settimeout(timeout) + cls._ensure_min_send_buffer_size(sock) + sock.connect(addr) + log.debug("Connected to: %s", addr) + return sock + except Exception as e: + if sock is not None: + sock.close() + log.debug("Failed to connect to %s: %s", addr, e) + if i < lastaddr: + continue + raise e + else: + raise ValueError("getaddrinfo returned no addresses to connect to") + + def open_buffer(self, max_buffer_size=None): + """ + Open a buffer to send a batch of metrics. + + To take advantage of automatic flushing, you should use the context manager instead >>> with DogStatsd() as batch: - >>> batch.gauge('users.online', 123) - >>> batch.gauge('active.connections', 1001) + >>> batch.gauge("users.online", 123) + >>> batch.gauge("active.connections", 1001) + + Note: This method must be called before close_buffer() matching invocation. """ - self.max_buffer_size = max_buffer_size - self.buffer = [] + + self._config_lock.acquire() + self._send = self._send_to_buffer + if max_buffer_size is not None: + log.warning("The parameter max_buffer_size is now deprecated and is not used anymore") + def close_buffer(self): """ Flush the buffer and switch back to single metric packets. + + Note: This method must be called after a matching open_buffer() + invocation. 
""" - self._send = self._send_to_server - self._flush_buffer() + try: + self.flush_buffered_metrics() + finally: + if self._disable_buffering: + self._send = self._send_to_server + + self._config_lock.release() + + def _reset_buffer(self): + with self._buffer_lock: + self._current_buffer_total_size = 0 + self._buffer = [] + + def flush(self): + self.flush_buffered_metrics() - def gauge(self, metric, value, tags=None, sample_rate=1): + def flush_buffered_metrics(self): + """ + Flush the metrics buffer by sending the data to the server. + """ + with self._buffer_lock: + # Only send packets if there are packets to send + if self._buffer: + self._send_to_server("\n".join(self._buffer)) + self._reset_buffer() + + def flush_aggregated_metrics(self): + """ + Flush the aggregated metrics + """ + metrics = self.aggregator.flush_aggregated_metrics() + for m in metrics: + self._report(m.name, m.metric_type, m.value, m.tags, m.rate, m.timestamp) + + def gauge( + self, + metric, # type: Text + value, # type: float + tags=None, # type: Optional[List[str]] + sample_rate=None, # type: Optional[float] + ): # type(...) -> None """ Record the value of a gauge, optionally setting a list of tags and a sample rate. - >>> statsd.gauge('users.online', 123) - >>> statsd.gauge('active.connections', 1001, tags=["protocol:http"]) + >>> statsd.gauge("users.online", 123) + >>> statsd.gauge("active.connections", 1001, tags=["protocol:http"]) """ - return self._report(metric, 'g', value, tags, sample_rate) + if self._disable_aggregation: + self._report(metric, "g", value, tags, sample_rate) + else: + self.aggregator.gauge(metric, value, tags, sample_rate) + + # Minimum Datadog Agent version: 7.40.0 + def gauge_with_timestamp( + self, + metric, # type: Text + value, # type: float + timestamp, # type: int + tags=None, # type: Optional[List[str]] + sample_rate=None, # type: Optional[float] + ): # type(...) 
-> None + """u + Record the value of a gauge with a Unix timestamp (in seconds), + optionally setting a list of tags and a sample rate. + + Minimum Datadog Agent version: 7.40.0 + + >>> statsd.gauge("users.online", 123, 1713804588) + >>> statsd.gauge("active.connections", 1001, 1713804588, tags=["protocol:http"]) + """ + if self._disable_aggregation: + self._report(metric, "g", value, tags, sample_rate, timestamp) + else: + self.aggregator.gauge(metric, value, tags, sample_rate, timestamp) + + def count( + self, + metric, # type: Text + value, # type: float + tags=None, # type: Optional[List[str]] + sample_rate=None, # type: Optional[float] + ): # type(...) -> None + """ + Count tracks how many times something happened per second, tags and a sample + rate. - def increment(self, metric, value=1, tags=None, sample_rate=1): + >>> statsd.count("page.views", 123) + """ + if self._disable_aggregation: + self._report(metric, "c", value, tags, sample_rate) + else: + self.aggregator.count(metric, value, tags, sample_rate) + + # Minimum Datadog Agent version: 7.40.0 + def count_with_timestamp( + self, + metric, # type: Text + value, # type: float + timestamp=0, # type: int + tags=None, # type: Optional[List[str]] + sample_rate=None, # type: Optional[float] + ): # type(...) -> None + """ + Count how many times something happened at a given Unix timestamp in seconds, + tags and a sample rate. + + Minimum Datadog Agent version: 7.40.0 + + >>> statsd.count("files.transferred", 124, timestamp=1713804588) + """ + if self._disable_aggregation: + self._report(metric, "c", value, tags, sample_rate, timestamp) + else: + self.aggregator.count(metric, value, tags, sample_rate, timestamp) + + def increment( + self, + metric, # type: Text + value=1, # type: float + tags=None, # type: Optional[List[str]] + sample_rate=None, # type: Optional[float] + ): # type(...) -> None """ Increment a counter, optionally setting a value, tags and a sample rate. 
- >>> statsd.increment('page.views') - >>> statsd.increment('files.transferred', 124) + >>> statsd.increment("page.views") + >>> statsd.increment("files.transferred", 124) """ - self._report(metric, 'c', value, tags, sample_rate) + if self._disable_aggregation: + self._report(metric, "c", value, tags, sample_rate) + else: + self.aggregator.count(metric, value, tags, sample_rate) - def decrement(self, metric, value=1, tags=None, sample_rate=1): + def decrement( + self, + metric, # type: Text + value=1, # type: float + tags=None, # type: Optional[List[str]] + sample_rate=None, # type: Optional[float] + ): # type(...) -> None """ Decrement a counter, optionally setting a value, tags and a sample rate. - >>> statsd.decrement('files.remaining') - >>> statsd.decrement('active.connections', 2) + >>> statsd.decrement("files.remaining") + >>> statsd.decrement("active.connections", 2) """ - self._report(metric, 'c', -value, tags, sample_rate) + metric_value = -value if value else value + if self._disable_aggregation: + self._report(metric, "c", metric_value, tags, sample_rate) + else: + self.aggregator.count(metric, metric_value, tags, sample_rate) - def histogram(self, metric, value, tags=None, sample_rate=1): + def histogram( + self, + metric, # type: Text + value, # type: float + tags=None, # type: Optional[List[str]] + sample_rate=None, # type: Optional[float] + ): # type(...) -> None """ Sample a histogram value, optionally setting tags and a sample rate. 
- >>> statsd.histogram('uploaded.file.size', 1445) - >>> statsd.histogram('album.photo.count', 26, tags=["gender:female"]) + >>> statsd.histogram("uploaded.file.size", 1445) + >>> statsd.histogram("album.photo.count", 26, tags=["gender:female"]) """ - self._report(metric, 'h', value, tags, sample_rate) + self._report(metric, "h", value, tags, sample_rate) - def timing(self, metric, value, tags=None, sample_rate=1): + def distribution( + self, + metric, # type: Text + value, # type: float + tags=None, # type: Optional[List[str]] + sample_rate=None, # type: Optional[float] + ): # type(...) -> None """ - Record a timing, optionally setting tags and a sample rate. + Send a global distribution value, optionally setting tags and a sample rate. - >>> statsd.timing("query.response.time", 1234) + >>> statsd.distribution("uploaded.file.size", 1445) + >>> statsd.distribution("album.photo.count", 26, tags=["gender:female"]) """ - self._report(metric, 'ms', value, tags, sample_rate) + self._report(metric, "d", value, tags, sample_rate) - class _TimedContextManagerDecorator(object): + def timing( + self, + metric, # type: Text + value, # type: float + tags=None, # type: Optional[List[str]] + sample_rate=None, # type: Optional[float] + ): # type(...) -> None """ - A context manager and a decorator which will report the elapsed time in - the context OR in a function call. + Record a timing, optionally setting tags and a sample rate. + + >>> statsd.timing("query.response.time", 1234) """ + self._report(metric, "ms", value, tags, sample_rate) - def __init__(self, statsd, metric, tags=None, sample_rate=1): - self.statsd = statsd - self.metric = metric - self.tags = tags - self.sample_rate = sample_rate + def timed(self, metric=None, tags=None, sample_rate=None, use_ms=None): + """ + A decorator or context manager that will measure the distribution of a + function's/context's run time. Optionally specify a list of tags or a + sample rate. 
If the metric is not defined as a decorator, the module + name and function name will be used. The metric is required as a context + manager. + :: - def __call__(self, func): - """Decorator which returns the elapsed time of the function call.""" - @wraps(func) - def wrapped(*args, **kwargs): - with self: - return func(*args, **kwargs) - return wrapped + @statsd.timed("user.query.time", sample_rate=0.5) + def get_user(user_id): + # Do what you need to ... + pass - def __enter__(self): - self.start = time() + # Is equivalent to ... + with statsd.timed("user.query.time", sample_rate=0.5): + # Do what you need to ... + pass - def __exit__(self, type, value, traceback): - # Report the elapsed time of the context manager. - self.statsd.timing(self.metric, time() - self.start, - self.tags, self.sample_rate) + # Is equivalent to ... + start = time.time() + try: + get_user(user_id) + finally: + statsd.timing("user.query.time", time.time() - start) + """ + return TimedContextManagerDecorator(self, metric, tags, sample_rate, use_ms) - def timed(self, metric, tags=None, sample_rate=1): + def distributed(self, metric=None, tags=None, sample_rate=None, use_ms=None): """ A decorator or context manager that will measure the distribution of a - function's/context's run time. Optionally specify a list of tags or a - sample rate. + function's/context's run time using custom metric distribution. + Optionally specify a list of tags or a sample rate. If the metric is not + defined as a decorator, the module name and function name will be used. + The metric is required as a context manager. :: - @statsd.timed('user.query.time', sample_rate=0.5) + @statsd.distributed("user.query.time", sample_rate=0.5) def get_user(user_id): # Do what you need to ... pass # Is equivalent to ... - with statsd.timed('user.query.time', sample_rate=0.5): + with statsd.distributed("user.query.time", sample_rate=0.5): # Do what you need to ... 
pass @@ -177,113 +1043,509 @@ def get_user(user_id): try: get_user(user_id) finally: - statsd.timing('user.query.time', time.time() - start) + statsd.distribution("user.query.time", time.time() - start) """ - return self._TimedContextManagerDecorator(self, metric, tags, sample_rate) + return DistributedContextManagerDecorator(self, metric, tags, sample_rate, use_ms) - def set(self, metric, value, tags=None, sample_rate=1): + def set(self, metric, value, tags=None, sample_rate=None): """ Sample a set value. - >>> statsd.set('visitors.uniques', 999) + >>> statsd.set("visitors.uniques", 999) """ - self._report(metric, 's', value, tags, sample_rate) + if self._disable_aggregation: + self._report(metric, "s", value, tags, sample_rate) + else: + self.aggregator.set(metric, value, tags, sample_rate) + + def close_socket(self): + """ + Closes connected socket if connected. + """ + with self._socket_lock: + if self.socket: + try: + self.socket.close() + except OSError as e: + log.error("Unexpected error: %s", str(e)) + self.socket = None + + if self.telemetry_socket: + try: + self.telemetry_socket.close() + except OSError as e: + log.error("Unexpected error: %s", str(e)) + self.telemetry_socket = None + + def _serialize_metric( + self, metric, metric_type, value, tags, sample_rate=1, timestamp=0 + ): + # Create/format the metric packet + return "%s%s:%s|%s%s%s%s%s%s" % ( + (self.namespace + ".") if self.namespace else "", + metric, + value, + metric_type, + ("|@" + text(sample_rate)) if sample_rate != 1 else "", + ("|#" + ",".join(normalize_tags(tags))) if tags else "", + ("|c:" + self._container_id if self._container_id else ""), + ("|e:" + self._external_data if self._external_data else ""), + ("|T" + text(timestamp)) if timestamp > 0 else "", + ) + + def _report(self, metric, metric_type, value, tags, sample_rate, timestamp=0): + """ + Create a metric packet and send it. 
+ + More information about the packets' format: + https://docs.datadoghq.com/developers/dogstatsd/datagram_shell/?tab=metrics#the-dogstatsd-protocol + """ + if value is None: + return + + if self._enabled is not True: + return + + if self._telemetry: + self.metrics_count += 1 + + if sample_rate is None: + sample_rate = self.default_sample_rate - def _report(self, metric, metric_type, value, tags, sample_rate): if sample_rate != 1 and random() > sample_rate: return + # timestamps (protocol v1.3) only allowed on gauges and counts + allows_timestamp = metric_type == MetricType.GAUGE or metric_type == MetricType.COUNT - payload = [metric, ":", value, "|", metric_type] - if sample_rate != 1: - payload.extend(["|@", sample_rate]) - if tags: - payload.extend(["|#", ",".join(tags)]) + if not allows_timestamp or timestamp < 0: + timestamp = 0 + + # Resolve the full tag list + tags = self._add_constant_tags(tags) + payload = self._serialize_metric( + metric, metric_type, value, tags, sample_rate, timestamp + ) + + # Send it + self._send(payload) + + def _reset_telemetry(self): + self.metrics_count = 0 + self.events_count = 0 + self.service_checks_count = 0 + self.bytes_sent = 0 + self.bytes_dropped_queue = 0 + self.bytes_dropped_writer = 0 + self.packets_sent = 0 + self.packets_dropped_queue = 0 + self.packets_dropped_writer = 0 + self._last_flush_time = time.time() + + # Aliases for backwards compatibility. 
+ @property + def packets_dropped(self): + return self.packets_dropped_queue + self.packets_dropped_writer + + @property + def bytes_dropped(self): + return self.bytes_dropped_queue + self.bytes_dropped_writer + + def _flush_telemetry(self): + tags = self._client_tags[:] + tags.append("client_transport:{}".format(self._transport)) + tags.extend(self.constant_tags) + telemetry_tags = ",".join(tags) - encoded = "".join(imap(str, payload)) - self._send(encoded) + return TELEMETRY_FORMATTING_STR % ( + self.metrics_count, + telemetry_tags, + self.events_count, + telemetry_tags, + self.service_checks_count, + telemetry_tags, + self.bytes_sent, + telemetry_tags, + self.bytes_dropped_queue + self.bytes_dropped_writer, + telemetry_tags, + self.bytes_dropped_queue, + telemetry_tags, + self.bytes_dropped_writer, + telemetry_tags, + self.packets_sent, + telemetry_tags, + self.packets_dropped_queue + self.packets_dropped_writer, + telemetry_tags, + self.packets_dropped_queue, + telemetry_tags, + self.packets_dropped_writer, + telemetry_tags, + ) + + def _is_telemetry_flush_time(self): + return self._telemetry and \ + self._last_flush_time + self._telemetry_flush_interval < time.time() def _send_to_server(self, packet): + # Skip the lock if the queue is None. There is no race with enable_background_sender. + if self._queue is not None: + # Prevent a race with disable_background_sender. 
+ with self._buffer_lock: + if self._queue is not None: + try: + self._queue.put(packet + '\n', self._queue_blocking, self._queue_timeout) + except queue.Full: + self.packets_dropped_queue += 1 + self.bytes_dropped_queue += 1 + return + + self._xmit_packet_with_telemetry(packet + '\n') + + def _xmit_packet_with_telemetry(self, packet): + self._xmit_packet(packet, False) + + if self._is_telemetry_flush_time(): + telemetry = self._flush_telemetry() + if self._xmit_packet(telemetry, True): + self._reset_telemetry() + self.packets_sent += 1 + self.bytes_sent += len(telemetry) + else: + # Telemetry packet has been dropped, keep telemetry data for the next flush + self._last_flush_time = time.time() + self.bytes_dropped_writer += len(telemetry) + self.packets_dropped_writer += 1 + + def _xmit_packet(self, packet, is_telemetry): try: - # If set, use socket directly - (self.socket or self.get_socket()).send(packet.encode(self.encoding)) - except socket.error: - log.info("Error submitting packet, will try refreshing the socket") - self.socket = None - try: - self.get_socket().send(packet.encode(self.encoding)) - except socket.error: - log.exception("Failed to send packet with a newly binded socket") + if is_telemetry and self._dedicated_telemetry_destination(): + mysocket = self.telemetry_socket or self.get_socket(telemetry=True) + else: + # If set, use socket directly + mysocket = self.socket or self.get_socket() + + mysocket.send(packet.encode(self.encoding)) + + if not is_telemetry and self._telemetry: + self.packets_sent += 1 + self.bytes_sent += len(packet) + + return True + except socket.timeout: + # dogstatsd is overflowing, drop the packets (mimics the UDP behaviour) + pass + except (socket.herror, socket.gaierror) as socket_err: + log.warning( + "Error submitting packet: %s, dropping the packet and closing the socket", + socket_err, + ) + self.close_socket() + except socket.error as socket_err: + if socket_err.errno == errno.EAGAIN: + log.debug("Socket send would 
block: %s, dropping the packet", socket_err) + elif socket_err.errno == errno.ENOBUFS: + log.debug("Socket buffer full: %s, dropping the packet", socket_err) + elif socket_err.errno == errno.EMSGSIZE: + log.debug( + "Packet size too big (size: %d): %s, dropping the packet", + len(packet.encode(self.encoding)), + socket_err) + else: + log.warning( + "Error submitting packet: %s, dropping the packet and closing the socket", + socket_err, + ) + self.close_socket() + except Exception as exc: + print("Unexpected error: %s", exc) + log.error("Unexpected error: %s", str(exc)) + + if not is_telemetry and self._telemetry: + self.bytes_dropped_writer += len(packet) + self.packets_dropped_writer += 1 + + return False def _send_to_buffer(self, packet): - self.buffer.append(packet) - if len(self.buffer) >= self.max_buffer_size: - self._flush_buffer() + with self._buffer_lock: + if self._should_flush(len(packet)): + self.flush_buffered_metrics() + + self._buffer.append(packet) + # Update the current buffer length, including line break to anticipate + # the final packet size + self._current_buffer_total_size += len(packet) + 1 - def _flush_buffer(self): - self._send_to_server("\n".join(self.buffer)) - self.buffer = [] + def _should_flush(self, length_to_be_added): + if self._current_buffer_total_size + length_to_be_added + 1 > self._max_payload_size: + return True + return False - def _escape_event_content(self, string): - return string.replace('\n', '\\n') + @staticmethod + def _escape_event_content(string): + return string.replace("\n", "\\n") - def _escape_service_check_message(self, string): - return string.replace('\n', '\\n').replace('m:', 'm\:') + @staticmethod + def _escape_service_check_message(string): + return string.replace("\n", "\\n").replace("m:", "m\\:") - def event(self, title, text, alert_type=None, aggregation_key=None, - source_type_name=None, date_happened=None, priority=None, - tags=None, hostname=None): + def event( + self, + title, + message, + 
alert_type=None, + aggregation_key=None, + source_type_name=None, + date_happened=None, + priority=None, + tags=None, + hostname=None, + ): """ Send an event. Attributes are the same as the Event API. http://docs.datadoghq.com/api/ - >>> statsd.event('Man down!', 'This server needs assistance.') - >>> statsd.event('The web server restarted', 'The web server is up again', alert_type='success') # NOQA + >>> statsd.event("Man down!", "This server needs assistance.") + >>> statsd.event("Web server restart", "The web server is up", alert_type="success") # NOQA """ - title = self._escape_event_content(title) - text = self._escape_event_content(text) - string = u'_e{%d,%d}:%s|%s' % (len(title), len(text), title, text) + title = DogStatsd._escape_event_content(title) + message = DogStatsd._escape_event_content(message) + + # pylint: disable=undefined-variable + if not is_p3k(): + if not isinstance(title, unicode): # noqa: F821 + title = unicode(DogStatsd._escape_event_content(title), 'utf8') # noqa: F821 + if not isinstance(message, unicode): # noqa: F821 + message = unicode(DogStatsd._escape_event_content(message), 'utf8') # noqa: F821 + + # Append all client level tags to every event + tags = self._add_constant_tags(tags) + + string = u"_e{{{},{}}}:{}|{}".format( + len(title.encode('utf8', 'replace')), + len(message.encode('utf8', 'replace')), + title, + message, + ) + if date_happened: - string = '%s|d:%d' % (string, date_happened) + string = "%s|d:%d" % (string, date_happened) if hostname: - string = '%s|h:%s' % (string, hostname) + string = "%s|h:%s" % (string, hostname) if aggregation_key: - string = '%s|k:%s' % (string, aggregation_key) + string = "%s|k:%s" % (string, aggregation_key) if priority: - string = '%s|p:%s' % (string, priority) + string = "%s|p:%s" % (string, priority) if source_type_name: - string = '%s|s:%s' % (string, source_type_name) + string = "%s|s:%s" % (string, source_type_name) if alert_type: - string = '%s|t:%s' % (string, alert_type) + string 
= "%s|t:%s" % (string, alert_type) if tags: - string = '%s|#%s' % (string, ','.join(tags)) + string = "%s|#%s" % (string, ",".join(tags)) + if self._container_id: + string = "%s|c:%s" % (string, self._container_id) if len(string) > 8 * 1024: - raise Exception(u'Event "%s" payload is too big (more that 8KB), ' - 'event discarded' % title) + raise ValueError( + u'Event "{0}" payload is too big (>=8KB). Event discarded'.format( + title + ) + ) + + if self._telemetry: + self.events_count += 1 self._send(string) - def service_check(self, check_name, status, tags=None, timestamp=None, - hostname=None, message=None): + def service_check( + self, + check_name, + status, + tags=None, + timestamp=None, + hostname=None, + message=None, + ): """ Send a service check run. - >>> statsd.service_check('my_service.check_name', DogStatsd.WARNING) + >>> statsd.service_check("my_service.check_name", DogStatsd.WARNING) """ - message = self._escape_service_check_message(message) if message is not None else '' + message = DogStatsd._escape_service_check_message(message) if message is not None else "" + + string = u"_sc|{0}|{1}".format(check_name, status) - string = u'_sc|{0}|{1}'.format(check_name, status) + # Append all client level tags to every status check + tags = self._add_constant_tags(tags) if timestamp: - string = u'{0}|d:{1}'.format(string, timestamp) + string = u"{0}|d:{1}".format(string, timestamp) if hostname: - string = u'{0}|h:{1}'.format(string, hostname) + string = u"{0}|h:{1}".format(string, hostname) if tags: - string = u'{0}|#{1}'.format(string, ','.join(tags)) + string = u"{0}|#{1}".format(string, ",".join(tags)) if message: - string = u'{0}|m:{1}'.format(string, message) + string = u"{0}|m:{1}".format(string, message) + if self._container_id: + string = u"{0}|c:{1}".format(string, self._container_id) + + if self._telemetry: + self.service_checks_count += 1 self._send(string) + def _add_constant_tags(self, tags): + if self.constant_tags: + if tags: + return tags + 
self.constant_tags + + return self.constant_tags + return tags + + def _is_origin_detection_enabled(self, container_id, origin_detection_enabled): + """ + Returns whether the client should fill the container field. + If a user-defined container ID is provided, we don't ignore origin detection + as dd.internal.entity_id is prioritized over the container field for backward compatibility. + We try to fill the container field automatically unless DD_ORIGIN_DETECTION_ENABLED is explicitly set to false. + """ + if not origin_detection_enabled or container_id is not None: + # origin detection is explicitly disabled + # or a user-defined container ID was provided + return False + value = os.environ.get(ORIGIN_DETECTION_ENABLED, "") + return value.lower() not in {"no", "false", "0", "n", "off"} + + def _set_container_id(self, container_id, origin_detection_enabled): + """ + Initializes the container ID. + It can either be provided by the user or read from cgroups. + """ + if container_id: + self._container_id = container_id + return + if origin_detection_enabled: + try: + reader = Cgroup() + self._container_id = reader.container_id + except Exception as e: + log.debug("Couldn't get container ID: %s", str(e)) + self._container_id = None + + def _start_sender_thread(self): + if not self._sender_enabled or self._forking: + return + + if self._queue is not None: + return + + self._queue = queue.Queue(self._sender_queue_size) + + log.debug("Starting background sender thread") + self._sender_thread = threading.Thread( + name="{}_sender_thread".format(self.__class__.__name__), + target=self._sender_main_loop, + args=(self._queue,) + ) + self._sender_thread.daemon = True + self._sender_thread.start() + + def _stop_sender_thread(self): + # Lock ensures that nothing gets added to the queue after we disable it. 
+ with self._buffer_lock: + if not self._queue: + return + self._queue.put(Stop) + self._queue = None + + self._sender_thread.join() + self._sender_thread = None + + def _sender_main_loop(self, queue): + while True: + item = queue.get() + if item is Stop: + queue.task_done() + return + self._xmit_packet_with_telemetry(item) + queue.task_done() + + def wait_for_pending(self): + """ + Flush the buffer and wait for all queued payloads to be written to the server. + """ + + self.flush_buffered_metrics() + + # Avoid race with disable_background_sender. We don't need a + # lock, just copy the value so it doesn't change between the + # check and join later. + queue = self._queue + + if queue is not None: + queue.join() + + def pre_fork(self): + """Prepare client for a process fork. + + Flush any pending payloads and stop all background threads. + + The client should not be used from this point until + state is restored by calling post_fork_parent() or + post_fork_child(). + """ + + # Hold the config lock across fork. This will make sure that + # we don't fork in the middle of the concurrent modification + # of the client's settings. Data protected by other locks may + # be left in inconsistent state in the child process, which we + # will clean up in post_fork_child. + + self._config_lock.acquire() + self._stop_flush_thread() + self._stop_sender_thread() + + def post_fork_parent(self): + """Restore the client state after a fork in the parent process.""" + self._start_flush_thread() + self._start_sender_thread() + self._config_lock.release() + + def post_fork_child(self): + """Restore the client state after a fork in the child process.""" + self._config_lock.release() + + # Discard the locks that could have been locked at the time + # when we forked. This may cause inconsistent internal state, + # which we will fix in the next steps. + self._socket_lock = Lock() + self._buffer_lock = RLock() + + # Reset the buffer so we don't send metrics from the parent + # process. 
Also makes sure buffer properties are consistent. + self._reset_buffer() + # Execute the socket_path setter to reconcile transport and + # payload size properties in respect to socket_path value. + self.socket_path = self.socket_path + self.close_socket() + + with self._config_lock: + self._start_flush_thread() + self._start_sender_thread() + + def stop(self): + """Stop the client. + + Disable buffering, aggregation, background sender and flush any pending payloads to the server. + + Client remains usable after this method, but sending metrics may block if socket_timeout is enabled. + """ + + self.disable_background_sender() + self._disable_buffering = True + self._disable_aggregation = True + self.flush_aggregated_metrics() + self.flush_buffered_metrics() + self.close_socket() + statsd = DogStatsd() diff --git a/datadog/dogstatsd/container.py b/datadog/dogstatsd/container.py new file mode 100644 index 000000000..5ee2ea61d --- /dev/null +++ b/datadog/dogstatsd/container.py @@ -0,0 +1,111 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under +# the BSD-3-Clause License. This product includes software developed at Datadog +# (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc + +import errno +import os +import re + + +class UnresolvableContainerID(Exception): + """ + Unable to get container ID from cgroup. + """ + + +class Cgroup(object): + """ + A reader class that retrieves either: + - The current container ID parsed from the cgroup file + - The cgroup controller inode. + + Returns: + object: Cgroup + + Raises: + `NotImplementedError`: No proc filesystem is found (non-Linux systems) + `UnresolvableContainerID`: Unable to read the container ID + """ + + CGROUP_PATH = "/proc/self/cgroup" + CGROUP_MOUNT_PATH = "/sys/fs/cgroup" # cgroup mount path. + CGROUP_NS_PATH = "/proc/self/ns/cgroup" # path to the cgroup namespace file. 
+ CGROUPV1_BASE_CONTROLLER = "memory" # controller used to identify the container-id in cgroup v1 (memory). + CGROUPV2_BASE_CONTROLLER = "" # controller used to identify the container-id in cgroup v2. + HOST_CGROUP_NAMESPACE_INODE = 0xEFFFFFFB # inode of the host cgroup namespace. + + UUID_SOURCE = r"[0-9a-f]{8}[-_][0-9a-f]{4}[-_][0-9a-f]{4}[-_][0-9a-f]{4}[-_][0-9a-f]{12}" + CONTAINER_SOURCE = r"[0-9a-f]{64}" + TASK_SOURCE = r"[0-9a-f]{32}-\d+" + LINE_RE = re.compile(r"^(\d+):([^:]*):(.+)$") + CONTAINER_RE = re.compile(r"(?:.+)?({0}|{1}|{2})(?:\.scope)?$".format(UUID_SOURCE, CONTAINER_SOURCE, TASK_SOURCE)) + + def __init__(self): + if self._is_host_cgroup_namespace(): + self.container_id = self._read_cgroup_path() + return + self.container_id = self._get_cgroup_from_inode() + + def _is_host_cgroup_namespace(self): + """Check if the current process is in a host cgroup namespace.""" + try: + return ( + os.stat(self.CGROUP_NS_PATH).st_ino == self.HOST_CGROUP_NAMESPACE_INODE + if os.path.exists(self.CGROUP_NS_PATH) + else False + ) + except Exception: + return False + + def _read_cgroup_path(self): + """Read the container ID from the cgroup file.""" + try: + with open(self.CGROUP_PATH, mode="r") as fp: + for line in fp: + line = line.strip() + match = self.LINE_RE.match(line) + if not match: + continue + _, _, path = match.groups() + parts = [p for p in path.split("/")] + if len(parts): + match = self.CONTAINER_RE.match(parts.pop()) + if match: + return "ci-{0}".format(match.group(1)) + except IOError as e: + if e.errno != errno.ENOENT: + raise NotImplementedError("Unable to open {}.".format(self.CGROUP_PATH)) + except Exception as e: + raise UnresolvableContainerID("Unable to read the container ID: " + str(e)) + return None + + def _get_cgroup_from_inode(self): + """Read the container ID from the cgroup inode.""" + # Parse /proc/self/cgroup and get a map of controller to its associated cgroup node path. 
+ cgroup_controllers_paths = {} + with open(self.CGROUP_PATH, mode="r") as fp: + for line in fp: + tokens = line.strip().split(":") + if len(tokens) != 3: + continue + if tokens[1] == self.CGROUPV1_BASE_CONTROLLER or tokens[1] == self.CGROUPV2_BASE_CONTROLLER: + cgroup_controllers_paths[tokens[1]] = tokens[2] + + # Retrieve the cgroup inode from "/sys/fs/cgroup + controller + cgroupNodePath" + for controller in [ + self.CGROUPV1_BASE_CONTROLLER, + self.CGROUPV2_BASE_CONTROLLER, + ]: + if controller in cgroup_controllers_paths: + inode_path = os.path.join( + self.CGROUP_MOUNT_PATH, + controller, + cgroup_controllers_paths[controller] if cgroup_controllers_paths[controller] != "/" else "", + ) + inode = os.stat(inode_path).st_ino + # 0 is not a valid inode. 1 is a bad block inode and 2 is the root of a filesystem. + if inode > 2: + return "in-{0}".format(inode) + + return None diff --git a/datadog/dogstatsd/context.py b/datadog/dogstatsd/context.py new file mode 100644 index 000000000..90e9ce90e --- /dev/null +++ b/datadog/dogstatsd/context.py @@ -0,0 +1,88 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +# stdlib +from functools import wraps + +try: + from time import monotonic # type: ignore[attr-defined] +except ImportError: + from time import time as monotonic + +# datadog +from datadog.dogstatsd.context_async import _get_wrapped_co +from datadog.util.compat import iscoroutinefunction + + +class TimedContextManagerDecorator(object): + """ + A context manager and a decorator which will report the elapsed time in + the context OR in a function call. 
+ """ + + def __init__(self, statsd, metric=None, tags=None, sample_rate=1, use_ms=None): + self.statsd = statsd + self.timing_func = statsd.timing + self.metric = metric + self.tags = tags + self.sample_rate = sample_rate + self.use_ms = use_ms + self.elapsed = None + + def __call__(self, func): + """ + Decorator which returns the elapsed time of the function call. + + Default to the function name if metric was not provided. + """ + if not self.metric: + self.metric = "%s.%s" % (func.__module__, func.__name__) + + # Coroutines + if iscoroutinefunction(func): + return _get_wrapped_co(self, func) + + # Others + @wraps(func) + def wrapped(*args, **kwargs): + start = monotonic() + try: + return func(*args, **kwargs) + finally: + self._send(start) + + return wrapped + + def __enter__(self): + if not self.metric: + raise TypeError("Cannot used timed without a metric!") + self._start = monotonic() + return self + + def __exit__(self, type, value, traceback): + # Report the elapsed time of the context manager. + self._send(self._start) + + def _send(self, start): + elapsed = monotonic() - start + use_ms = self.use_ms if self.use_ms is not None else self.statsd.use_ms + elapsed = int(round(1000 * elapsed)) if use_ms else elapsed + self.timing_func(self.metric, elapsed, self.tags, self.sample_rate) + self.elapsed = elapsed + + def start(self): + self.__enter__() + + def stop(self): + self.__exit__(None, None, None) + + +class DistributedContextManagerDecorator(TimedContextManagerDecorator): + """ + A context manager and a decorator which will report the elapsed time in + the context OR in a function call using the custom distribution metric. 
+ """ + + def __init__(self, statsd, metric=None, tags=None, sample_rate=1, use_ms=None): + super(DistributedContextManagerDecorator, self).__init__(statsd, metric, tags, sample_rate, use_ms) + self.timing_func = statsd.distribution diff --git a/datadog/dogstatsd/context_async.py b/datadog/dogstatsd/context_async.py new file mode 100644 index 000000000..d178d4e49 --- /dev/null +++ b/datadog/dogstatsd/context_async.py @@ -0,0 +1,52 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +""" +Decorator `timed` for coroutine methods. + +Warning: requires Python 3.5 or higher. +""" +# stdlib +import sys + + +# Wrap the Python 3.5+ function in a docstring to avoid syntax errors when +# running mypy in --py2 mode. Currently there is no way to have mypy skip an +# entire file if it has syntax errors. This solution is very hacky; another +# option is to specify the source files to process in mypy.ini (using glob +# inclusion patterns), and omit this file from the list. +# +# https://stackoverflow.com/a/57023749/3776794 +# https://github.com/python/mypy/issues/6897 +ASYNC_SOURCE = r''' +from functools import wraps +try: + from time import monotonic +except ImportError: + from time import time as monotonic + + +def _get_wrapped_co(self, func): + """ + `timed` wrapper for coroutine methods. + """ + @wraps(func) + async def wrapped_co(*args, **kwargs): + start = monotonic() + try: + result = await func(*args, **kwargs) + return result + finally: + self._send(start) + return wrapped_co +''' + + +def _get_wrapped_co(self, func): + raise NotImplementedError( + u"Decorator `timed` compatibility with coroutine functions" u" requires Python 3.5 or higher." 
+ ) + + +if sys.version_info >= (3, 5): + exec(compile(ASYNC_SOURCE, __file__, "exec")) diff --git a/datadog/dogstatsd/metric_types.py b/datadog/dogstatsd/metric_types.py new file mode 100644 index 000000000..8eee29849 --- /dev/null +++ b/datadog/dogstatsd/metric_types.py @@ -0,0 +1,4 @@ +class MetricType: + COUNT = "c" + GAUGE = "g" + SET = "s" diff --git a/datadog/dogstatsd/metrics.py b/datadog/dogstatsd/metrics.py new file mode 100644 index 000000000..570c3dcf0 --- /dev/null +++ b/datadog/dogstatsd/metrics.py @@ -0,0 +1,53 @@ +from datadog.dogstatsd.metric_types import MetricType + + +class MetricAggregator(object): + def __init__(self, name, tags, rate, metric_type, value=0, timestamp=0): + self.name = name + self.tags = tags + self.rate = rate + self.metric_type = metric_type + self.value = value + self.timestamp = timestamp + + def aggregate(self, value): + raise NotImplementedError("Subclasses should implement this method.") + + +class CountMetric(MetricAggregator): + def __init__(self, name, value, tags, rate, timestamp=0): + super(CountMetric, self).__init__( + name, tags, rate, MetricType.COUNT, value, timestamp + ) + + def aggregate(self, v): + self.value += v + + +class GaugeMetric(MetricAggregator): + def __init__(self, name, value, tags, rate, timestamp=0): + super(GaugeMetric, self).__init__( + name, tags, rate, MetricType.GAUGE, value, timestamp + ) + + def aggregate(self, v): + self.value = v + + +class SetMetric(MetricAggregator): + def __init__(self, name, value, tags, rate, timestamp=0): + default_value = 0 + super(SetMetric, self).__init__( + name, tags, rate, MetricType.SET, default_value, default_value + ) + self.data = set() + self.data.add(value) + + def aggregate(self, v): + self.data.add(v) + + def get_data(self): + return [ + MetricAggregator(self.name, self.tags, self.rate, MetricType.SET, value) + for value in self.data + ] diff --git a/datadog/dogstatsd/route.py b/datadog/dogstatsd/route.py new file mode 100644 index 
000000000..c3fe7793d --- /dev/null +++ b/datadog/dogstatsd/route.py @@ -0,0 +1,40 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +""" +Helper(s), resolve the system's default interface. +""" +# stdlib +import socket +import struct + + +class UnresolvableDefaultRoute(Exception): + """ + Unable to resolve system's default route. + """ + + +def get_default_route(): + """ + Return the system default interface using the proc filesystem. + + Returns: + string: default route + + Raises: + `NotImplementedError`: No proc filesystem is found (non-Linux systems) + `StopIteration`: No default route found + """ + try: + with open("/proc/net/route") as f: + for line in f.readlines(): + fields = line.strip().split() + if fields[1] == "00000000": + return socket.inet_ntoa(struct.pack(" 0: + should_flush = False + if should_flush: + _get_lambda_stats().flush(float("inf")) + + def __call__(self, *args, **kw): + warnings.warn("datadog_lambda_wrapper() is relocated to https://git.io/fjy8o", DeprecationWarning) + _LambdaDecorator._enter() + try: + return self.func(*args, **kw) + finally: + _LambdaDecorator._close() + + +_lambda_stats = None +datadog_lambda_wrapper = _LambdaDecorator + + +def _get_lambda_stats(): + global _lambda_stats + # This is not thread-safe, it should be called first by _LambdaDecorator + if _lambda_stats is None: + _lambda_stats = ThreadStats() + _lambda_stats.start(flush_in_greenlet=False, flush_in_thread=False) + return _lambda_stats + + +def lambda_metric(*args, **kw): + """ Alias to expose only distributions for lambda functions""" + _get_lambda_stats().distribution(*args, **kw) + + +def _init_api_client(): + """No-op GET to initialize the requests connection with DD's endpoints + + The goal here is to make the final flush faster: + we keep alive the Requests session, 
this means that we can re-use the connection + The consequence is that the HTTP Handshake, which can take hundreds of ms, + is now made at the beginning of a lambda instead of at the end. + + By making the initial request async, we spare a lot of execution time in the lambdas. + """ + try: + api.api_client.APIClient.submit("GET", "validate") + except Exception: + pass diff --git a/datadog/threadstats/base.py b/datadog/threadstats/base.py index afbf1575f..b5e7699ab 100644 --- a/datadog/threadstats/base.py +++ b/datadog/threadstats/base.py @@ -1,34 +1,96 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc """ ThreadStats is a tool for collecting application metrics without hindering performance. It collects metrics in the application thread with very little overhead and allows flushing metrics in process, in a thread or in a greenlet, depending on your application's needs. 
""" - +import atexit import logging -from functools import wraps +import os + +# stdlib from contextlib import contextmanager +from functools import wraps from time import time +try: + from time import monotonic # type: ignore[attr-defined] +except ImportError: + from time import time as monotonic + +# datadog from datadog.api.exceptions import ApiNotInitialized from datadog.threadstats.constants import MetricType -from datadog.threadstats.metrics import MetricsAggregator, Counter, Gauge, Histogram, Timing from datadog.threadstats.events import EventsAggregator +from datadog.threadstats.metrics import MetricsAggregator, Counter, Gauge, Histogram, Timing, Distribution, Set from datadog.threadstats.reporters import HttpReporter # Loggers -log = logging.getLogger('dd.datadogpy') +log = logging.getLogger("datadog.threadstats") + +DD_ENV_TAGS_MAPPING = { + "DD_ENV": "env", + "DD_SERVICE": "service", + "DD_VERSION": "version", +} class ThreadStats(object): + def __init__(self, namespace="", constant_tags=None, compress_payload=False): + """ + Initialize a threadstats object. - def __init__(self): - """ Initialize a dogstats object. """ - # Don't collect until start is called. - self._disabled = True + :param namespace: Namespace to prefix all metric names + :type namespace: string + + :param constant_tags: Tags to attach to every metric reported by this client + :type constant_tags: list of strings + + :param compress_payload: compress the payload using zlib + :type compress_payload: bool - def start(self, flush_interval=10, roll_up_interval=10, device=None, - flush_in_thread=True, flush_in_greenlet=False, disabled=False): + :envvar DATADOG_TAGS: Tags to attach to every metric reported by ThreadStats client + :type DATADOG_TAGS: comma-delimited string + + :envvar DD_ENV: the env of the service running the ThreadStats client. + If set, it is appended to the constant (global) tags of the client. 
+ :type DD_ENV: string + + :envvar DD_SERVICE: the name of the service running the ThreadStats client. + If set, it is appended to the constant (global) tags of the client. + :type DD_SERVICE: string + + :envvar DD_VERSION: the version of the service running the ThreadStats client. + If set, it is appended to the constant (global) tags of the client. + :type DD_VERSION: string + """ + # Parameters + self.namespace = namespace + env_tags = [tag for tag in os.environ.get("DATADOG_TAGS", "").split(",") if tag] + for var, tag_name in DD_ENV_TAGS_MAPPING.items(): + value = os.environ.get(var, "") + if value: + env_tags.append("{name}:{value}".format(name=tag_name, value=value)) + if constant_tags is None: + constant_tags = [] + self.constant_tags = constant_tags + env_tags + + # State + self._disabled = True + self.compress_payload = compress_payload + + def start( + self, + flush_interval=10, + roll_up_interval=10, + device=None, + flush_in_thread=True, + flush_in_greenlet=False, + disabled=False, + ): """ Start the ThreadStats instance with the specified metric flushing method and preferences. @@ -58,10 +120,10 @@ def start(self, flush_interval=10, roll_up_interval=10, device=None, using datadog module ``initialize`` method. >>> from datadog import initialize, ThreadStats - >>> initialize(api_key='my_api_key') + >>> initialize(api_key="my_api_key") >>> stats = ThreadStats() >>> stats.start() - >>> stats.increment('home.page.hits') + >>> stats.increment("home.page.hits") :param flush_interval: The number of seconds to wait between flushes. :type flush_interval: int @@ -86,7 +148,7 @@ def start(self, flush_interval=10, roll_up_interval=10, device=None, # The reporter is responsible for sending metrics off to their final destination. # It's abstracted to support easy unit testing and in the near future, forwarding # to the datadog agent. 
- self.reporter = HttpReporter() + self.reporter = HttpReporter(compress_payload=self.compress_payload) self._is_flush_in_progress = False self.flush_count = 0 @@ -98,6 +160,9 @@ def start(self, flush_interval=10, roll_up_interval=10, device=None, elif flush_in_thread: self._start_flush_thread() + # Flush all remaining metrics on exit + atexit.register(lambda: self.flush(float("inf"))) + def stop(self): if not self._is_auto_flushing: return True @@ -106,35 +171,74 @@ def stop(self): self._is_auto_flushing = False return True - def event(self, title, text, alert_type=None, aggregation_key=None, - source_type_name=None, date_happened=None, priority=None, - tags=None, hostname=None): + def event( + self, + title, + message, + alert_type=None, + aggregation_key=None, + source_type_name=None, + date_happened=None, + priority=None, + tags=None, + hostname=None, + ): """ - Send an event. Attributes are the same as the Event API. (http://docs.datadoghq.com/api/) - >>> stats.event('Man down!', 'This server needs assistance.') - >>> stats.event('The web server restarted', \ - 'The web server is up again', alert_type='success') + Send an event. See http://docs.datadoghq.com/api/ for more info. 
+ + >>> stats.event("Man down!", "This server needs assistance.") + >>> stats.event("The web server restarted", \ + "The web server is up again", alert_type="success") """ if not self._disabled: + # Append all client level tags to every event + event_tags = tags + if self.constant_tags: + if tags: + event_tags = tags + self.constant_tags + else: + event_tags = self.constant_tags + self._event_aggregator.add_event( - title=title, text=text, alert_type=alert_type, aggregation_key=aggregation_key, - source_type_name=source_type_name, date_happened=date_happened, priority=priority, - tags=tags, host=hostname) + title=title, + text=message, + alert_type=alert_type, + aggregation_key=aggregation_key, + source_type_name=source_type_name, + date_happened=date_happened, + priority=priority, + tags=event_tags, + host=hostname, + ) def gauge(self, metric_name, value, timestamp=None, tags=None, sample_rate=1, host=None): """ - Record the current ``value`` of a metric. They most recent value in + Record the current ``value`` of a metric. The most recent value in a given flush interval will be recorded. Optionally, specify a set of tags to associate with the metric. This should be used for sum values such as total hard disk space, process uptime, total number of active users, or number of rows in a database table. - >>> stats.gauge('process.uptime', time.time() - process_start_time) - >>> stats.gauge('cache.bytes.free', cache.get_free_bytes(), tags=['version:1.0']) + >>> stats.gauge("process.uptime", time.time() - process_start_time) + >>> stats.gauge("cache.bytes.free", cache.get_free_bytes(), tags=["version:1.0"]) + """ + if not self._disabled: + self._metric_aggregator.add_point( + metric_name, tags, timestamp or time(), value, Gauge, sample_rate=sample_rate, host=host + ) + + def set(self, metric_name, value, timestamp=None, tags=None, sample_rate=1, host=None): + """ + Add ``value`` to the current set. The length of the set is + flushed as a gauge to Datadog. 
Optionally, specify a set of + tags to associate with the metric. + + >>> stats.set("example_metric.set", "value_1", tags=["environment:dev"]) """ if not self._disabled: - self._metric_aggregator.add_point(metric_name, tags, timestamp or time(), value, Gauge, - sample_rate=sample_rate, host=host) + self._metric_aggregator.add_point( + metric_name, tags, timestamp or time(), value, Set, sample_rate=sample_rate, host=host + ) def increment(self, metric_name, value=1, timestamp=None, tags=None, sample_rate=1, host=None): """ @@ -146,33 +250,50 @@ def increment(self, metric_name, value=1, timestamp=None, tags=None, sample_rate >>> stats.increment('bytes.processed', file.size()) """ if not self._disabled: - self._metric_aggregator.add_point(metric_name, tags, timestamp or time(), value, - Counter, sample_rate=sample_rate, host=host) + self._metric_aggregator.add_point( + metric_name, tags, timestamp or time(), value, Counter, sample_rate=sample_rate, host=host + ) def decrement(self, metric_name, value=1, timestamp=None, tags=None, sample_rate=1, host=None): """ Decrement a counter, optionally setting a value, tags and a sample rate. - >>> stats.decrement('files.remaining') - >>> stats.decrement('active.connections', 2) + >>> stats.decrement("files.remaining") + >>> stats.decrement("active.connections", 2) """ if not self._disabled: - self._metric_aggregator.add_point(metric_name, tags, timestamp or time(), -value, - Counter, sample_rate=sample_rate, host=host) + self._metric_aggregator.add_point( + metric_name, tags, timestamp or time(), -value, Counter, sample_rate=sample_rate, host=host + ) def histogram(self, metric_name, value, timestamp=None, tags=None, sample_rate=1, host=None): """ Sample a histogram value. Histograms will produce metrics that - describe the distribution of the recorded values, namely the minimum, - maximum, average, count and the 75th, 85th, 95th and 99th percentiles. - Optionally, specify a list of ``tags`` to associate with the metric. 
+ describe the distribution of the recorded values, namely the maximum, minimum, + average, count and the 75/85/95/99 percentiles. Optionally, specify + a list of ``tags`` to associate with the metric. + + >>> stats.histogram("uploaded_file.size", uploaded_file.size()) + """ + if not self._disabled: + self._metric_aggregator.add_point( + metric_name, tags, timestamp or time(), value, Histogram, sample_rate=sample_rate, host=host + ) - >>> stats.histogram('uploaded_file.size', uploaded_file.size()) + def distribution(self, metric_name, value, timestamp=None, tags=None, sample_rate=1, host=None): + """ + Sample a distribution value. Distributions will produce metrics that + describe the distribution of the recorded values, namely the maximum, + median, average, count and the 50/75/90/95/99 percentiles. Optionally, + specify a list of ``tags`` to associate with the metric. + + >>> stats.distribution("uploaded_file.size", uploaded_file.size()) """ if not self._disabled: - self._metric_aggregator.add_point(metric_name, tags, timestamp or time(), value, - Histogram, sample_rate=sample_rate, host=host) + self._metric_aggregator.add_point( + metric_name, tags, timestamp or time(), value, Distribution, sample_rate=sample_rate, host=host + ) def timing(self, metric_name, value, timestamp=None, tags=None, sample_rate=1, host=None): """ @@ -181,8 +302,9 @@ def timing(self, metric_name, value, timestamp=None, tags=None, sample_rate=1, h >>> stats.timing("query.response.time", 1234) """ if not self._disabled: - self._metric_aggregator.add_point(metric_name, tags, timestamp or time(), value, Timing, - sample_rate=sample_rate, host=host) + self._metric_aggregator.add_point( + metric_name, tags, timestamp or time(), value, Timing, sample_rate=sample_rate, host=host + ) @contextmanager def timer(self, metric_name, sample_rate=1, tags=None, host=None): @@ -192,7 +314,7 @@ def timer(self, metric_name, sample_rate=1, tags=None, host=None): :: def get_user(user_id): - with 
stats.timer('user.query.time'): + with stats.timer("user.query.time"): # Do what you need to ... pass @@ -203,15 +325,14 @@ def get_user(user_id): # Do what you need to ... pass finally: - stats.histogram('user.query.time', time.time() - start) + stats.histogram("user.query.time", time.time() - start) """ - start = time() + start = monotonic() try: yield finally: - end = time() - self.timing(metric_name, end - start, end, tags=tags, - sample_rate=sample_rate, host=host) + end = monotonic() + self.timing(metric_name, end - start, time(), tags=tags, sample_rate=sample_rate, host=host) def timed(self, metric_name, sample_rate=1, tags=None, host=None): """ @@ -219,7 +340,7 @@ def timed(self, metric_name, sample_rate=1, tags=None, host=None): Optionally specify a list of tags to associate with the metric. :: - @stats.timed('user.query.time') + @stats.timed("user.query.time") def get_user(user_id): # Do what you need to ... pass @@ -229,15 +350,18 @@ def get_user(user_id): try: get_user(user_id) finally: - stats.histogram('user.query.time', time.time() - start) + stats.histogram("user.query.time", time.time() - start) """ + def wrapper(func): @wraps(func) def wrapped(*args, **kwargs): with self.timer(metric_name, sample_rate, tags, host): result = func(*args, **kwargs) return result + return wrapped + return wrapper def flush(self, timestamp=None): @@ -257,7 +381,7 @@ def flush(self, timestamp=None): self._is_flush_in_progress = True # Process metrics - metrics = self._get_aggregate_metrics(timestamp or time()) + metrics, dists = self._get_aggregate_metrics_and_dists(timestamp or time()) count_metrics = len(metrics) if count_metrics: self.flush_count += 1 @@ -266,6 +390,14 @@ def flush(self, timestamp=None): else: log.debug("No metrics to flush. 
Continuing.") + count_dists = len(dists) + if count_dists: + self.flush_count += 1 + log.debug("Flush #%s sending %s distributions" % (self.flush_count, count_dists)) + self.reporter.flush_distributions(dists) + else: + log.debug("No distributions to flush. Continuing.") + # Process events events = self._get_aggregate_events() count_events = len(events) @@ -277,31 +409,53 @@ def flush(self, timestamp=None): log.debug("No events to flush. Continuing.") except ApiNotInitialized: raise - except: + except Exception: try: log.exception("Error flushing metrics and events") - except: + except Exception: pass finally: self._is_flush_in_progress = False - def _get_aggregate_metrics(self, flush_time=None): + def _get_aggregate_metrics_and_dists(self, flush_time=None): + """ + Get, format and return the rolled up metrics from the aggregator. + """ # Get rolled up metrics rolled_up_metrics = self._metric_aggregator.flush(flush_time) # FIXME: emit a dictionary from the aggregator metrics = [] - for timestamp, value, name, tags, host in rolled_up_metrics: + dists = [] + for timestamp, value, name, tags, host, metric_type, interval in rolled_up_metrics: + metric_tags = tags + metric_name = name + + # Append all client level tags to every metric + if self.constant_tags: + if tags: + metric_tags = tags + self.constant_tags + else: + metric_tags = self.constant_tags + + # Resolve the metric name + if self.namespace: + metric_name = self.namespace + "." 
+ name + metric = { - 'metric': name, - 'points': [[timestamp, value]], - 'type': MetricType.Gauge, - 'host': host, - 'device': self.device, - 'tags': tags + "metric": metric_name, + "points": [[timestamp, value]], + "type": metric_type, + "host": host, + "device": self.device, + "tags": metric_tags, + "interval": interval, } - metrics.append(metric) - return metrics + if metric_type == MetricType.Distribution: + dists.append(metric) + else: + metrics.append(metric) + return (metrics, dists) def _get_aggregate_events(self): # Get events @@ -311,6 +465,7 @@ def _get_aggregate_events(self): def _start_flush_thread(self): """ Start a thread to flush metrics. """ from datadog.threadstats.periodic_timer import PeriodicTimer + if self._is_auto_flushing: log.info("Autoflushing already started.") return @@ -321,10 +476,10 @@ def flush(): try: log.debug("Flushing metrics in thread") self.flush() - except: + except Exception: try: log.exception("Error flushing in thread") - except: + except Exception: pass log.info("Starting flush thread with interval %s." % self.flush_interval) @@ -346,10 +501,10 @@ def flush(): log.debug("Flushing metrics in greenlet") self.flush() gevent.sleep(self.flush_interval) - except: + except Exception: try: log.exception("Error flushing in greenlet") - except: + except Exception: pass log.info("Starting flush greenlet with interval %s." % self.flush_interval) diff --git a/datadog/threadstats/constants.py b/datadog/threadstats/constants.py index e031484b3..63b565d26 100644 --- a/datadog/threadstats/constants.py +++ b/datadog/threadstats/constants.py @@ -1,11 +1,18 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc + + class MetricType(object): Gauge = "gauge" Counter = "counter" Histogram = "histogram" + Rate = "rate" + Distribution = "distribution" class MonitorType(object): - SERVICE_CHECK = 'service check' - METRIC_ALERT = 'metric alert' - QUERY_ALERT = 'query alert' + SERVICE_CHECK = "service check" + METRIC_ALERT = "metric alert" + QUERY_ALERT = "query alert" ALL = (SERVICE_CHECK, METRIC_ALERT, QUERY_ALERT) diff --git a/datadog/threadstats/events.py b/datadog/threadstats/events.py index e93a24f44..a85c798a9 100644 --- a/datadog/threadstats/events.py +++ b/datadog/threadstats/events.py @@ -1,3 +1,6 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc """ Event aggregator class. """ @@ -9,6 +12,7 @@ class EventsAggregator(object): """ A simple event aggregator """ + def __init__(self): self._events = [] diff --git a/datadog/threadstats/metrics.py b/datadog/threadstats/metrics.py index cbdbf66d6..aa9fef5b2 100644 --- a/datadog/threadstats/metrics.py +++ b/datadog/threadstats/metrics.py @@ -1,11 +1,16 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc """ Metric roll-up classes. """ from collections import defaultdict import random import itertools +import threading from datadog.util.compat import iternext +from datadog.threadstats.constants import MetricType class Metric(object): @@ -18,15 +23,33 @@ def add_point(self, value): """ Add a point to the given metric. """ raise NotImplementedError() - def flush(self, timestamp): + def flush(self, timestamp, interval): """ Flush all metrics up to the given timestamp. 
""" raise NotImplementedError() +class Set(Metric): + """ A set metric. """ + + stats_tag = "g" + + def __init__(self, name, tags, host): + self.name = name + self.tags = tags + self.host = host + self.set = set() + + def add_point(self, value): + self.set.add(value) + + def flush(self, timestamp, interval): + return [(timestamp, len(self.set), self.name, self.tags, self.host, MetricType.Gauge, interval)] + + class Gauge(Metric): """ A gauge metric. """ - stats_tag = 'g' + stats_tag = "g" def __init__(self, name, tags, host): self.name = name @@ -37,14 +60,14 @@ def __init__(self, name, tags, host): def add_point(self, value): self.value = value - def flush(self, timestamp): - return [(timestamp, self.value, self.name, self.tags, self.host)] + def flush(self, timestamp, interval): + return [(timestamp, self.value, self.name, self.tags, self.host, MetricType.Gauge, interval)] class Counter(Metric): - """ A counter metric. """ + """ A metric that tracks a counter value. """ - stats_tag = 'c' + stats_tag = "c" def __init__(self, name, tags, host): self.name = name @@ -55,15 +78,33 @@ def __init__(self, name, tags, host): def add_point(self, value): self.count.append(value) - def flush(self, timestamp): + def flush(self, timestamp, interval): count = sum(self.count, 0) - return [(timestamp, count, self.name, self.tags, self.host)] + return [(timestamp, count / float(interval), self.name, self.tags, self.host, MetricType.Rate, interval)] + + +class Distribution(Metric): + """ A distribution metric. """ + + stats_tag = "d" + + def __init__(self, name, tags, host): + self.name = name + self.tags = tags + self.host = host + self.value = [] + + def add_point(self, value): + self.value.append(value) + + def flush(self, timestamp, interval): + return [(timestamp, self.value, self.name, self.tags, self.host, MetricType.Distribution, interval)] class Histogram(Metric): """ A histogram metric. 
""" - stats_tag = 'h' + stats_tag = "h" def __init__(self, name, tags, host): self.name = name @@ -88,21 +129,29 @@ def add_point(self, value): self.samples[random.randrange(0, self.sample_size)] = value self.count = iternext(self.iter_counter) - def flush(self, timestamp): + def flush(self, timestamp, interval): if not self.count: return [] metrics = [ - (timestamp, self.min, '%s.min' % self.name, self.tags, self.host), - (timestamp, self.max, '%s.max' % self.name, self.tags, self.host), - (timestamp, self.count, '%s.count' % self.name, self.tags, self.host), - (timestamp, self.average(), '%s.avg' % self.name, self.tags, self.host) + (timestamp, self.min, "%s.min" % self.name, self.tags, self.host, MetricType.Gauge, interval), + (timestamp, self.max, "%s.max" % self.name, self.tags, self.host, MetricType.Gauge, interval), + ( + timestamp, + self.count / float(interval), + "%s.count" % self.name, + self.tags, + self.host, + MetricType.Rate, + interval, + ), + (timestamp, self.average(), "%s.avg" % self.name, self.tags, self.host, MetricType.Gauge, interval), ] length = len(self.samples) self.samples.sort() for p in self.percentiles: val = self.samples[int(round(p * length - 1))] - name = '%s.%spercentile' % (self.name, int(p * 100)) - metrics.append((timestamp, val, name, self.tags, self.host)) + name = "%s.%spercentile" % (self.name, int(p * 100)) + metrics.append((timestamp, val, name, self.tags, self.host, MetricType.Gauge, interval)) return metrics def average(self): @@ -116,7 +165,7 @@ class Timing(Histogram): Inherit from Histogram to workaround and support it in API mode """ - stats_tag = 'ms' + stats_tag = "ms" class MetricsAggregator(object): @@ -125,6 +174,7 @@ class MetricsAggregator(object): """ def __init__(self, roll_up_interval=10): + self._lock = threading.RLock() self._metrics = defaultdict(lambda: {}) self._roll_up_interval = roll_up_interval @@ -132,16 +182,22 @@ def add_point(self, metric, tags, timestamp, value, metric_class, sample_rate=1, # 
The sample rate is currently ignored for in process stuff interval = timestamp - timestamp % self._roll_up_interval key = (metric, host, tuple(sorted(tags)) if tags else None) - if key not in self._metrics[interval]: - self._metrics[interval][key] = metric_class(metric, tags, host) - self._metrics[interval][key].add_point(value) + with self._lock: + if key not in self._metrics[interval]: + self._metrics[interval][key] = metric_class(metric, tags, host) + self._metrics[interval][key].add_point(value) def flush(self, timestamp): """ Flush all metrics up to the given timestamp. """ - interval = timestamp - timestamp % self._roll_up_interval - past_intervals = [i for i in self._metrics if i < interval] - metrics = [] - for i in past_intervals: - for m in list(self._metrics.pop(i).values()): - metrics += m.flush(i) + if timestamp == float("inf"): + interval = float("inf") + else: + interval = timestamp - timestamp % self._roll_up_interval + + with self._lock: + past_intervals = [i for i in self._metrics.keys() if i < interval] + metrics = [] + for i in past_intervals: + for m in list(self._metrics.pop(i).values()): + metrics += m.flush(i, self._roll_up_interval) return metrics diff --git a/datadog/threadstats/periodic_timer.py b/datadog/threadstats/periodic_timer.py index a82a1774d..ff4b58363 100644 --- a/datadog/threadstats/periodic_timer.py +++ b/datadog/threadstats/periodic_timer.py @@ -1,9 +1,13 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc """ A small class to run a task periodically in a thread. 
""" from threading import Thread, Event +import sys class PeriodicTimer(Thread): @@ -18,27 +22,15 @@ def __init__(self, interval, function, *args, **kwargs): self.kwargs = kwargs self.finished = Event() - def _is_alive(self): - # HACK: The Python interpreter can start cleaning up objects in the - # main thread before killing daemon threads, so these references can be - # null result in errors like that in case #18, tagged - # "most likely raised during interpreter shutdown". This is hack to - # try gracefully fail in these circumstances. - # - # http://stackoverflow.com/questions/1745232 - return ( - bool(self.finished) and - bool(self.interval) and - bool(self.function) - ) - def end(self): - if self._is_alive(): - self.finished.set() + self.finished.set() def run(self): - while True: - if not self._is_alive() or self.finished.isSet(): - break - self.finished.wait(self.interval) - self.function(*self.args, **self.kwargs) + while not self.finished.wait(self.interval): + try: + self.function(*self.args, **self.kwargs) + except Exception: + # If `sys` is None, it means the interpreter is shutting down + # and it's very likely the reason why we got an exception. + if sys is not None: + raise diff --git a/datadog/threadstats/reporters.py b/datadog/threadstats/reporters.py index f79189edd..132479487 100644 --- a/datadog/threadstats/reporters.py +++ b/datadog/threadstats/reporters.py @@ -1,3 +1,6 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc """ Reporter classes. 
""" @@ -7,15 +10,19 @@ class Reporter(object): - def flush(self, metrics): raise NotImplementedError() class HttpReporter(Reporter): + def __init__(self, compress_payload=False): + self.compress_payload = compress_payload + + def flush_distributions(self, distributions): + api.Distribution.send(distributions, compress_payload=self.compress_payload) def flush_metrics(self, metrics): - api.Metric.send(metrics) + api.Metric.send(metrics, compress_payload=self.compress_payload) def flush_events(self, events): for event in events: @@ -23,6 +30,5 @@ def flush_events(self, events): class GraphiteReporter(Reporter): - def flush(self, metrics): pass diff --git a/datadog/util/__init__.py b/datadog/util/__init__.py index e69de29bb..b3017a1db 100644 --- a/datadog/util/__init__.py +++ b/datadog/util/__init__.py @@ -0,0 +1,3 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc diff --git a/datadog/util/cli.py b/datadog/util/cli.py new file mode 100644 index 000000000..f309980c2 --- /dev/null +++ b/datadog/util/cli.py @@ -0,0 +1,152 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +from datetime import datetime, timedelta +from argparse import ArgumentTypeError +import json +import re +from datadog.util.format import force_to_epoch_seconds +import time + + +def comma_list(list_str, item_func=None): + if not list_str: + raise ArgumentTypeError("Invalid comma list") + item_func = item_func or (lambda i: i) + return [item_func(i.strip()) for i in list_str.split(",") if i.strip()] + + +def comma_set(list_str, item_func=None): + return set(comma_list(list_str, item_func=item_func)) + + +def comma_list_or_empty(list_str): + if not list_str: + return [] + else: + return comma_list(list_str) + + +def list_of_ints(int_csv): + if not int_csv: + raise ArgumentTypeError("Invalid list of ints") + try: + # Try as a [1, 2, 3] list + j = json.loads(int_csv) + if isinstance(j, (list, set)): + j = [int(i) for i in j] + return j + except Exception: + pass + + try: + return [int(i.strip()) for i in int_csv.strip().split(",")] + except Exception: + raise ArgumentTypeError("Invalid list of ints: {0}".format(int_csv)) + + +def list_of_ints_and_strs(csv): + def int_or_str(item): + try: + return int(item) + except ValueError: + return item + + return comma_list(csv, int_or_str) + + +def set_of_ints(int_csv): + return set(list_of_ints(int_csv)) + + +class DateParsingError(Exception): + """Thrown if parse_date exhausts all possible parsings of a string""" + + +_date_fieldre = re.compile(r"(\d+)\s?(\w+) (ago|ahead)") + + +def _midnight(): + """ Truncate a date to midnight. 
Default to UTC midnight today.""" + return datetime.utcnow().replace(hour=0, minute=0, second=0, microsecond=0) + + +def parse_date_as_epoch_timestamp(date_str): + return parse_date(date_str, to_epoch_ts=True) + + +def _parse_date_noop_formatter(d): + """ NOOP - only here for pylint """ + return d + + +def parse_date(date_str, to_epoch_ts=False): + formatter = _parse_date_noop_formatter + if to_epoch_ts: + formatter = force_to_epoch_seconds + + if isinstance(date_str, datetime): + return formatter(date_str) + elif isinstance(date_str, time.struct_time): + return formatter(datetime.fromtimestamp(time.mktime(date_str))) + + # Parse relative dates. + if date_str == "today": + return formatter(_midnight()) + elif date_str == "yesterday": + return formatter(_midnight() - timedelta(days=1)) + elif date_str == "tomorrow": + return formatter(_midnight() + timedelta(days=1)) + elif date_str.endswith(("ago", "ahead")): + m = _date_fieldre.match(date_str) + if m: + fields = m.groups() + else: + fields = date_str.split(" ")[1:] + num = int(fields[0]) + short_unit = fields[1] + time_direction = {"ago": -1, "ahead": 1}[fields[2]] + assert short_unit, short_unit + units = ["weeks", "days", "hours", "minutes", "seconds"] + # translate 'h' -> 'hours' + short_units = dict([(u[:1], u) for u in units]) + unit = short_units.get(short_unit, short_unit) + # translate 'hour' -> 'hours' + if unit[-1] != "s": + unit += "s" # tolerate 1 hour + assert unit in units, "'%s' not in %s" % (unit, units) + return formatter(datetime.utcnow() + time_direction * timedelta(**{unit: num})) + elif date_str == "now": + return formatter(datetime.utcnow()) + + def _from_epoch_timestamp(seconds): + print("_from_epoch_timestamp({})".format(seconds)) + return datetime.utcfromtimestamp(float(seconds)) + + def _from_epoch_ms_timestamp(millis): + print("_from_epoch_ms_timestamp({})".format(millis)) + in_sec = float(millis) / 1000.0 + print("_from_epoch_ms_timestamp({}) -> {}".format(millis, in_sec)) + return 
_from_epoch_timestamp(in_sec) + + # Or parse date formats (most specific to least specific) + parse_funcs = [ + lambda d: datetime.strptime(d, "%Y-%m-%d %H:%M:%S.%f"), + lambda d: datetime.strptime(d, "%Y-%m-%d %H:%M:%S"), + lambda d: datetime.strptime(d, "%Y-%m-%dT%H:%M:%S.%f"), + lambda d: datetime.strptime(d, "%Y-%m-%dT%H:%M:%S"), + lambda d: datetime.strptime(d, "%Y-%m-%d %H:%M"), + lambda d: datetime.strptime(d, "%Y-%m-%d-%H"), + lambda d: datetime.strptime(d, "%Y-%m-%d"), + lambda d: datetime.strptime(d, "%Y-%m"), + lambda d: datetime.strptime(d, "%Y"), + _from_epoch_timestamp, # an epoch in seconds + _from_epoch_ms_timestamp, # an epoch in milliseconds + ] + + for parse_func in parse_funcs: + try: + return formatter(parse_func(date_str)) + except Exception: + pass + raise DateParsingError(u"Could not parse {0} as date".format(date_str)) diff --git a/datadog/util/compat.py b/datadog/util/compat.py index 7aac77d90..5fcd1e3b1 100644 --- a/datadog/util/compat.py +++ b/datadog/util/compat.py @@ -1,22 +1,49 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc # flake8: noqa - -""" Imports for compatibility with Py2 and Py3 """ -import sys +Imports for compatibility with Python 2, Python 3 and Google App Engine. +""" import logging -import socket - +import sys -def is_p3k(): - value = sys.version_info[0] == 3 - return value +# Logging +log = logging.getLogger("datadog.util") -get_input = input +# Note: using `sys.version_info` instead of the helper functions defined here +# so that mypy detects version-specific code paths. Currently, mypy doesn't +# support try/except imports for version-specific code paths either. 
+# +# https://mypy.readthedocs.io/en/stable/common_issues.html#python-version-and-system-platform-checks -if is_p3k(): - import configparser - import urllib.request as url_lib, urllib.error, urllib.parse +# Python 3.x +if sys.version_info[0] >= 3: + import builtins + from collections import UserDict as IterableUserDict from io import StringIO + from urllib.parse import urlparse + + class LazyLoader(object): + def __init__(self, module_name): + self.module_name = module_name + + def __getattr__(self, name): + # defer the importing of the module to when one of its attributes + # is accessed + import importlib + mod = importlib.import_module(self.module_name) + return getattr(mod, name) + + url_lib = LazyLoader('urllib.request') + configparser = LazyLoader('configparser') + + def ConfigParser(): + return configparser.ConfigParser() + + imap = map + get_input = input + text = str def iteritems(d): return iter(d.items()) @@ -24,11 +51,20 @@ def iteritems(d): def iternext(iter): return next(iter) + +# Python 2.x else: - get_input = raw_input + import __builtin__ as builtins import ConfigParser as configparser - import urllib2 as url_lib + from configparser import ConfigParser from cStringIO import StringIO + from itertools import imap + import urllib2 as url_lib + from urlparse import urlparse + from UserDict import IterableUserDict + + get_input = raw_input + text = unicode def iteritems(d): return d.iteritems() @@ -37,23 +73,73 @@ def iternext(iter): return iter.next() -try: - from UserDict import IterableUserDict -except ImportError: - from collections import UserDict as IterableUserDict +# Python >= 3.5 +if sys.version_info >= (3, 5): + from inspect import iscoroutinefunction +# Others +else: -try: - from configparser import ConfigParser -except ImportError: - from ConfigParser import ConfigParser + def iscoroutinefunction(*args, **kwargs): + return False -try: - from urllib.parse import urlparse -except ImportError: - from urlparse import urlparse -# prefer 
simplejson but fall back to stdlib python -try: - import simplejson as json -except ImportError: - import json +# Python >= 2.7 +if sys.version_info >= (2, 7): + from logging import NullHandler +# Python 2.6.x +else: + class NullHandler(logging.Handler): + def emit(self, record): + pass + + +def _is_py_version_higher_than(major, minor=0): + """ + Assert that the Python version is higher than `$maj.$min`. + """ + return sys.version_info >= (major, minor) + + +def is_p3k(): + """ + Assert that Python is version 3 or higher. + """ + return _is_py_version_higher_than(3) + + +def is_higher_py32(): + """ + Assert that Python is version 3.2 or higher. + """ + return _is_py_version_higher_than(3, 2) + + +def is_higher_py35(): + """ + Assert that Python is version 3.5 or higher. + """ + return _is_py_version_higher_than(3, 5) + + +def is_pypy(): + """ + Assert that PyPy is being used (regardless of 2 or 3) + """ + return "__pypy__" in sys.builtin_module_names + + +def conditional_lru_cache(func): + """ + A decorator that conditionally enables a lru_cache of size 512 if + the version of Python can support it (>3.2) and otherwise returns + the original function + """ + if not is_higher_py32(): + return func + + log.debug("Enabling LRU cache for function %s", func.__name__) + + # pylint: disable=import-outside-toplevel + from functools import lru_cache + + return lru_cache(maxsize=512)(func) diff --git a/datadog/util/config.py b/datadog/util/config.py index c303d9e29..cd186bc17 100644 --- a/datadog/util/config.py +++ b/datadog/util/config.py @@ -1,15 +1,17 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc import os -import logging import string import sys +# datadog from datadog.util.compat import configparser, StringIO, is_p3k +from datadog.version import __version__ # CONSTANTS DATADOG_CONF = "datadog.conf" -log = logging.getLogger('dd.datadogpy') - class CfgNotFound(Exception): pass @@ -21,16 +23,16 @@ class PathNotFound(Exception): def get_os(): "Human-friendly OS name" - if sys.platform == 'darwin': - return 'mac' - elif sys.platform.find('freebsd') != -1: - return 'freebsd' - elif sys.platform.find('linux') != -1: - return 'linux' - elif sys.platform.find('win32') != -1: - return 'windows' - elif sys.platform.find('sunos') != -1: - return 'solaris' + if sys.platform == "darwin": + return "mac" + elif sys.platform.find("freebsd") != -1: + return "freebsd" + elif sys.platform.find("linux") != -1: + return "linux" + elif sys.platform.find("win32") != -1: + return "windows" + elif sys.platform.find("sunos") != -1: + return "solaris" else: return sys.platform @@ -54,10 +56,7 @@ def _windows_commondata_path(): CSIDL_COMMON_APPDATA = 35 _SHGetFolderPath = windll.shell32.SHGetFolderPathW - _SHGetFolderPath.argtypes = [wintypes.HWND, - ctypes.c_int, - wintypes.HANDLE, - wintypes.DWORD, wintypes.LPCWSTR] + _SHGetFolderPath.argtypes = [wintypes.HWND, ctypes.c_int, wintypes.HANDLE, wintypes.DWORD, wintypes.LPCWSTR] path_buf = ctypes.create_unicode_buffer(wintypes.MAX_PATH) _SHGetFolderPath(0, CSIDL_COMMON_APPDATA, 0, 0, path_buf) @@ -66,21 +65,21 @@ def _windows_commondata_path(): def _windows_config_path(): common_data = _windows_commondata_path() - path = os.path.join(common_data, 'Datadog', DATADOG_CONF) + path = os.path.join(common_data, "Datadog", DATADOG_CONF) if os.path.exists(path): return path raise PathNotFound(path) def _unix_config_path(): - path = os.path.join('/etc/dd-agent', DATADOG_CONF) + path = os.path.join("/etc/dd-agent", DATADOG_CONF) if os.path.exists(path): return path raise PathNotFound(path) def 
_mac_config_path(): - path = os.path.join('~/.datadog-agent/agent', DATADOG_CONF) + path = os.path.join("~/.datadog-agent/agent", DATADOG_CONF) path = os.path.expanduser(path) if os.path.exists(path): return path @@ -96,9 +95,9 @@ def get_config_path(cfg_path=None, os_name=None): os_name = get_os() # Check for an OS-specific path, continue on not-found exceptions - if os_name == 'windows': + if os_name == "windows": return _windows_config_path() - elif os_name == 'mac': + elif os_name == "mac": return _mac_config_path() else: return _unix_config_path() @@ -115,13 +114,35 @@ def get_config(cfg_path=None, options=None): config_path = get_config_path(cfg_path, os_name=get_os()) config = configparser.ConfigParser() - config.readfp(skip_leading_wsp(open(config_path))) + with open(config_path) as config_file: + if is_p3k(): + config.read_file(skip_leading_wsp(config_file)) + else: + config.readfp(skip_leading_wsp(config_file)) # bulk import - for option in config.options('Main'): - agentConfig[option] = config.get('Main', option) + for option in config.options("Main"): + agentConfig[option] = config.get("Main", option) except Exception: raise CfgNotFound return agentConfig + + +def get_pkg_version(): + """ + Resolve `datadog` package version. + + Deprecated: use `datadog.__version__` directly instead + """ + return __version__ + + +def get_version(): + """ + Resolve `datadog` package version. + + Deprecated: use `datadog.__version__` directly instead + """ + return __version__ diff --git a/datadog/util/deprecation.py b/datadog/util/deprecation.py new file mode 100644 index 000000000..57673ef8d --- /dev/null +++ b/datadog/util/deprecation.py @@ -0,0 +1,24 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc + +import warnings +from functools import wraps + + +def deprecated(message): + def deprecated_decorator(func): + @wraps(func) + def deprecated_func(*args, **kwargs): + warnings.warn( + "'{0}' is a deprecated function. {1}".format(func.__name__, message), + category=DeprecationWarning, + stacklevel=2, + ) + warnings.simplefilter('default', DeprecationWarning) + + return func(*args, **kwargs) + + return deprecated_func + + return deprecated_decorator diff --git a/datadog/util/format.py b/datadog/util/format.py new file mode 100644 index 000000000..f6b1e96af --- /dev/null +++ b/datadog/util/format.py @@ -0,0 +1,42 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +# stdlib +import calendar +import datetime +import json +import re + +from datadog.util.compat import conditional_lru_cache + +TAG_INVALID_CHARS_RE = re.compile(r"[^\w\d_\-:/\.]", re.UNICODE) +TAG_INVALID_CHARS_SUBS = "_" + + +def pretty_json(obj): + return json.dumps(obj, sort_keys=True, indent=2) + + +def construct_url(host, api_version, path): + return "{}/api/{}/{}".format(host.strip("/"), api_version.strip("/"), path.strip("/")) + + +def construct_path(api_version, path): + return "{}/{}".format(api_version.strip("/"), path.strip("/")) + + +def force_to_epoch_seconds(epoch_sec_or_dt): + if isinstance(epoch_sec_or_dt, datetime.datetime): + return calendar.timegm(epoch_sec_or_dt.timetuple()) + return epoch_sec_or_dt + + +@conditional_lru_cache +def _normalize_tags_with_cache(tag_list): + return [TAG_INVALID_CHARS_RE.sub(TAG_INVALID_CHARS_SUBS, tag) for tag in tag_list] + + +def normalize_tags(tag_list): + # We have to turn our input tag list into a non-mutable tuple for it to + # be hashable (and thus usable) by the @lru_cache decorator. 
+ return _normalize_tags_with_cache(tuple(tag_list)) diff --git a/datadog/util/hostname.py b/datadog/util/hostname.py index 0e5b6394f..6a1f85726 100644 --- a/datadog/util/hostname.py +++ b/datadog/util/hostname.py @@ -1,30 +1,40 @@ -import socket -import re +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +# stdlib +import json import logging +import re +import socket import subprocess import types +from typing import Dict, Optional -from datadog.util.compat import url_lib, is_p3k, iteritems, json +# datadog +from datadog.util.compat import url_lib, is_p3k, iteritems from datadog.util.config import get_config, get_os, CfgNotFound -VALID_HOSTNAME_RFC_1123_PATTERN = re.compile(r"^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$") # noqa +VALID_HOSTNAME_RFC_1123_PATTERN = re.compile( + r"^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$" +) # noqa MAX_HOSTNAME_LEN = 255 -log = logging.getLogger('dd.datadogpy') +log = logging.getLogger("datadog.api") def is_valid_hostname(hostname): - if hostname.lower() in set([ - 'localhost', - 'localhost.localdomain', - 'localhost6.localdomain6', - 'ip6-localhost', - ]): + if hostname.lower() in set( + [ + "localhost", + "localhost.localdomain", + "localhost6.localdomain6", + "ip6-localhost", + ] + ): log.warning("Hostname: %s is local" % hostname) return False if len(hostname) > MAX_HOSTNAME_LEN: - log.warning("Hostname: %s is too long (max length is %s characters)" - % (hostname, MAX_HOSTNAME_LEN)) + log.warning("Hostname: %s is too long (max length is %s characters)" % (hostname, MAX_HOSTNAME_LEN)) return False if VALID_HOSTNAME_RFC_1123_PATTERN.match(hostname) is None: log.warning("Hostname: %s is not complying with RFC 1123" % 
hostname) @@ -32,7 +42,8 @@ def is_valid_hostname(hostname): return True -def get_hostname(): +def get_hostname(hostname_from_config): + # type: (bool) -> Optional[str] """ Get the canonical host name this agent should identify as. This is the authoritative source of the host name for the agent. @@ -47,12 +58,18 @@ def get_hostname(): hostname = None config = None - # first, try the config + # first, try the config if hostname_from_config is set to True try: - config = get_config() - config_hostname = config.get('hostname') - if config_hostname and is_valid_hostname(config_hostname): - return config_hostname + if hostname_from_config: + config = get_config() + config_hostname = config.get("hostname") + if config_hostname and is_valid_hostname(config_hostname): + log.warning( + "Hostname lookup from agent configuration will be deprecated " + "in an upcoming version of datadogpy. Set hostname_from_config to False " + "to get rid of this warning" + ) + return config_hostname except CfgNotFound: log.info("No agent or invalid configuration file found") @@ -64,10 +81,11 @@ def get_hostname(): return gce_hostname # then move on to os-specific detection if hostname is None: + def _get_hostname_unix(): try: # try fqdn - p = subprocess.Popen(['/bin/hostname', '-f'], stdout=subprocess.PIPE) + p = subprocess.Popen(["/bin/hostname", "-f"], stdout=subprocess.PIPE, stderr=subprocess.DEVNULL) out, err = p.communicate() if p.returncode == 0: if is_p3k(): @@ -78,13 +96,13 @@ def _get_hostname_unix(): return None os_name = get_os() - if os_name in ['mac', 'freebsd', 'linux', 'solaris']: + if os_name in ["mac", "freebsd", "linux", "solaris"]: unix_hostname = _get_hostname_unix() if unix_hostname and is_valid_hostname(unix_hostname): hostname = unix_hostname # if we have an ec2 default hostname, see if there's an instance-id available - if hostname is not None and True in [hostname.lower().startswith(p) for p in [u'ip-', u'domu']]: + if hostname is not None and True in 
[hostname.lower().startswith(p) for p in [u"ip-", u"domu"]]: instanceid = EC2.get_instance_id(config) if instanceid: hostname = instanceid @@ -92,19 +110,19 @@ def _get_hostname_unix(): # fall back on socket.gethostname(), socket.getfqdn() is too unreliable if hostname is None: try: - socket_hostname = socket.gethostname() + socket_hostname = socket.gethostname() # type: Optional[str] except socket.error: socket_hostname = None if socket_hostname and is_valid_hostname(socket_hostname): hostname = socket_hostname if hostname is None: - log.critical("Unable to reliably determine host name. You can define one" - " in datadog.conf or in your hosts file") - raise Exception("Unable to reliably determine host name. You can define" - " one in datadog.conf or in your hosts file") - else: - return hostname + log.warning( + u"Unable to reliably determine host name. You can define one in your `hosts` file, " + u"or in `datadog.conf` file if you have Datadog Agent installed." + ) + + return hostname def get_ec2_instance_id(): @@ -116,19 +134,18 @@ def get_ec2_instance_id(): socket.setdefaulttimeout(0.25) try: - return url_lib.urlopen(url_lib.Request("http://169.254.169.254/latest/" - "meta-data/instance-id")).read() + return url_lib.urlopen(url_lib.Request("http://169.254.169.254/latest/" "meta-data/instance-id")).read() finally: # Reset the previous default timeout socket.setdefaulttimeout(old_timeout) - except: + except Exception: return socket.gethostname() class GCE(object): URL = "http://169.254.169.254/computeMetadata/v1/?recursive=true" TIMEOUT = 0.1 # second - SOURCE_TYPE_NAME = 'google cloud platform' + SOURCE_TYPE_NAME = "google cloud platform" metadata = None @staticmethod @@ -136,7 +153,7 @@ def _get_metadata(agentConfig): if GCE.metadata is not None: return GCE.metadata - if not agentConfig['collect_instance_metadata']: + if not agentConfig["collect_instance_metadata"]: log.info("Instance metadata collection is disabled. 
Not collecting it.") GCE.metadata = {} return GCE.metadata @@ -150,7 +167,7 @@ def _get_metadata(agentConfig): try: opener = url_lib.build_opener() - opener.addheaders = [('X-Google-Metadata-Request', 'True')] + opener.addheaders = [("X-Google-Metadata-Request", "True")] GCE.metadata = json.loads(opener.open(GCE.URL).read().strip()) except Exception: @@ -168,21 +185,21 @@ def _get_metadata(agentConfig): def get_hostname(agentConfig): try: host_metadata = GCE._get_metadata(agentConfig) - return host_metadata['instance']['hostname'].split('.')[0] + return host_metadata["instance"]["hostname"].split(".")[0] except Exception: return None class EC2(object): - """Retrieve EC2 metadata - """ + """Retrieve EC2 metadata""" + URL = "http://169.254.169.254/latest/meta-data" TIMEOUT = 0.1 # second - metadata = {} + metadata = {} # type: Dict[str, str] @staticmethod def get_tags(agentConfig): - if not agentConfig['collect_instance_metadata']: + if not agentConfig["collect_instance_metadata"]: log.info("Instance metadata collection is disabled. 
Not collecting it.") return [] @@ -195,16 +212,19 @@ def get_tags(agentConfig): try: iam_role = url_lib.urlopen(EC2.URL + "/iam/security-credentials").read().strip() - iam_params = json.loads(url_lib.urlopen(EC2.URL + "/iam/security-credentials" + "/" + - unicode(iam_role)).read().strip()) + iam_params = json.loads( + url_lib.urlopen(EC2.URL + "/iam/security-credentials" + "/" + str(iam_role)).read().strip() + ) from boto.ec2.connection import EC2Connection - connection = EC2Connection(aws_access_key_id=iam_params['AccessKeyId'], - aws_secret_access_key=iam_params['SecretAccessKey'], - security_token=iam_params['Token']) - instance_object = connection.get_only_instances([EC2.metadata['instance-id']])[0] - EC2_tags = [u"%s:%s" % (tag_key, tag_value) for tag_key, tag_value - in iteritems(instance_object.tags)] + connection = EC2Connection( + aws_access_key_id=iam_params["AccessKeyId"], + aws_secret_access_key=iam_params["SecretAccessKey"], + security_token=iam_params["Token"], + ) + instance_object = connection.get_only_instances([EC2.metadata["instance-id"]])[0] + + EC2_tags = [u"%s:%s" % (tag_key, tag_value) for tag_key, tag_value in iteritems(instance_object.tags)] except Exception: log.exception("Problem retrieving custom EC2 tags") @@ -238,7 +258,7 @@ def get_metadata(agentConfig): # python 2.4 does not support an explicit timeout argument so force it here # Rather than monkey-patching urllib2, just lower the timeout globally for these calls - if not agentConfig['collect_instance_metadata']: + if not agentConfig["collect_instance_metadata"]: log.info("Instance metadata collection is disabled. 
Not collecting it.") return {} @@ -249,12 +269,21 @@ def get_metadata(agentConfig): except Exception: pass - for k in ('instance-id', 'hostname', 'local-hostname', 'public-hostname', 'ami-id', - 'local-ipv4', 'public-keys', 'public-ipv4', 'reservation-id', 'security-groups'): + for k in ( + "instance-id", + "hostname", + "local-hostname", + "public-hostname", + "ami-id", + "local-ipv4", + "public-keys", + "public-ipv4", + "reservation-id", + "security-groups", + ): try: - v = url_lib.urlopen(EC2.URL + "/" + unicode(k)).read().strip() - assert type(v) in (types.StringType, types.UnicodeType) and len(v) > 0, \ - "%s is not a string" % v + v = url_lib.urlopen(EC2.URL + "/" + str(k)).read().strip() + assert type(v) in (types.StringType, types.UnicodeType) and len(v) > 0, "%s is not a string" % v EC2.metadata[k] = v except Exception: pass diff --git a/datadog/version.py b/datadog/version.py new file mode 100644 index 000000000..6923e57f7 --- /dev/null +++ b/datadog/version.py @@ -0,0 +1 @@ +__version__ = "0.50.3-dev" diff --git a/doc/requirements.txt b/doc/requirements.txt new file mode 100644 index 000000000..483a4e960 --- /dev/null +++ b/doc/requirements.txt @@ -0,0 +1 @@ +sphinx_rtd_theme diff --git a/doc/source/conf.py b/doc/source/conf.py index beaabcafe..f8295997c 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -1,3 +1,7 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc. + # -*- coding: utf-8 -*- # # Datadog API documentation build configuration file, created by @@ -12,6 +16,7 @@ # serve to show the default. import sys, os +import sphinx_rtd_theme # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. 
If the directory is relative to the @@ -25,7 +30,7 @@ # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ['sphinx.ext.autodoc'] +extensions = ['sphinx.ext.autodoc', 'sphinx_rtd_theme'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] @@ -41,16 +46,16 @@ # General information about the project. project = u'Datadog Python Client' -copyright = u'2015, Datadog, Inc.' +copyright = u'2015-Present Datadog, Inc.' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. -version = 'v1' +#version = 'v1' # The full version, including alpha/beta/rc tags. -release = '1.0.0' +#release = '1.0.0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -91,7 +96,7 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -html_theme = 'default' +html_theme = 'sphinx_rtd_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the @@ -120,7 +125,7 @@ # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] +#html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. 
diff --git a/doc/source/index.rst b/doc/source/index.rst index a4a02376d..c8a64cb4a 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -1,29 +1,69 @@ -############################################### -:mod:`datadog` --- The Datadog's Python library -############################################### +######################################### +:mod:`datadog` --- Datadog Python library +######################################### .. module:: datadog -The :mod:`datadog` module provides :mod:`datadog.api` - a simple wrapper around Datadog's HTTP API - :mod:`datadog.threadstats` - a tool for collecting metrics in high performance applications - and :mod:`datadog.dogstatsd` a DogStatsd Python client. +The :mod:`datadog` module provides + - :mod:`datadog.api`: A client for Datadog's HTTP API. + - :mod:`datadog.dogstatsd`: A UDP/UDS DogStatsd client. + - :mod:`datadog.threadstats`: A client for Datadog’s HTTP API that submits metrics in a + worker thread. + Installation ============ -To install from source, `download `_ a distribution and run: +Install from PyPI:: + + pip install datadog + + +Initialization +============== - >>> sudo python setup.py install +:mod:`datadog` must be initialized with :meth:`datadog.initialize`. An +API key and an app key are required unless you intend to use only the +:class:`~datadog.dogstatsd.base.DogStatsd` client. The keys can be passed +explicitly to :meth:`datadog.initialize` or defined as environment variables +``DATADOG_API_KEY`` and ``DATADOG_APP_KEY`` respectively. -If you use `virtualenv `_ you do not need to use sudo. +Here's an example where the statsd host and port are configured as well:: -Datadog.api module -================== -Datadog.api is a Python client library for Datadog's `HTTP API `_. + from datadog import initialize -Datadog.api client requires to run :mod:`datadog` `initialize` method first. + initialize( + api_key="", + app_key="", + statsd_host="127.0.0.1", + statsd_port=8125 + ) .. 
autofunction:: datadog.initialize + +datadog.api +=========== +:mod:`datadog.api` is a Python client library for Datadog's `HTTP API +`_. + + +Usage +~~~~~ + +Be sure to initialize the client using :meth:`datadog.initialize` and then use +:mod:`datadog.api`:: + + from datadog import api + + api.Event.create( + title="Something big happened!", + text="And let me tell you all about it here!", + tags=["version:1", "application:web"], + ) + + .. autoclass:: datadog.api.Comment :members: :inherited-members: @@ -44,6 +84,10 @@ Datadog.api client requires to run :mod:`datadog` `initialize` method first. :members: :inherited-members: +.. autoclass:: datadog.api.Hosts + :members: + :inherited-members: + .. autoclass:: datadog.api.Infrastructure :members: :inherited-members: @@ -75,24 +119,60 @@ Datadog.api client requires to run :mod:`datadog` `initialize` method first. .. autoclass:: datadog.api.User :members: :inherited-members: + :exclude-members: invite + +.. autoclass:: datadog.api.Dashboard + :members: + :inherited-members: + +.. autoclass:: datadog.api.DashboardList + :members: + :inherited-members: -Datadog.threadstats module -========================== -Datadog.threadstats is a tool for collecting application metrics without hindering performance. -It collects metrics in the application thread with very little overhead and allows flushing -metrics in process, in a thread or in a greenlet, depending on your application's needs. +datadog.threadstats +=================== +:mod:`datadog.threadstats` is a tool for collecting application metrics without hindering +performance. It collects metrics in the application thread with very little overhead +and allows flushing metrics in process, in a thread, or in a greenlet, depending +on your application's needs. Submission is done through the HTTP API. -To run properly Datadog.threadstats requires to run :mod:`datadog` `initialize` method first. 
+Usage +~~~~~ + +Be sure to initialize the library with :meth:`datadog.initialize`. Then create +an instance of :class:`datadog.threadstats.ThreadStats`:: + + from datadog.threadstats import ThreadStats + + statsd = ThreadStats() + statsd.start() # Creates a worker thread used to submit metrics. + + # Use statsd just like any other DatadogStatsd client. + statsd.increment("home.page.hits") -.. autofunction:: datadog.initialize .. autoclass:: datadog.threadstats.base.ThreadStats :members: :inherited-members: -Datadog.dogstatsd module -========================== + +datadog.dogstatsd +================= +:mod:`datadog.dogstatsd` is a Python client for DogStatsd that submits metrics +to the Agent. + + +Usage +~~~~~ + +:: + + from datadog.dogstatsd import DogStatsd + + client = DogStatsd() + client.increment("home.page.hits") + .. autoclass:: datadog.dogstatsd.base.DogStatsd :members: @@ -101,24 +181,26 @@ Datadog.dogstatsd module .. data:: statsd - A global :class:`~datadog.dogstatsd.base.DogStatsd` instance that is easily shared - across an application's modules. Initialize this once in your application's - set-up code and then other modules can import and use it without further - configuration. + A global :class:`~datadog.dogstatsd.base.DogStatsd` instance that can be + used across an application:: >>> from datadog import initialize, statsd - >>> initialize(statsd_host='localhost', statsd_port=8125) - >>> statsd.increment('home.page.hits') + >>> initialize(statsd_host="localhost", statsd_port=8125) + >>> statsd.increment("home.page.hits") +.. data:: datadog.dogstatsd.base.SUPPORTS_FORKING + Indicates whether the Python runtime supports os.register_at_fork(). When + true, buffering and background sender can be safely used in applications + that use os.fork(). -Source -====== +.. autofunction:: datadog.dogstatsd.base.pre_fork +.. autofunction:: datadog.dogstatsd.base.post_fork -The Datadog's Python library source is freely available on Github. Check it out `here -`_. 
Get in Touch ============ -If you'd like to suggest a feature or report a bug, please add an issue `here `_. If you want to talk about Datadog in general, reach out at `datadoghq.com `_. +If you'd like to suggest a feature or report a bug, please submit an issue +`here `_. If you have questions +about Datadog in general, reach out to support@datadoghq.com. diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 000000000..27de16f77 --- /dev/null +++ b/mypy.ini @@ -0,0 +1,28 @@ +[mypy] + +# Ignore Mypy errors about packages with missing type hints. +# +# Note: If Python 2 support is dropped in the future, `pkg_resources`, +# `configparser` and `queue` will no longer need to be ignored because +# the Python 3 versions have type hints. + +[mypy-boto.*] +ignore_missing_imports = True + +[mypy-configparser.*] +ignore_missing_imports = True + +[mypy-gevent.*] +ignore_missing_imports = True + +[mypy-google.*] +ignore_missing_imports = True + +[mypy-pkg_resources.*] +ignore_missing_imports = True + +[mypy-queue.*] +ignore_missing_imports = True + +[mypy-Queue.*] +ignore_missing_imports = True diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 000000000..a2bee4c0b --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,52 @@ +[build-system] +requires = ["hatchling>=0.15.0"] +build-backend = "hatchling.build" + +[project] +name = "datadog" +description = "The Datadog Python library" +readme = "README.md" +license = {text = "BSD-3-Clause"} +requires-python = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +keywords = [ + "datadog", +] +authors = [ + { name = "Datadog, Inc.", email = "dev@datadoghq.com" }, +] +classifiers = [ + "License :: OSI Approved :: BSD License", + "Operating System :: OS Independent", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + 'Programming Language :: Python :: Implementation :: CPython', + 
"Programming Language :: Python :: Implementation :: PyPy", +] +dependencies = [ + "requests>=2.6.0", + "typing; python_version < '3.5'", + "configparser<5; python_version < '3.0'", +] +dynamic = ["version"] + +[project.urls] +"Bug Tracker" = "https://github.com/DataDog/datadogpy/issues" +Documentation = "https://datadogpy.readthedocs.io/en/latest/" +"Source Code" = "https://github.com/DataDog/datadogpy" + +[project.scripts] +dog = "datadog.dogshell:main" +dogwrap = "datadog.dogshell.wrap:main" +dogshell = "datadog.dogshell:main" +dogshellwrap = "datadog.dogshell.wrap:main" + +[tool.hatch.version] +path = "datadog/version.py" + +[tool.hatch.build.targets.sdist] +include = [ + "/datadog", + "/tests", +] diff --git a/setup.py b/setup.py index 2be75d2fd..186f3b4b2 100644 --- a/setup.py +++ b/setup.py @@ -1,50 +1,75 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc. + from setuptools import setup + +from io import open import sys + +def get_readme_md_contents(): + """read the contents of your README file""" + with open("README.md", encoding='utf-8') as f: + long_description = f.read() + return long_description + +version = {} +with open("datadog/version.py") as fp: + exec(fp.read(), version) + +# `typing` package is only required for Python versions older than 3.5, but we include it here so +# that a single wheel build can be used for all supported Python versions. Installing `typing` on +# Python 3.5+ has no effect. +# +# `configparser` package is only required for Python versions older than 3 (it is included here for +# the same reason as the `typing` package). 
install_reqs = [ - "decorator>=3.3.2", "requests>=2.6.0", + 'typing;python_version<"3.5"', + 'configparser<5;python_version<"3.0"', ] -if sys.version_info[0] == 2: - # simplejson is not python3 compatible - install_reqs.append("simplejson>=2.0.9") - -if [sys.version_info[0], sys.version_info[1]] < [2, 7]: - install_reqs.append("argparse>=1.2") setup( name="datadog", - version="0.7.0", + version=version["__version__"], install_requires=install_reqs, - tests_require=["tox", "nose", "mock", "six", "pillow"], - packages=[ - 'datadog', - 'datadog.api', - 'datadog.dogstatsd', - 'datadog.threadstats', - 'datadog.util', - 'datadog.dogshell' - ], + tests_require=["pytest", "mock", "freezegun", "psutil"], + packages=["datadog", "datadog.api", "datadog.dogstatsd", "datadog.threadstats", "datadog.util", "datadog.dogshell"], + package_data={"datadog": ["py.typed"]}, author="Datadog, Inc.", + long_description=get_readme_md_contents(), + long_description_content_type="text/markdown", author_email="dev@datadoghq.com", description="The Datadog Python library", license="BSD", - keywords="datadog data", - url="http://www.datadoghq.com", + keywords="datadog", + url="https://www.datadoghq.com", + project_urls={ + "Bug Tracker": "https://github.com/DataDog/datadogpy/issues", + "Documentation": "https://datadogpy.readthedocs.io/en/latest/", + "Source Code": "https://github.com/DataDog/datadogpy", + }, entry_points={ - 'console_scripts': [ - 'dog = datadog.dogshell:main', - 'dogwrap = datadog.dogshell.wrap:main', - ], + "console_scripts": [ + "dog = datadog.dogshell:main", + "dogwrap = datadog.dogshell.wrap:main", + "dogshell = datadog.dogshell:main", + "dogshellwrap = datadog.dogshell.wrap:main" + ] }, - test_suite="nose.collector", + test_suite="tests", classifiers=[ "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", - "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3.3", "Programming 
Language :: Python :: 3.4", - "Programming Language :: Python :: Implementation :: PyPy" - ] + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: Implementation :: PyPy", + ], + # Required by Mypy when declaring PEP 561 compatibility with `py.typed` + # file. + zip_safe=False ) diff --git a/tests/__init__.py b/tests/__init__.py index e69de29bb..b3017a1db 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -0,0 +1,3 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py new file mode 100644 index 000000000..b3017a1db --- /dev/null +++ b/tests/integration/__init__.py @@ -0,0 +1,3 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc diff --git a/tests/integration/api/__init__.py b/tests/integration/api/__init__.py new file mode 100644 index 000000000..b3017a1db --- /dev/null +++ b/tests/integration/api/__init__.py @@ -0,0 +1,3 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc diff --git a/tests/integration/api/cassettes/TestAwsIntegration.test_create.yaml b/tests/integration/api/cassettes/TestAwsIntegration.test_create.yaml new file mode 100644 index 000000000..23cde540d --- /dev/null +++ b/tests/integration/api/cassettes/TestAwsIntegration.test_create.yaml @@ -0,0 +1,505 @@ +interactions: +- request: + body: '{"account_id": "123456789103", "account_specific_namespace_rules": {"auto_scaling": + false, "opsworks": false}, "filter_tags": ["filter:test"], "host_tags": ["api:test"], + "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '204' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWSq0oSS3KS8yJz0xRslJKMzU0S042NEhNMzA3sUhKTTIyMjFIMbI0TjKwNE8x + N1GqBQCBHzRsMgAAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:01 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:59 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - aHo5LNOt81r33IYjVJvAW38EarOXxgJiaRes9P/xhrf7FT81LvjEnVLCvw9iPn7T + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789101", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + 
Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:01 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:21:01 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - kg+/Cls6zaJcT2blJLlU62BwgGePGdpqSwWrJ0xEIvzmSMWHXxGNsiyEzBPJ1a96 + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789101", "role_name": "DatadogApiTestRolo"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:02 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:21:01 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - aswcxBk1J00Iy18+kQFKF6EQfzLy4sWD4ILciesVMX5rWDYniffEYH6qbK0qwOgw + 
X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789102", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:04 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:21:02 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - RngFxOd8mVeT14auLfzsH/6kz142QLoKkYXZjfmXpXDkZ/eN6uoCM3cTScXuFEa0 + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789102", "role_name": "DatadogApiTestRolo"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:05 GMT + Pragma: + - no-cache + 
Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:21:04 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - nRZqCODixwNZX0HLyT17WzYwenviVG0rmnZak57k5KsDWun3aWEsPedTsRpiFQxf + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789103", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:05 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:21:05 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - nwn8Akm+cp12Jtby9xyfYjHWK2KZDWf5LxY+SMa+2NK6hVIBcKsVHXjynaTEG+o0 + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789103", "role_name": "DatadogApiTestRolo"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + 
Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:06 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:21:05 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - 0pmBjL5vG2A5IkxC4OBtwgn929khTZGgUquRW20JC77zchR4jTrHgra/pB22jP66 + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789104", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:07 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:21:06 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - FkKIRCzyOlcTfevOWu/Pn0jzNwYGEOKsDSSLLIk1UH0umdv3B3q8BoRMqfK8ce37 + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789104", "role_name": "DatadogApiTestRolo"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + 
Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:08 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:21:07 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - IRAJ1mQ+c3epm0CLGtZoe/y8O4TCss3jYw+fwQOm7+eSKRCE+p3OtawVnIQ5ts76 + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/api/cassettes/TestAwsIntegration.test_delete.yaml b/tests/integration/api/cassettes/TestAwsIntegration.test_delete.yaml new file mode 100644 index 000000000..9e9448aa5 --- /dev/null +++ b/tests/integration/api/cassettes/TestAwsIntegration.test_delete.yaml @@ -0,0 +1,558 @@ +interactions: +- request: + body: '{"account_id": "123456789101", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWSq0oSS3KS8yJz0xRslIyTrQwNkozNzMxSDE1MUkzSEpJsjQ2S0xLTUlNNjc0 + TlWqBQBMEuCiMgAAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri 
https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:23 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:21:22 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - aiFdvD+ESSGWQuLeXGShIAaySBrTSq6aZf+crfPnDVFrMRUU9f0HobLUCBopvakz + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789101", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:25 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:21:24 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - 0pmBjL5vG2A5IkxC4OBtwgn929khTZGgUquRW20JC77zchR4jTrHgra/pB22jP66 + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789101", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json 
+ Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:26 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:21:25 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - dPTJBBDv5jeY1gnH1FisDpda5Hi0boOGbsHxIOi4qkMt+QLOH7F7P7MeSr40vXZ0 + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789101", "role_name": "DatadogApiTestRolo"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:27 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:21:26 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - qA85Kiicwd/s93AfT3MSf+l6IYc5FQ6tEbp4Kft/ri41UOumJ967MPQKmz3gwejd + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + 
status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789102", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:29 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:21:28 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - 2VXDwI2pcuhRZeQ6xt/fJh1koMYSfGcgQg5wAzgLqeh10Zf5/W946U7T5w6SEIhy + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789102", "role_name": "DatadogApiTestRolo"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:30 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 
10-Feb-2020 16:21:29 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - o8MFmk+4Ge4vq85ax+5C1nfQs0lbtaPPYZrpqzeG6IsYGNLGMu/G7PbJElpjPS5i + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789103", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:31 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:21:30 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - 562ySu37xnxKxbTr0NFd7oH3+L3JO3D7GcG/Lb1Dr0vgKuyocJBk1SrO7ogLRZuZ + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789103", "role_name": "DatadogApiTestRolo"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + 
Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:33 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:21:31 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - IWbeot5NPPjwzkLRJwJSrhKxooUYWPiItYmeOu7MvfpEU9kI8879nM2EukYnEnom + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789104", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:33 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:21:33 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - l+fZq7vW9gg1qInAzXkJZdt8f8e/094RDnN9pOEIlkXbx1jb6kpjgt1+syYCZyFC + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789104", "role_name": "DatadogApiTestRolo"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - 
application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:35 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:21:34 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - 7vC9CD2UnUYbC7cu05B95RgDyGt2vcRq8GQJgBahx4BAPKzA8OvLqEF8NdaLccla + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/api/cassettes/TestAwsIntegration.test_generate_new_external_id.yaml b/tests/integration/api/cassettes/TestAwsIntegration.test_generate_new_external_id.yaml new file mode 100644 index 000000000..ab56e76e2 --- /dev/null +++ b/tests/integration/api/cassettes/TestAwsIntegration.test_generate_new_external_id.yaml @@ -0,0 +1,564 @@ +interactions: +- request: + body: '{"account_id": "123456789102", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWSq0oSS3KS8yJz0xRslIyMzc1szBMNjKytDAzsUxJtbAwTjK1NEhOtjQ2NTdP + MVGqBQDXOdMAMgAAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri 
https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:37 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:21:35 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - ErnxdXQi+pjNvJU00qnaaPgTN904IR+BI4NeCvSijs0uGcTaVMOpPOuObFW+gkC6 + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789102", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: PUT + uri: https://api.datadoghq.com/api/v1/integration/aws/generate_new_external_id + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWSq0oSS3KS8yJz0xRslIyMDA0MDIxSzEwSTQysUwzTkxMTU1MtkgxMbM0MLE0 + t1SqBQBWO2rnMgAAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:39 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:21:37 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - HTCsbjwqQM0jTFHFq9ukWObBv4f/yxvHIxzrANPhzJkr6s3+rN5uCN3TcZuK2V2B + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789101", 
"role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:40 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:21:39 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - tC9U8NM0Z/kvHE2H14k1U3spiHbXz72rh5O3tOqhQhVPC3nlvooY3TRyCaI/gIdt + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789101", "role_name": "DatadogApiTestRolo"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:40 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:21:40 GMT; + secure; HttpOnly + Strict-Transport-Security: + - 
max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - i90G6k4M6qI4UypyvMoczcO5m+jatiEQSMeHpdjycp0h4nWxRpKUHr6efynkbQs+ + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789102", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:41 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:21:40 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - u2B27rQjtu8TuEjzrroc8ae3xeJMLmsxU6SiAszW1tH+EI3X0cOP819eGNRqlxzl + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789102", "role_name": "DatadogApiTestRolo"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri 
https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:42 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:21:41 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - GAK1J4mJd/EBZfEK4rqUw9OeB9GOeKgSyrXGtzNUi5zrv5sHYU56xJgA4bcbtgUA + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789103", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:42 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:21:42 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - KKQq2SiaDLpychKSp47ffvU6SRxUV+VzBWr187ESkULBuGOI+kREfb/2NCy8DAWC + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789103", "role_name": "DatadogApiTestRolo"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - 
python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:44 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:21:42 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - oiF9oLqSEnBWpAh9z89c+Ruy9xKAqrdZzQPjGsNOxlGQNWaw3sCTSoKaMkMdPunL + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789104", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:44 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:21:44 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - 6qTaw+brNWWnKD6ULH8747/TVkPK0wedRsruOmMITJcYBkJ/Eac9bUO9jP1Btfl5 + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + 
body: '{"account_id": "123456789104", "role_name": "DatadogApiTestRolo"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:46 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:21:44 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - pNNj5PhODCVJlRBPEhZP3s9KL9kvFYv//TnGsiPp+3AqL7R5kIW2JlCWtfMcXeFn + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/api/cassettes/TestAwsIntegration.test_list.yaml b/tests/integration/api/cassettes/TestAwsIntegration.test_list.yaml new file mode 100644 index 000000000..0f72251e3 --- /dev/null +++ b/tests/integration/api/cassettes/TestAwsIntegration.test_list.yaml @@ -0,0 +1,511 @@ +interactions: +- request: + body: '{"account_id": "123456789101", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: POST + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWSq0oSS3KS8yJz0xRslIyNTRJMjWwsExNMTYxMTe2TDJLMjFINDA1Tk0zSTQx + M1WqBQBYGP1WMgAAAA== + headers: 
+ Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:37:36 GMT + Transfer-Encoding: + - chunked + cache-control: + - no-cache + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789102", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: POST + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWSq0oSS3KS8yJz0xRslIySrKwNE02STEzMrAwMUg0TkxNM062SE60sEw0Tk4z + NlKqBQByWnuxMgAAAA== + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:37:37 GMT + Transfer-Encoding: + - chunked + cache-control: + - no-cache + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: GET + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: !!binary | + H4sIAAAAAAAAA+2YbU/bMBDHvwry2yUSbZrS5NU6QBMTYmKgoWlCkXGuweDaXexQHsR33zkOog8B + 
EmDdJnhVx67t8//Ov7vkhlDGVCGNJvHPG8L0ZJzkoFWRM0iYEgKY4UomIOmJgJTEIyo0eATyXOV2 + zjG2L5koUkhxYob/rXpHXBjIE0OzqgM3Aq2Tc7hKOC5EwjDsd9bXw37YiSJsEO/OlkRPgPERZ4mk + Y9ATirbkhQBc6ObWI2MwOWe61jyTF2jdqdJmZuenD3TrtTh7rgSUluEhvtALurYpOEiz9g371/aw + P+nhWepVqdR2Cpzh5OQsupTctzNmrSblmAF3jviCigIIan03/xGFCJ3wJKMGpvSq8lcj0WZd+rRi + Hpl3MEmVNJsuYGJrPDl+tqh2uk+PDnZVpodpusu12S7jbyjTLRBg4BCF8YViVPidftBb74ZR0G8h + +q+94NPIFHWiH9IsVhJwyJoR66IM2+rxgcHnOMZG6l/xi/eartqihqYqGx4d7EgDWU4tLexFaOaM + 0o+flW9/N0+Bne/S8UlKh/pKMufZXhR0uot+cqFXuck9mKlqej/URE9Vfo40WZUP7D3G0MFj2FaM + ze4LrkenG/Q+2oUQBn7qHODze/l9C6h6/UmhfaCoeQfHsT21+qMt92BxaA6iQS9CQPfCJTAhvysc + eQTbXfcwt8ZjcJoXH/ctjEo0XmQusxXS6v4UL8RUBhIw6mEPptuXmPAkFTvpAp6CQTcIbRA3zQkf + fnzd39g5WQj7ZpRZAVOeL9mfxkWp9OAdF7MlSutsGkRB2A9ahGuwLwE5vSi73fiBhOmS6wODzeJ8 + vsxZFcmXqpxXzab11aRldVNyXBsYfC/p/l5NPlqit8GxE3WpCnnLJXp9pLaBhhO1Fhpv9b2nXtQ2 + L5NO1KWa7S1H6l3JMeG2Kmv+amKr7LC/McBPE4s4bZag/otC7AWqLPLwH1Ll+PY3oxQStNgSAAA= + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:37:37 GMT + Transfer-Encoding: + - chunked + cache-control: + - no-cache + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789101", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Connection: + - keep-alive + Content-Length: + - '2' + Content-Type: + - 
application/json + Date: + - Tue, 19 Oct 2021 12:37:38 GMT + cache-control: + - no-cache + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789101", "role_name": "DatadogApiTestRolo"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{"errors": ["AWS account 123456789101 does not exist in integration"]}' + headers: + Connection: + - keep-alive + Content-Length: + - '70' + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:37:38 GMT + cache-control: + - no-cache + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + status: + code: 400 + message: Bad Request +- request: + body: '{"account_id": "123456789102", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Connection: + - keep-alive + Content-Length: + - '2' + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:37:38 GMT + cache-control: + - no-cache + content-security-policy: + - frame-ancestors 
'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789102", "role_name": "DatadogApiTestRolo"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{"errors": ["AWS account 123456789102 does not exist in integration"]}' + headers: + Connection: + - keep-alive + Content-Length: + - '70' + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:37:39 GMT + cache-control: + - no-cache + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + status: + code: 400 + message: Bad Request +- request: + body: '{"account_id": "123456789103", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{"errors": ["AWS account 123456789103 does not exist in integration"]}' + headers: + Connection: + - keep-alive + Content-Length: + - '70' + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:37:39 GMT + cache-control: + - no-cache + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - 
no-cache + strict-transport-security: + - max-age=15724800; + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + status: + code: 400 + message: Bad Request +- request: + body: '{"account_id": "123456789103", "role_name": "DatadogApiTestRolo"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{"errors": ["AWS account 123456789103 does not exist in integration"]}' + headers: + Connection: + - keep-alive + Content-Length: + - '70' + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:37:39 GMT + cache-control: + - no-cache + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + status: + code: 400 + message: Bad Request +- request: + body: '{"account_id": "123456789104", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{"errors": ["AWS account 123456789104 does not exist in integration"]}' + headers: + Connection: + - keep-alive + Content-Length: + - '70' + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:37:39 GMT + cache-control: + - no-cache + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + 
x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + status: + code: 400 + message: Bad Request +- request: + body: '{"account_id": "123456789104", "role_name": "DatadogApiTestRolo"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{"errors": ["AWS account 123456789104 does not exist in integration"]}' + headers: + Connection: + - keep-alive + Content-Length: + - '70' + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:37:39 GMT + cache-control: + - no-cache + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + status: + code: 400 + message: Bad Request +version: 1 diff --git a/tests/integration/api/cassettes/TestAwsIntegration.test_list_namespace_rules.yaml b/tests/integration/api/cassettes/TestAwsIntegration.test_list_namespace_rules.yaml new file mode 100644 index 000000000..2e311d602 --- /dev/null +++ b/tests/integration/api/cassettes/TestAwsIntegration.test_list_namespace_rules.yaml @@ -0,0 +1,566 @@ +interactions: +- request: + body: '{"account_id": "123456789102", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWSq0oSS3KS8yJz0xRslIyMTNLNTCwMDaxMDE0MTYztTQyNzdJNk0zM7A0SLZI + SlGqBQCajzJAMgAAAA== + 
headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:47 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:21:46 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - ucJMu0SEwqvJ36fqkYRsP+glKObktTtdBf6X17lKXJ4+xOn7nFKnx11beu1ycofn + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/integration/aws/available_namespace_rules?account_id=123456789102&role_name=DatadogApiTestRole + response: + body: + string: !!binary | + H4sIAAAAAAAAA1VTy5LcMAj8lz3nlNR+TSrlwhK2VUbIK9B45+8DeKZm9oK7W4iX8N8POMq0guIJ + 949fxg4qCbQ0npDmSxHtCPWB75wc6YYMDoa2SRJQ4dXoXOiJRl5RLzVRG3npjdVcgmzi8QIKQk/b + k52gaZvwhqzyU6O2htIyzqNQDrxy0RaICJNOaYi2OlXUXtLlzmwH7tLhpAkIevWDXLrpqT2P86W2 + NKrlzt56vjPUFhBnv4Pp92VtaBeQo3lsjFy4hCUQtRmmDV9sRmBRoP0lFV6wI6c3L+3m5Q32cIvE + NbAHXkrHrYn7r1CRyuKpVxquFJbD+mnuXaKmvTBK8YsPNAED3a200KJdgjpnf0bCb7MVc4HXSJ70 + ht0DBj0g7bB6xqAKhSJnJTdfbsSbZNC3tWI8dLDfYtSz9f2xXO0QZ17KYS/oG9izs45ZtqvBjns8 + s62kH7Sh+PnnhTpKI6vQFHFZrLoK+yXE3CwQxroIB/0Kq3hMy+DkcS+hdbv6VrSciznGmxR9/SIh + kP0xcWj7hnmCfCsScxie3763w6s9wUNEiza4qOa724/27z9YqgipeAMAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - 
application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:48 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:21:47 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - RngFxOd8mVeT14auLfzsH/6kz142QLoKkYXZjfmXpXDkZ/eN6uoCM3cTScXuFEa0 + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789101", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:49 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:21:48 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - TAg/qKywM5rz/AUGkmt8+wB4wzGMJfSiHOrBzxBctPLsV/erSD5TChi/uo5ZlVXK + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789101", "role_name": "DatadogApiTestRolo"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - 
python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:50 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:21:49 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - SaHvyR/hQzhMjBxXmmuM76vwlwfocpgL0LhX3u6R0CFONYqUGm7Xe/7/HyTliTFX + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789102", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:50 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:21:50 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - em3KoJu1XYdqq1w4EpLi4L54svjYBxZahEDJ8c5gcdIOxnNafHMdF5LLysPLuNcH + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + 
body: '{"account_id": "123456789102", "role_name": "DatadogApiTestRolo"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:52 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:21:50 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - YNGrI7M9aLfuf6Npp7n/51e6xDtYCO9Rm/LB+HGbX4I6A/e7rnC+cgQxIZnsU+fj + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789103", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:54 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:21:52 GMT; + secure; HttpOnly + 
Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - HtltRxB6FWULKbr8JD/35HKWhI+dqAFQg/rNpMbjeMOPUq5j5iWk+nIs8OwDOqUR + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789103", "role_name": "DatadogApiTestRolo"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:55 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:21:54 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - vYQu3ls2HKdZ2pXErBiwg/FlJyuK31hjiI+oJSqoEPPw/7mzimb2FzvWEsshbznY + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789104", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri 
https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:56 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:21:55 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - WatxAL43AyqgfI4tyA152NzYM3DLdjL7IWr0SzhldiWriTsbw9vUaRZnaqhOCdUk + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789104", "role_name": "DatadogApiTestRolo"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:21:58 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:21:56 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - ns395qtajdi4vImLC5PhByq3vzX3KV9r4kOaLqZ3Kb42AGxxpM06vNzB/Pdubr1b + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/api/cassettes/TestAwsIntegration.test_update.yaml b/tests/integration/api/cassettes/TestAwsIntegration.test_update.yaml new file mode 100644 index 000000000..125d20283 --- /dev/null +++ b/tests/integration/api/cassettes/TestAwsIntegration.test_update.yaml @@ -0,0 
+1,616 @@ +interactions: +- request: + body: '{"account_id": "123456789102", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWSq0oSS3KS8yJz0xRslIyMkxJS00zS7E0MTY1MTG1TEyyMEq2tDQzTEmxNDIw + MFaqBQCXVQ/zMgAAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Sat, 21 Mar 2020 08:44:10 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Sat, 28-Mar-2020 08:44:06 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - WatxAL43AyqgfI4tyA152NzYM3DLdjL7IWr0SzhldiWriTsbw9vUaRZnaqhOCdUk + X-DD-VERSION: + - '35.2303403' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789104", "excluded_regions": ["us-east-1", "us-west-1"], + "host_tags": ["api:test2"], "role_name": "DatadogApiTestRolo"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '141' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: PUT + uri: https://api.datadoghq.com/api/v1/integration/aws?account_id=123456789102&role_name=DatadogApiTestRole + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - 
frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Sat, 21 Mar 2020 08:44:11 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Sat, 28-Mar-2020 08:44:10 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - QpzDmIoaO5Hufx014PqM5BuLw+G9k75nLqy12TEr4Iab1Fl7hIFT5DrERoBer8OF + X-DD-VERSION: + - '35.2303403' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: !!binary | + H4sIAAAAAAAAA02OywqDQAxF/yVrhWrty12hX1C6KzKEmWgHpmaYBy2I/94oCN1dyD0ndwLUmvOY + IrTPCQI7UiO+CVq4YULDw9XbB8V0Z8dQAH21y4aMCjRYHhcKciwJYyoruUv+SFtyV2xqZY3oqnrf + HI6n86XaNVJ8cUwq4bAa0Ns2CVb/U9GTtr3V657oUZMK2ZEA0yxDQuCwwPKnty5R2Gzd3M0/Jtmz + DdcAAAA= + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Sat, 21 Mar 2020 08:44:12 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Sat, 28-Mar-2020 08:44:11 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - EbXB0e7cF4uDRViRvI+w6qPg1YzykoJqZiw5SbqL/81VRQW4a286h09eTGyIVvXJ + X-DD-VERSION: + - '35.2303403' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789101", "role_name": 
"DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Sat, 21 Mar 2020 08:44:13 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Sat, 28-Mar-2020 08:44:12 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - IkXBg4ZNMRmDsobzMjEa2v35+NuPiQI0gFmho/o6e7+hfyyJl3rjuklsE4uVJo7l + X-DD-VERSION: + - '35.2303403' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789101", "role_name": "DatadogApiTestRolo"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Sat, 21 Mar 2020 08:44:14 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Sat, 28-Mar-2020 08:44:14 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + 
X-Content-Type-Options: + - nosniff + X-DD-Debug: + - RngFxOd8mVeT14auLfzsH/6kz142QLoKkYXZjfmXpXDkZ/eN6uoCM3cTScXuFEa0 + X-DD-VERSION: + - '35.2303403' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789102", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Sat, 21 Mar 2020 08:44:16 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Sat, 28-Mar-2020 08:44:15 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - EbXB0e7cF4uDRViRvI+w6qPg1YzykoJqZiw5SbqL/81VRQW4a286h09eTGyIVvXJ + X-DD-VERSION: + - '35.2303403' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789102", "role_name": "DatadogApiTestRolo"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + 
Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Sat, 21 Mar 2020 08:44:17 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Sat, 28-Mar-2020 08:44:16 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - NueLa2zkdBcl9S7BHrRuWyjAeR9iWgPFe330KTY6Cp0/yUhjUktbxu5rG2fG6gBk + X-DD-VERSION: + - '35.2303403' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789103", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Sat, 21 Mar 2020 08:44:18 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Sat, 28-Mar-2020 08:44:17 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - u9VEJv4YNx+Fl9tRGJNbGm0+76jyym0t+mec2t84PhoJYEedil3ajyEhP7U3EneZ + X-DD-VERSION: + - '35.2303403' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789103", "role_name": "DatadogApiTestRolo"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: 
https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Sat, 21 Mar 2020 08:44:18 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Sat, 28-Mar-2020 08:44:18 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - Z91NUpPIZnIQ9h7lBFWBkEPGVUEsn4/i71imPPwrChu4RPI5uNM5HGuodISK1HBR + X-DD-VERSION: + - '35.2303403' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789104", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Sat, 21 Mar 2020 08:44:20 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Sat, 28-Mar-2020 08:44:18 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - +6muH0vWWhHE6JfE/xHkdpoFSNgX/+wCvqEMuEDvglDKir3htwvCDYdHi0bPaPF0 + X-DD-VERSION: + - '35.2303403' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789104", 
"role_name": "DatadogApiTestRolo"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Sat, 21 Mar 2020 08:44:22 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Sat, 28-Mar-2020 08:44:20 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - WyM4veckZw3QTGGZ+Ro8psXMR12RERTyuAWc4KNrn9Mfk0tQy+xf5Ofi04GlB+uh + X-DD-VERSION: + - '35.2303403' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/api/cassettes/TestAwsLogsIntegration.test_aws_logs_crud.yaml b/tests/integration/api/cassettes/TestAwsLogsIntegration.test_aws_logs_crud.yaml new file mode 100644 index 000000000..d017a5010 --- /dev/null +++ b/tests/integration/api/cassettes/TestAwsLogsIntegration.test_aws_logs_crud.yaml @@ -0,0 +1,349 @@ +interactions: +- request: + body: '{"account_id": "123456789101", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWSq0oSS3KS8yJz0xRslIyT0sxTUs2TLNMSTIzMTCwsEyzMEkzSTYxMTAxSDJI + slCqBQDebMPCMgAAAA== + headers: + 
Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:22:17 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:22:15 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - KKdI9UAf8fC5q7osIllxNui0A1CUm45w7mZBz+tu6Vlp/ga+Q6ZXvY0JoJlUBVi+ + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789101", "lambda_arn": "arn:aws:lambda:us-east-1:123456789101:function:APITest"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '102' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/integration/aws/logs + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:22:17 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:22:17 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - GAK1J4mJd/EBZfEK4rqUw9OeB9GOeKgSyrXGtzNUi5zrv5sHYU56xJgA4bcbtgUA + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789101", 
"services": ["s3", "elb", "elbv2", "cloudfront", + "redshift", "lambda"]}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '102' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/integration/aws/logs/services + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:22:18 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:22:17 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - ty7T8eIeXOfZhM7KDN5nGo8JS7ZSIWAqBNFeZshTg3LLDJJa7mPU5wqGt0nOPCpy + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/integration/aws/logs + response: + body: + string: !!binary | + H4sIAAAAAAAAA9WYTY/aQAyG/8ucWWnGHzOe3Koeqt566G2FUBZCmxVNJBLoYbX/vQ5ZVQ3HCo/g + wocEePBjv689z29uaI7ndtsMrnp2A7qVaw4v8+MZ9Hl76E+7/bHvRn1zbHbDz3Y/vTzUv152tVuv + XL3d9qdu3LQ7V7nX+lxvcvAYJf/91PTb6/fVrWMFQOKYRMOFRaw3Vx87PYw+VvXvoZrPWp2Gp6Ye + xqdQ/fvNan/qtmPbd9Wnb1+/N8Po3g3OesmLhxQ8k3VeZgaeGEO0jvWj30Qm9ET5Jgy+9J8PbdON + ZiD0wAFTwsDZPDkzCMCM6K1BfMSKjMxFYgViQDZv8kuBZcDIacnrP5u8SIEJQww5mCfnAh2RxNOy + +wwUTEFQggw5h3QLtS0BIkkkpBAmI5tNwMiKLiAiMgS/TI4BiBl6iNGjOfT5f3GUUKiYiSkne16T + C0SIU98s7fiOVUU8agMqCeti1uQkJkopXanKHSeHUxagiGKdnA/J1ZHT3mcVBMaYVFXgYbxPS1TL 
+ BnjaJOwlV9uXEMpILiGqpZcZrlAEfJHpXUdTXaIoPoynS2BS75Mync4xig66RYqZJaFQmcbhIMzJ + vJhVwXQEE69WsszhHVtJlsyCutsUgS7T8O7NoSsIoGktgbhcRe8YBGFAj3y1blhN1MygVwLmVqIg + fAYWofQ4IBgJ1dfLXJiIGp+AubxfpElj6ZXcsvvuuSOCKpNetZW5TRJVQYFraVr/Ac7MdYk0FgAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:22:18 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:22:18 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - Dpx7DG2N4VEOVm8I4W97n4HwOzBXFJSj1QrKca/nHpAZ6o7/LrJ0o2qyQx0XjNXl + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789101", "lambda_arn": "arn:aws:lambda:us-east-1:123456789101:function:APITest"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '102' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws/logs + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:22:18 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:22:18 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + 
- nosniff + X-DD-Debug: + - aq7EAvMMXGdldXT5eVhOcqdveqp5VDY6MoO0A/xKTuSa7v4Cc6HWT9iWUnYD+m1F + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789101", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:22:19 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:22:18 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - svhoihUM58m7WJ4Z4lY5tmaXf/MnplHzAbMByuVznFW8yf3JIFAZgW/pCvMnq4iN + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/api/cassettes/TestAwsLogsIntegration.test_check_lambda.yaml b/tests/integration/api/cassettes/TestAwsLogsIntegration.test_check_lambda.yaml new file mode 100644 index 000000000..14ab1a8e0 --- /dev/null +++ b/tests/integration/api/cassettes/TestAwsLogsIntegration.test_check_lambda.yaml @@ -0,0 +1,178 @@ +interactions: +- request: + body: '{"account_id": "123456789101", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - 
python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWSq0oSS3KS8yJz0xRslIysTBNNUtJSjOzTDY3MTNMsjBLTTY0MDIwNzI1Mkox + NFGqBQAga/w2MgAAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:22:21 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:22:20 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - JEThRmJp6qTNp8pxXQqPpRD40l23OvSASz6GutTWG+aCw+n9cF/5KqfPSziGHWsU + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789101", "lambda_arn": "arn:aws:lambda:us-east-1:123456789101:function:APITest"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '102' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/integration/aws/logs/check_async + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWKi5JLCktVrJSSi5KTSxJTVGqBQBFnVTEFAAAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:22:22 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:22:21 GMT; + secure; HttpOnly 
+ Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - oiF9oLqSEnBWpAh9z89c+Ruy9xKAqrdZzQPjGsNOxlGQNWaw3sCTSoKaMkMdPunL + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789101", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:22:23 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:22:22 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - KHJbOoqp3I4BOBzIFnc/Ois3eg3Rjmudy0YalRpnXQEDXDoppykpDMDaJPIufi9t + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/api/cassettes/TestAwsLogsIntegration.test_check_services.yaml b/tests/integration/api/cassettes/TestAwsLogsIntegration.test_check_services.yaml new file mode 100644 index 000000000..dc83e0bc8 --- /dev/null +++ b/tests/integration/api/cassettes/TestAwsLogsIntegration.test_check_services.yaml @@ -0,0 +1,179 @@ +interactions: +- request: + body: '{"account_id": "123456789101", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate 
+ Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWSq0oSS3KS8yJz0xRslIyNE6zMEw0T0pOtTAxMUg2skw2Skm1NE81M06xsDBL + S1KqBQC3+lBuMgAAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:22:24 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:22:24 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - pdoE+6MDHF3ASB0y6TdRZq9HO/3uAKc0XPY7EkyNyaFqpXeeAHwz2Ce2QWIN714X + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789101", "services": ["s3", "elb", "elbv2", "cloudfront", + "redshift", "lambda"]}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '102' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/integration/aws/logs/services_async + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWKi5JLCktVrJSSi5KTSxJTVGqBQBFnVTEFAAAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 
16:22:24 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:22:24 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - j0VNx9cZdAj+uuO7pabHlao4Ioc5q8ovvp4Ja/NYzbHA51zSBYXNvtO+8cOYbE0B + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789101", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:22:25 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:22:25 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - FGm8mbL/ixNS/zyX94m5xaWAxszhu9w68KL0QwTbLNqYgp2ZyX2W4rsoYLDoadr+ + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/api/cassettes/TestAwsLogsIntegration.test_list_log_services.yaml b/tests/integration/api/cassettes/TestAwsLogsIntegration.test_list_log_services.yaml new file mode 100644 index 000000000..b34f86614 --- /dev/null +++ b/tests/integration/api/cassettes/TestAwsLogsIntegration.test_list_log_services.yaml @@ -0,0 +1,176 @@ 
+interactions: +- request: + body: '{"account_id": "123456789101", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWSq0oSS3KS8yJz0xRslIyMjM1Mk02TExOTE00MUsxtUiyNDJIszA3N0xNtLQ0 + s1SqBQBFxg7mMgAAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:22:13 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:22:12 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - J5PL0LnJukdy69mckjXi3cjye/YJX2hkoCBkqKQi+tYjrsXYELx6DfDD11fhyjYF + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/integration/aws/logs/services + response: + body: + string: !!binary | + H4sIAAAAAAAAA4uuVspMUbJSKjZW0lHKSUxKzQFygo0VHJOTU4uLFXzy04uVanWgilJzkpBUOeck + FhdnJiu4+jjhVF5mhKTBsaAgJzM5sSQzPw+3puSc/NKUtKL8vBIUq4CCbiBB7DYVpaYUZ2SmIWsJ + ggqheSEnMTcpJRHJaB+wgIIzyNryxJLkDKiGWADbwDdPGgEAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 
'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:22:13 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:22:13 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - OxP+mFpjAbASiVhNf+t4MttAs95ZlMiGosIRnYJJGFoApNgv2oxtdzpnmNlMOki6 + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"account_id": "123456789101", "role_name": "DatadogApiTestRole"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '65' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/aws + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:22:15 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:22:14 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - Lo9psmCk9egobltaxBGqrQFhgCcgUTQoFZpr2xiSR+6tucB/owychJvFjr9YMWzu + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/api/cassettes/TestAzureIntegration.test_azure_crud.yaml b/tests/integration/api/cassettes/TestAzureIntegration.test_azure_crud.yaml new file mode 100644 index 000000000..d2ea4990b --- 
/dev/null +++ b/tests/integration/api/cassettes/TestAzureIntegration.test_azure_crud.yaml @@ -0,0 +1,581 @@ +interactions: +- request: + body: '{"client_id": "testc7f6-1234-5678-9101-3fcbf464test", "client_secret": + "testingx./Sw*g/Y33t..R1cH+hScMDt", "host_filters": "api:test", "tenant_name": + "testc44-1234-5678-9101-cc00736ftest"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '188' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/integration/azure + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:22:26 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:22:26 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - aJ6GOq3zw1bWl+5n1TKdeAvWSB1g5Zer85qbkQ07UFNZhgfVh/zeqVhNb8FjtbN9 + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/integration/azure + response: + body: + string: !!binary | + H4sIAAAAAAAAA5VRS27CMBC9ijXrREqIm5Ss2XAGhKLBn2LJ2MGeIKGKu9dN2k0IAlYej+d9PG/3 + DaQcOuocnhS06RYJMlAh+BCh3cEGCaX/YsIPVjLniZ0HFa5sgqUjEkMnmbBGOWLbzdhqmXEXtCb1 + g5LpwaCNsM/g6CN12lhSv/RwNH3vk96E7oz8t3DLlpwJzvNyVfH8o24+83VZlLkQRdFUtZ4bv9fC + 
3rR/U3M50eh6TlxpcdC85iPkTTtDL5HUu3t8+LuJbmHNj31PkFdjGCPT3mdjccCwEMkzLbjtfwBF + xKCeTgIAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:22:27 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:22:27 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - nL/U8Nu7782wU68M7elx8MY/T+2opB0U5/flvjGsH/qXfYEORYWxwdDpQFq78Mxt + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"client_id": "testc7f6-1234-5678-9101-3fcbf464test", "host_filters": "api:test2", + "tenant_name": "testc44-1234-5678-9101-cc00736ftest"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '136' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/integration/azure/host_filters + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:22:28 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:22:28 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - FGm8mbL/ixNS/zyX94m5xaWAxszhu9w68KL0QwTbLNqYgp2ZyX2W4rsoYLDoadr+ + 
X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/integration/azure + response: + body: + string: !!binary | + H4sIAAAAAAAAA5VRzY6CMBB+lWbOkIB0YZczl30GY8jYn7VJt8V22MQY330r6EUxkVPbmX4/8832 + DKQcOuod/ipo0ysSZKBC8CFCu4UOCaX/YcKPVjLniR1HFU5shqUjEkMnmbBGOWLf3VRqmXF/aE2q + ByVTw6CNsMvg4CP12lhSV3o4mGHwSW9G90beLVyyJWeC87zcVDz/qJvP/KssylyIomiqWj8af9bC + wbTXX5sFPdHo+pG50mKvec0n5pV+xkEiqbVBvhxvplvI+bXvGfLuHqadae+z6bLHsCqj27SX3T9R + Ef3STwIAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:22:28 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:22:28 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - x4m73yTAj65OpCjnvpw3RBJyiFQpkDOBZ7rE/UM6Q4o0837nUb4ZsWFNJUD0Xh0e + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"client_id": "testc7f6-1234-5678-9101-3fcbf464test", "client_secret": + "testingx./Sw*g/Y33t..R1cH+hScMDt", "host_filters": "api:test3", "new_client_id": + "abcd1234-5678-1234-5678-1234abcd5678", "new_tenant_name": "1234abcd-1234-5678-9101-abcd1234abcd", + "tenant_name": "testc44-1234-5678-9101-cc00736ftest"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '305' + Content-Type: + - 
application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: PUT + uri: https://api.datadoghq.com/api/v1/integration/azure + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:22:29 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:22:28 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - ADT0ms9dQnbDHbbduv4c09ChngZrYY7A/Pgms/qacMOruS4mPwZ1GJWq74I7G11W + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/integration/azure + response: + body: + string: !!binary | + H4sIAAAAAAAAA5WQ0UrEMBBFfyXMcwutrVvt8774DcuyTJOJG4hJTaaCyP67Seui1MLqU8JNOPfM + HD6AyaHjk8MXgh7qu6bFQaoyX8r7XfdQPtZVXebs+gYFUAg+ROgPxwLOPvJJG8uUE8DR9EyRm/RN + WkOJbVTOvwgL9Bt/peYuuBRroYz6WQh7ZFT+WUg/WSWcZ/E6UXgXyxzpiCzQKbF0i6f9HPXCuDe0 + JuWBVJIyaCP8tj+bcfQr81lh20y27XpTUlZV10yjQqb/it/AbcwlO71bGzRaDrpdDP4697wj7X0x + XwYMGzu41QWX4ydx26vYUAIAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:22:29 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:22:29 GMT; + secure; HttpOnly + 
Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - em3KoJu1XYdqq1w4EpLi4L54svjYBxZahEDJ8c5gcdIOxnNafHMdF5LLysPLuNcH + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"client_id": "abcd1234-5678-1234-5678-1234abcd5678", "tenant_name": "1234abcd-1234-5678-9101-abcd1234abcd"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '108' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/azure + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:22:29 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:22:29 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - x4pYHtiOW9rUeREgXmH2iIgBaXVGD7x1RIZUg56H0ghPppdtz0ZBEK6nMs8tuoqc + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/integration/azure + response: + body: + string: !!binary | + H4sIAAAAAAAAA5WQywrCMBBFfyXMuoXWVqtdd+M3iJRpHhqISU2mgkj/3djiRgrqal7cuWfm8ACS + Fi21Fi8S6lgFggSk984HqA/QIKFwJ8bdYASzjth1kP7OZlkMgRhawbjR0hLbN1OrZtre0OjY91LE + 
gUYT4JjA2QVqlTYkX+vhrPveRb9Z3WrxRhiTJTJelmm+Ksp0vam26S7P8pTzLKuKoRdI8l/wL+sW + 7uKV2nwSFIp3qpwJfr17+pFyLpmSDv3CD755wXh8AvAFORW/AQAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:22:30 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:22:29 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - Wn01ZjXucAfzJfwvKAkpy0yFfNtHyWu4ZB2aA4ZDwwhXkyLHirYeUNsx208dZz9p + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/integration/azure + response: + body: + string: !!binary | + H4sIAAAAAAAAA5WQywrCMBBFfyXMuoXWVqtdd+M3iJRpHhqISU2mgkj/3djiRgrqal7cuWfm8ACS + Fi21Fi8S6lgFggSk984HqA/QIKFwJ8bdYASzjth1kP7OZlkMgRhawbjR0hLbN1OrZtre0OjY91LE + gUYT4JjA2QVqlTYkX+vhrPveRb9Z3WrxRhiTJTJelmm+Ksp0vam26S7P8pTzLKuKoRdI8l/wL+sW + 7uKV2nwSFIp3qpwJfr17+pFyLpmSDv3CD755wXh8AvAFORW/AQAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:22:31 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:22:30 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + 
Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - e8t0cvW5uVKXk1zUsTcAcDpqv28dgy+lCs/R2sCfbKW6stomFiq2a4ijzxRdPBn5 + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"client_id": "abcd1234-5678-1234-5678-1234abcd5678", "tenant_name": "1234abcd-1234-5678-9101-abcd1234abcd"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '108' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/azure + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:22:31 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:22:31 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - pEDVi2191MvoIMwusdL+COAxndBmcRhJtxAtWxDDnECWDI8Z99hIoBZbpR57tJKz + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/api/cassettes/TestDatadog.test_comments.frozen b/tests/integration/api/cassettes/TestDatadog.test_comments.frozen new file mode 100644 index 000000000..70b2995a0 --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_comments.frozen @@ -0,0 +1 @@ +2020-02-03T17:17:55.650033+01:00 \ No newline at end of file diff --git a/tests/integration/api/cassettes/TestDatadog.test_comments.handle b/tests/integration/api/cassettes/TestDatadog.test_comments.handle new file mode 100644 
index 000000000..ad48e494d --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_comments.handle @@ -0,0 +1 @@ +frog@datadoghq.com \ No newline at end of file diff --git a/tests/integration/api/cassettes/TestDatadog.test_comments.yaml b/tests/integration/api/cassettes/TestDatadog.test_comments.yaml new file mode 100644 index 000000000..faf9d773b --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_comments.yaml @@ -0,0 +1,249 @@ +interactions: +- request: + body: '{"handle": "frog@datadoghq.com", "message": "test message 1580746675"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '70' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/comments + response: + body: + string: !!binary | + H4sIAAAAAAAAA22MTQ6DIBhEr2K+dVOw8idJ016FCEUSEQvoxnj3QurSzSSTmfd2GIL3Zs4gd1jj + BBKQ2Ur/58vpJ+1wz1v+IB1nuBcEMwE3iCaFNQ6mAmpxaGvRaUromvAmJWUrkE3KzVmblgrMCWOc + FuuoZj3VyycG+9YqKx3s+L0XdVmdBnnhPo4f5qc00sYAAAA= + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:17:57 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:17:56 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - cYNsy3QDuOaYo2clO/PharSNtCykS9KtUfiNevH3xDbHJlRyddWkNpuDhMgHWZ43 + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: 
+ - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/events/5309717243760984068 + response: + body: + string: !!binary | + H4sIAAAAAAAAA22Q0WrDMAxFf6XoOSzJmthpYGz/MUYQsZYYHNuT7bBS+u+zF+hTXyTQvdI96Aa0 + k40w3kBhpGlF78mSgrHth0Z2QkhRARriOMWrJxghBeIp+WKHCla0ypTxN7vlIw9RuWX9eZndltWo + YxFtMqaCxCb76v/Ao75r9dafm4ts5Wt3lqK5DF0jhrJIvxkqtxBPG4WAC50eRH0x4BJg/PyqQNGu + Z5osbo8kz9qxjtd8wTre0BRQF/LFg4QpuMRzwa7R63pvD55QP6fR+R9PlPv9D/122zo/AQAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb_events + Date: + - Mon, 03 Feb 2020 16:18:08 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:18:08 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - ScjkFceH9E2GKCW3FjsSaMWoS/Nv1kpTUxp6tNTxcg29DsjAT+Dwa5GhuxoslYvd + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"handle": "frog@datadoghq.com", "message": "test message 1580746675 updated"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '78' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: PUT + uri: https://api.datadoghq.com/api/v1/comments/5309717243760984068 + response: + body: + string: !!binary | + H4sIAAAAAAAAA22NzQ6DIBCEX4XsuSlYFZCkaV+FyBZJVCw/XozvXkw9eplkZveb2aD304RzArVB + DiMooLgW/9eXM8+2Zp2oxKOpBWedbBiXcIOA0efQ4wHoxdG1omdTpNfEhDFqewAJYyKnJVUrmWg4 + Fy3Ji9EJTWkf9GzG4/UTvH2XVBtvh++9TJSrM6AuNvb9B6allBzOAAAA + headers: + 
Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:18:09 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:18:09 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - 562ySu37xnxKxbTr0NFd7oH3+L3JO3D7GcG/Lb1Dr0vgKuyocJBk1SrO7ogLRZuZ + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"handle": "frog@datadoghq.com", "message": "test message 1580746675 reply", + "related_event_id": 5309717243760984068}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '117' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/comments + response: + body: + string: !!binary | + H4sIAAAAAAAAA22OywqDMBBFf0WyLk2iMQ+htH8iwUxViI1NolDEf2/SunQzMNwz98yGOjdN8Iqo + 2ZCH4BbfAWoQ1vOIV4qPNOC6IkpQwYQsBZFKUqHQBS3eZhjW1PCf99HcztlBv4zN3U/v+ofRURvX + D+9rUqSmCULQfY4jhFgca0FrSQTjXNSFh9l+EunB6gim/Unb0aSTQ1iySnCiJCNcJjBHJ6/s+xdA + 4EHH9QAAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:18:10 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:18:10 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + 
Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - pNNj5PhODCVJlRBPEhZP3s9KL9kvFYv//TnGsiPp+3AqL7R5kIW2JlCWtfMcXeFn + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/api/cassettes/TestDatadog.test_distribution_metrics.frozen b/tests/integration/api/cassettes/TestDatadog.test_distribution_metrics.frozen new file mode 100644 index 000000000..0f2f78d24 --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_distribution_metrics.frozen @@ -0,0 +1 @@ +2020-02-03T17:19:50.641419+01:00 \ No newline at end of file diff --git a/tests/integration/api/cassettes/TestDatadog.test_distribution_metrics.yaml b/tests/integration/api/cassettes/TestDatadog.test_distribution_metrics.yaml new file mode 100644 index 000000000..ec2c41818 --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_distribution_metrics.yaml @@ -0,0 +1,47 @@ +interactions: +- request: + body: '{"series": [{"host": "test.host.1580746790", "metric": "test.distribution_metric.1580746790", + "points": [[1580746730, [1.0]]], "type": "distribution"}]}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '152' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/distribution_points + response: + body: + string: '{"status": "ok"}' + headers: + Connection: + - keep-alive + Content-Length: + - '16' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - text/json + DD-POOL: + - propjoe + Date: + - Mon, 03 Feb 2020 16:19:51 GMT + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - SAMEORIGIN + status: + code: 202 + message: Accepted +version: 1 
diff --git a/tests/integration/api/cassettes/TestDatadog.test_downtime.frozen b/tests/integration/api/cassettes/TestDatadog.test_downtime.frozen new file mode 100644 index 000000000..819dda84d --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_downtime.frozen @@ -0,0 +1 @@ +2020-02-03T17:20:36.586556+01:00 \ No newline at end of file diff --git a/tests/integration/api/cassettes/TestDatadog.test_downtime.yaml b/tests/integration/api/cassettes/TestDatadog.test_downtime.yaml new file mode 100644 index 000000000..c2b60822e --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_downtime.yaml @@ -0,0 +1,302 @@ +interactions: +- request: + body: '{"end": 1580747836, "scope": "test_tag:1", "start": 1580746836}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '63' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/downtime + response: + body: + string: !!binary | + H4sIAAAAAAAAAz2PsW7DMAxE/4VjwSGyk0j12l9op6AwGJk1BNiSIdEp2qL/HtFGMt7hju/4B5n9 + mjNHz9DFdZoQOA7QmZM72KN17RlhTjFIyr3QWKC7wAt8IniqlYlrdG89QuHppDz2qtrGONMiDKHQ + dWt80VQYoQhleaDOG8pnJkVpz7T21RmDsFDdJ5u3syTM/JtiXQwf72+AQF7CrUrJq971aaniAsJF + dHVndPHMpdD4fHNI31EP9fKj6QZBAbZxxhp7OCGsy0DC+xbl/t8Bc4jZnS0BAAA= + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:37 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:37 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + 
X-DD-Debug: + - F11u7JCZTPrHz8VfzL5YeXThxcQSR6CdLGgk2tF52+EbYWhXciN8nv9vA8oQ9C9A + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/downtime/728171705 + response: + body: + string: !!binary | + H4sIAAAAAAAAAz2PsW7DMAxE/4VjwSGyk0j12l9op6AwGJk1BNiSIdEp2qL/HtFGMt7hju/4B5n9 + mjNHz9DFdZoQOA7QmZM72KN17RlhTjFIyr3QWKC7wAt8IniqlYlrdG89QuHppDz2qtrGONMiDKHQ + dWt80VQYoQhleaDOG8pnJkVpz7T21RmDsFDdJ5u3syTM/JtiXQwf72+AQF7CrUrJq971aaniAsJF + dHVndPHMpdD4fHNI31EP9fKj6QZBAbZxxhp7OCGsy0DC+xbl/t8Bc4jZnS0BAAA= + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:37 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:37 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - tp1qdVxoUmtlsVp6hgBWraWfL5vEbA116VZkaWKWIZtgPr5Ima8zysCBv+o2WoZ/ + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"end": 1580806837, "message": "Doing some testing on staging.", "scope": + "test_tag:2"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '87' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: PUT + uri: https://api.datadoghq.com/api/v1/downtime/728171705 
+ response: + body: + string: !!binary | + H4sIAAAAAAAAA1WOUUvEMBCE/0rZRwnStN4l9vX8Cd7TISWmawi0SUm2Jyr+d3evKPi2s8zMN19Q + 0G+lYPIIQ9rmWQGmCQZ9sK1tj7Y3CpacIuUykgsVhgvcwYsC7zgyI1v31K8p/n1yCaOovtNW9wqm + WN3rLfHm5ooKKrlCO8o8MOrIrQWdoCSne/NotVawOt5Ht9/OorjgZ068GM7PJ1DgPMUrSyqb9Pq8 + srgAYSVZPXSyeMFaXZDQU44pNDUv2IhFRE4Nzwl83nPflN+TQEb6kKZOgcBNZ7XRpj0o2NbJEf7b + +f0DaLI3GEwBAAA= + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:37 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:37 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - CPK+34LtKdL5YYX/NFOJUdMpxMoO80HISGpGpzDG5fENYSoZ2QNw1gEubOsJ9JNb + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '0' + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/downtime/728171705 + response: + body: + string: '' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '0' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - text/html; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:37 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:37 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + 
X-Content-Type-Options: + - nosniff + X-DD-Debug: + - vlc9b/rJPByGsV/acj3ScS7B1lo9nEAbSgYCfkl0GH3egry4iXeiGBP0WX8DpJ/T + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 204 + message: No Content +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/downtime/728171705 + response: + body: + string: !!binary | + H4sIAAAAAAAAA1WOwU7EMAxEf6XyEUWoadlN6HX5BDitUGVSE0VqkypxQYD4d+KWy9481syb+YFM + bsuZoiMY4jbPCihOMOiTbW17tr1RsKQYOOWR0RcYrnAHrwoc1shM/1bzcGsN9X/QUvajqL7TVvcK + plDwbc9x3khBYcx89O2Qc0VnQumTmO7No9VawYp1JO+/A8xhoe8U62x4eb6AAnQcPqp8x7kI2KW1 + qiswFZbtQye7FyoFvaSeUoi+KWmhRiwiUmzqHl/P+wqc0meUlpG/hNQpkHbTWW20aU8KtnVCppuh + v3+xvlY0UgEAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:37 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:37 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - 3OCRM/4FZbkllI4iloi1acHDABD1SJi2aj2fysEPLLsOVOk5Ki6mi6IOsVG7JIay + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/api/cassettes/TestDatadog.test_downtime_cancel_by_scope.frozen b/tests/integration/api/cassettes/TestDatadog.test_downtime_cancel_by_scope.frozen new file mode 100644 index 000000000..0ffa14a53 --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_downtime_cancel_by_scope.frozen 
@@ -0,0 +1 @@ +2020-02-03T17:20:37.973112+01:00 \ No newline at end of file diff --git a/tests/integration/api/cassettes/TestDatadog.test_downtime_cancel_by_scope.yaml b/tests/integration/api/cassettes/TestDatadog.test_downtime_cancel_by_scope.yaml new file mode 100644 index 000000000..f5ce0f017 --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_downtime_cancel_by_scope.yaml @@ -0,0 +1,548 @@ +interactions: +- request: + body: '{"end": 1580747837, "scope": "test:integration_one", "start": 1580746837}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '73' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/downtime + response: + body: + string: !!binary | + H4sIAAAAAAAAAz2PQU7EMAxF7+IlyoI0DAndcoVhNRpVJjVVpDapHBcEiLsTt5pZfsvP//kXmOLG + TDkS9HmbZwOUR+jtKTz6Jx+cN7CUnKTwIDhV6C/wAFcDERsyU1s9qNtSuk8KT4Mm19lgnYExVXzf + iQ+cKxmogiy3que9KjKhVilnnX8J1hpYsfnJPju6JC30U3IzhrfzKxjAKOmzReFN78aytnABoSp9 + ykITo6SSB2Wa+0K14nR/eCxfWU8O8q1cZ0CrfBest941gW0dUeiwUoO/fygiFDU3AQAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:38 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:38 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - f5hY0MW4w2fhZz0SAfv1+LF9me92dJz6mowUerU7gZ8k/CpuQLqOWzykixb5WZaX + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"end": 
1580806838, "scope": "test:integration_one", "start": 1580746837}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '73' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/downtime + response: + body: + string: !!binary | + H4sIAAAAAAAAAz2PwU7DMBBE/2WPlQ91DNjkyi+UU4WirbONLCV2ZG9AgPh3dhu1xxntm5n9hUpx + q5VyJOjzNs8GKI/Q2+dwDMeX4IKBpeTEpQ6MU4P+DAf4MBBRkJnkdKfuR+nhlDoNqlxng3UGxtTw + ciOuODcy0Bgr71X+Saq8pFZCrVLOOv8arDWwouzjm7d3cVrop2RZDO+nNzCAkdOnSK6b5sayijgD + U+M+ZaapIqeSB2Vk+0Kt4fR4eCxfWSMH/lauM6BVvgvWW+9EbuuITPsqXfD3D7FxgRs3AQAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:38 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:38 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - vG5kxpR47Wd0uZGIzWkStfMxs3cmVIjKYEHLQf0xQiHS0P2BwlwJHwTESUSKlcdO + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"end": 1580866838, "scope": "test:integration_two", "start": 1580746837}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '73' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/downtime + response: + body: + string: !!binary | + 
H4sIAAAAAAAAAz2PwU7DMBBE/2WPyAcct7XJlV8op6qKFmeJLCV2tN5QAeLf8TaixxntzLz9Aaa4 + MVOOBH3e5tkA5RF6ewzP4XQKLhhYSk5SeBCcKvQXeIKrgYgtMlM73VP/R+nhFJ4GVa6zwToDY6r4 + fk984FzJQBVk2af8oU351sqEOqU56/xLsNbAio1P7t6+JWmh75IbMbydX8EARkmfTQpv2hvL2sQF + hKr0KQtNjJJKHuRWlH2hWnF6PDyWW9bKQb401xnQKd8F6613BwPbOqLQTqUEv3/GWJcvNwEAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:38 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:38 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - DNJM9d0LaQZJbuEjasKEmgCwDoiLnJW9mPQJm+yWIlQRbFhX4Vzx4uuDCt38dWhb + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"scope": "test:integration_one"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '33' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/downtime/cancel/by_scope + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWSk7MS07NyUlNic9MKVayijY3sjA0NzQ3NtKBsQxjawGmOnSOJwAAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:39 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 
10-Feb-2020 16:20:39 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - mGJe6qmS66N9ddKWdHwHEzQK9VHuaMNr7+EsVTKliCkGq+ayJZmadUyCSwID4him + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/downtime/728171731 + response: + body: + string: !!binary | + H4sIAAAAAAAAA1WPz07DMAzG38XHKQfSAMl65RXGaUKVl5oqUptUjgsCxLvP2bQhjp/l3/fnB5ji + xkw5EvR5m2cDlEfo7VN48I8+OG9gKTlJ4UFwqtAfYQdvBiIqMtP99Tm4/d9r0vvVrfA0NOU6G6wz + MKaKpwsnvJGBKshyy1MTzYtM2PIaZp3fB2sNrKgl5XK7Gkta6LtkrQ2vhxcwgFHSh8p3nGszjmVV + dQShKn3KQhOjpJKHBumChWrF6T57LJ+5eQ7y1bjOQMvyXbDeeqcNtnVEoX+1fs+y2qYzQAEAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:39 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:39 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - fqgAnnBv1js3TBerHAS1jOASlx3n1xB+hOOrFOLO2ZaBfZ3rktA3gzUaBetB5haL + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET 
+ uri: https://api.datadoghq.com/api/v1/downtime/728171732 + response: + body: + string: !!binary | + H4sIAAAAAAAAA1WPwU7EMAxE/8VHlMOmAZLtlV9YTitUmdRUkdqkclwQIP4dp0UgjmPNvBl/AlPc + mClHgj5v82yA8gi9vQuncLoPLhhYSk5SeBCcKvRXuIEnAxE1MtOP1d+q9fxnTXo/aIWnoSnX2WCd + gTFVfN5zwhsZqIIsR98O8YpmwtbXYtb5c7DWwIo6UvbbAZa00EfJOhseLw9gAKOkV5UvONcGjmVV + dQWhKn3KQhOjpJKHFtIPFqoVp9+3x/KWG3OQ95brDLQu3wXrrXcqt3VEoX+zvr4BfrWC3UABAAA= + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:39 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:39 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - BsieYxalcMaIS+cTbK9YL1FxnAIiDF/6CFe3/lefzTTUruWB5XaSb08KP3lTATlu + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/downtime/728171734 + response: + body: + string: !!binary | + H4sIAAAAAAAAAz2PwU7DMBBE/2WPyAcct7XJlV8op6qKFmeJLCV2tN5QAeLf8TaixxntzLz9Aaa4 + MVOOBH3e5tkA5RF6ewzP4XQKLhhYSk5SeBCcKvQXeIKrgYgtMlM73VP/R+nhFJ4GVa6zwToDY6r4 + fk984FzJQBVk2af8oU351sqEOqU56/xLsNbAio1P7t6+JWmh75IbMbydX8EARkmfTQpv2hvL2sQF + hKr0KQtNjJJKHuRWlH2hWnF6PDyWW9bKQb401xnQKd8F6613BwPbOqLQTqUEv3/GWJcvNwEAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri 
https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:39 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:39 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - o8MFmk+4Ge4vq85ax+5C1nfQs0lbtaPPYZrpqzeG6IsYGNLGMu/G7PbJElpjPS5i + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"scope": "test:integration_two"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '33' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/downtime/cancel/by_scope + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWSk7MS07NyUlNic9MKVayijY3sjA0NzQ3NomtBQCQ8kwBHQAAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:39 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:39 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - SaHvyR/hQzhMjBxXmmuM76vwlwfocpgL0LhX3u6R0CFONYqUGm7Xe/7/HyTliTFX + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + 
Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/downtime/728171734 + response: + body: + string: !!binary | + H4sIAAAAAAAAA1WPwWrDMBBE/2WPYQ+VlUSKr/2F9BSC2chbI7AlI60b2pJ/j2SXlh5nmXkz+w2J + 3ZISB8fQhmUcETj00KqDfbHHo9UWYYrBS0yd0JChvcAOrgiOSmTkH6vZF+vpz+rLfaPFNHRV6UZZ + pRF6n+m25iQtjJCFkmx9K8QUdGKqfTWmtDlZpRBmKiNlvW1g8RN/xVBmw9v5FRDIif8o8p3GXMEu + zkVdQDhL64PwkEh8DJ3cY/1g4pxp+H27j/dQmZ181lyDULtMY5VRRu8Rlrkn4X+zHk+JY/FgQAEA + AA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:40 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:40 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - RbevWUvO2oQYYDnX/G1lndTh/kTt+ebFIvajU6/3Ivb5c6aUQf49/uD1ICaXyx52 + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/api/cassettes/TestDatadog.test_embed_crud.yaml b/tests/integration/api/cassettes/TestDatadog.test_embed_crud.yaml new file mode 100644 index 000000000..131ab5194 --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_embed_crud.yaml @@ -0,0 +1,247 @@ +interactions: +- request: + body: '{"graph_json": "{\"requests\": [{\"conditional_formats\": [{\"comparator\": + \">\", \"palette\": \"red\", \"value\": 50000000000}, {\"comparator\": \">\", + \"palette\": \"green\", \"value\": 30000000000}], \"q\": \"top(system.disk.free{$var} + by {device}, 10, ''mean'', ''desc'')\", \"style\": {\"palette\": \"dog_classic\"}}], 
+ \"viz\": \"toplist\"}", "legend": "no", "size": "medium", "timeframe": "1_hour", + "title": "Custom titles!"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '428' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/graph/embed + response: + body: + string: !!binary | + H4sIAAAAAAAAA6VRWW6DMBC9Sjof+ahQAJM4BoVUao9RKuRlwChmqe2kqqrevSOSG/TPb+aN3uIf + wFGhaQcDFfCi3B+V7Fgu+EGUCkXBCWmWZVwrLUrJBNfaYMcFV+xQFHLPc14yc9Q5UwwSiDguTkZs + b9IPUjkMUL0DAfhIwMbRkcxp6LwccRO8rhsaxiVUaSqXZWdklGbu7edOz2Pae7nYdPX3EucLTvV/ + DW4tDr2NdZFl26/BRFtzejnscTJ1J13ALVmtnxvY3NcNEIHQ464BuiS4+n+dvUFPCWhyPqX3UGfq + YPXdxiE6pLRv1xDncbPC8ERrjzcKQ32vggkYGWx79dTMdHUugWClpx9R31DlxbEUef7gTCR6J/3+ + AYJbdlC3AQAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:53 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:53 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - vLqqOLcWkbQm3aIBHfcEmIwzrJtjtqNdArlWt57BFwl8nfWymjIeK67csuZ1woEb + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/graph/embed/63947baf2186589be836af2c2006cbc89a286ccdef686b2533a461692d7c12b2?var=asdfasdfasdf + response: + body: + 
string: !!binary | + H4sIAAAAAAAAA6VRWWrDMBC9Sjof+TLxlii2iVNoj1EXM5LGlom8VFJSSundOzjpCfoh0Bs95i36 + Bhol6XbQUIHIy/1RYpelhTgUpaQiF4xUliRCSVWUmBVCKU2dKITMDnmOe5GKMtNHlWYygwgCjYvF + QO0N3YDSkofqDRjAewQmjJZlTkPncKSNd6pueBgWX8UxLstOY0A99+Zjp+Yx7h0uJl79PYf5QlP9 + X4NbQ0NvQp0nyfZz0MHUgm+Wepp03aH1tGWrNXrd/Z0GNndmA8xl9FjRAC9huEZ5mZ0mx2F4cj7F + 93xnrmON0IYhWOLgr1cf5nGzQv/Ez45unIurX7Uj0OhNe3Vc0nS1NgJv0PHnyC+o0vxYFmn64Ews + eif9/AJmPQTBwgEAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:53 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:53 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - ztq+F8HwxRthTKNo0l2MCEDK5uwvgQzF00nWu49lHsBM51hGZBm/pPILDqupy+Xd + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/graph/embed/63947baf2186589be836af2c2006cbc89a286ccdef686b2533a461692d7c12b2/enable + response: + body: + string: !!binary | + H4sIAAAAAAAAAy3MMQ6AIAwF0KsQDmCk6Le4exBayoQuxMEY766D41ve7fupar371W+7WHGIaVok + VwqMmZMYR3xSGkeoKKdMDNViFQyhOcY8ISBRWTSQkPvDerZ2OTuyNCuDf17LfSEvagAAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb 
+ Date: + - Mon, 03 Feb 2020 16:20:53 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:53 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - rk3iIRyevtXsTLLTMsm8PoHrVjRY2UIgJwOnYxasATpPihgg0ps3VPSw7zz+6jrL + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/graph/embed/63947baf2186589be836af2c2006cbc89a286ccdef686b2533a461692d7c12b2/revoke + response: + body: + string: !!binary | + H4sIAAAAAAAAAy3MMQ7DIAwF0KsgDhAFk3xM9xwEG7OUKFJQKlVV7t4OHd/yPn5cqjaGf/htF6sO + MS9JSqPAWDmLccRPSvMMFeVciKFarYEhtMZYFgRkqkkDCbl/2K7e3+601/G0Ovn7C80QqmJqAAAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:53 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:53 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - ty7T8eIeXOfZhM7KDN5nGo8JS7ZSIWAqBNFeZshTg3LLDJJa7mPU5wqGt0nOPCpy + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/api/cassettes/TestDatadog.test_events.frozen b/tests/integration/api/cassettes/TestDatadog.test_events.frozen new file mode 100644 
index 000000000..1867ddc06 --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_events.frozen @@ -0,0 +1 @@ +2020-02-03T17:22:47.569953+01:00 \ No newline at end of file diff --git a/tests/integration/api/cassettes/TestDatadog.test_events.yaml b/tests/integration/api/cassettes/TestDatadog.test_events.yaml new file mode 100644 index 000000000..ac05b619a --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_events.yaml @@ -0,0 +1,506 @@ +interactions: +- request: + body: '{"date_happened": 1580746967, "host": "test.host", "text": "test message + 1580746967", "title": "end test title 1580746967"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '123' + Content-Type: + - application/json + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/events + response: + body: + string: '{"status":"ok","event":{"id":5309722140426601038,"title":"end test + title 1580746967","text":"test message 1580746967","date_happened":1580746967,"handle":null,"priority":null,"related_event_id":null,"tags":null,"url":"https://app.datadoghq.com/event/event?id=5309722140426601038"}}' + headers: + Connection: + - keep-alive + Content-Length: + - '281' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - propjoe + Date: + - Mon, 03 Feb 2020 16:22:48 GMT + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - SAMEORIGIN + status: + code: 202 + message: Accepted +- request: + body: '{"date_happened": 1580746667.0, "host": "test.host", "text": "test message + 1580746667.0", "title": "start test title 1580746667.0"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '131' + Content-Type: + - application/json + User-Agent: + - 
python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/events + response: + body: + string: '{"status":"ok","event":{"id":5309722142507686359,"title":"start test + title 1580746667.0","text":"test message 1580746667.0","date_happened":1580746667,"handle":null,"priority":null,"related_event_id":null,"tags":null,"url":"https://app.datadoghq.com/event/event?id=5309722142507686359"}}' + headers: + Connection: + - keep-alive + Content-Length: + - '287' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - propjoe + Date: + - Mon, 03 Feb 2020 16:22:48 GMT + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - SAMEORIGIN + status: + code: 202 + message: Accepted +- request: + body: '{"host": "test.host.integration", "text": "test host", "title": "test host"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '76' + Content-Type: + - application/json + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/events + response: + body: + string: '{"status":"ok","event":{"id":5309722144615820235,"title":"test host","text":"test + host","date_happened":1580746968,"handle":null,"priority":null,"related_event_id":null,"tags":null,"url":"https://app.datadoghq.com/event/event?id=5309722144615820235"}}' + headers: + Connection: + - keep-alive + Content-Length: + - '251' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - propjoe + Date: + - Mon, 03 Feb 2020 16:22:48 GMT + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - SAMEORIGIN + status: + code: 202 + message: Accepted +- request: + body: null + headers: + Accept: + - '*/*' + 
Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/events/5309722144615820235 + response: + body: + string: !!binary | + H4sIAAAAAAAAA3WQ3QrCMAyFX0VyPfa/qQXxQURGcVEDtS1tNhzDdzebV4LelOQk38mhM+CIlkHN + 0GvG7q69R4s9qKLZ5du63be7BLTBwB1PHkEB2auDBAJGN4TLomTaUzYW2WoVs6bK99uyLOq6FZMy + L6tG9pnYLMuMkTd3F1m0IZgFX7nPe6T+8IfHp8T8wlnfIqjTqnXSqArOCfQ40gU7qx9yzg7GJOAD + uUA8CW9deGgjt9cIH790qVOyjLegmZyVMckX/Ajyer0BJnJ67TIBAAA= + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb_events + Date: + - Mon, 03 Feb 2020 16:23:09 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:23:09 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - EalDBln0GyXsmtYpKdBCU5I8r6g3hVv6daVLPtSGU1YL1MYBGsHuczHWOirgrR8v + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"alert_type": "success", "text": "test no hostname", "title": "test no + hostname"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '82' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/events + response: + body: + string: '{"status":"ok","event":{"id":5309722505343306076,"title":"test no hostname","text":"test + no 
hostname","date_happened":1580746989,"handle":null,"priority":null,"related_event_id":null,"tags":null,"url":"https://app.datadoghq.com/event/event?id=5309722505343306076"}}' + headers: + Connection: + - keep-alive + Content-Length: + - '265' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - propjoe + Date: + - Mon, 03 Feb 2020 16:23:09 GMT + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - SAMEORIGIN + status: + code: 202 + message: Accepted +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/events/5309722505343306076 + response: + body: + string: !!binary | + H4sIAAAAAAAAA3WPzQqDMBCEX0X2LBj/NVD6IKVI0KUGYhKSVSriuzdaerOXPezMfLO7AS6oCfgG + gyDsRmEtahyAp2XD6qJqmzYGodBRR6tF4ODnvkfvIQaH3syuP5aJsDJZ0uSk+aTMWVtnWcnKvMhz + VrG6Cn6SpA4zoadIm2g0nrSYMEizUwfljH/nXQ63Pxh8h4OvKCReHvjjGcOAi+yxO+lcz0rFYJ00 + TtIaotq4SahQe1zw02V4+qJw3z+Bib0nJAEAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb_events + Date: + - Mon, 03 Feb 2020 16:23:30 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:23:30 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - 4WmLzujJordyBf+eJTJ6N2mys3JPZtDGjDvvYxTMJFkvFVHGZtg/PV/bjA2D8/Aw + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- 
request: + body: '{"host": "test.host", "tags": ["test_tag:1", "test_tag:2"], "text": "test + tags", "title": "test tags"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '102' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/events + response: + body: + string: '{"status":"ok","event":{"id":5309722854591649217,"title":"test tags","text":"test + tags","date_happened":1580747010,"handle":null,"priority":null,"related_event_id":null,"tags":["test_tag:1","test_tag:2"],"url":"https://app.datadoghq.com/event/event?id=5309722854591649217"}}' + headers: + Connection: + - keep-alive + Content-Length: + - '274' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - propjoe + Date: + - Mon, 03 Feb 2020 16:23:30 GMT + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - SAMEORIGIN + status: + code: 202 + message: Accepted +- request: + body: '{"host": "test.host", "priority": "low", "source_type_name": "vsphere", + "text": "test source", "title": "test source"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '118' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/events + response: + body: + string: '{"status":"ok","event":{"id":5309722856448002944,"title":"test source","text":"test + source","date_happened":1580747010,"handle":null,"priority":"low","related_event_id":null,"tags":null,"url":"https://app.datadoghq.com/event/event?id=5309722856448002944"}}' + headers: + Connection: + - keep-alive + Content-Length: + - '256' + Content-Security-Policy: + - 
frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - propjoe + Date: + - Mon, 03 Feb 2020 16:23:30 GMT + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - SAMEORIGIN + status: + code: 202 + message: Accepted +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/events/5309722856448002944 + response: + body: + string: !!binary | + H4sIAAAAAAAAA3WPzQrCMBCEX0X2HGxaU/sD4oOIlNCuNhCTkGyrIn1306oX0cuyLDPfzD4ARzQE + 9QM6Sdj00jk02EGd5iUvRMFTzkBq9NTQ3SHUoMzJAgOPwQ6+nS+JdCoZ02RBhSTf8KrIsjLfClFy + nlVCRD0p0rOYMNDqbWUweD0DFudr7lW3+0PAWyz6BSB5DlAfjgw6HFWLjZGXGGMGrRk4r6xXdI8u + ba+xRG/DB7FedgYq/vojb5qeW88JJBsBAAA= + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb_events + Date: + - Mon, 03 Feb 2020 16:23:51 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:23:51 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - ZXnqfnoulW/f1bxZ9Ft3hNAE2w2HYvkmtBBOZYz/cmHbQvzJ78Xjm/WUDv0z3XOw + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: 
https://api.datadoghq.com/api/v1/events?end=1580747110&priority=low&sources=vsphere&start=1580746910 + response: + body: + string: !!binary | + H4sIAAAAAAAAA22QTWrDMBCFrxJmLWrZlWtHUHqILkswwp7YA7IkpLHbEHL3yknaTbIR8/PmvQ+d + AVd0nEB/nWEwjN1kQkCHA+iybmWjGllKAcZi5I5PAUEDuaMHAZQ6M44Rx3wG+mhsQgFMbDcNY+Jd + 8kvsMUuXaPOsuGbd3g8a3utXuW+qqq3flGqlrPZKZS3jDz8YsBk3xoOA3s/znTh3A67UY+fMnEPd + Yq2AEMlH4lP2sP47G94pNKyfYcK48Uw+/WW8XGsBEf91hQlUrOUNNBXPMSn/0JPN5XD5BVZY11lU + AQAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb_events + Date: + - Mon, 03 Feb 2020 16:23:51 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:23:51 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - 59/VjqZmAmoPozUEJbZ0ofaoBijSmEl1FhCPKRCgWaehtXiy8XRGWDlGGm1CtKTl + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/api/cassettes/TestDatadog.test_get_all_embeds.yaml b/tests/integration/api/cassettes/TestDatadog.test_get_all_embeds.yaml new file mode 100644 index 000000000..728ee2c44 --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_get_all_embeds.yaml @@ -0,0 +1,58 @@ +interactions: +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/graph/embed + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWSs1NSk1JSU2JTy9KLMgoVrKKjq0FAI2GhxwWAAAA + headers: + Cache-Control: + - no-cache + Connection: + - 
keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:52 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:52 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - v8lEj/pYmsavh1I0Db6FT/BAvLdOdAv91ctM9ImcmfZ/KHrCACXEdhuskTCPihd+ + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/api/cassettes/TestDatadog.test_graph_snapshot.frozen b/tests/integration/api/cassettes/TestDatadog.test_graph_snapshot.frozen new file mode 100644 index 000000000..b672b51a3 --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_graph_snapshot.frozen @@ -0,0 +1 @@ +2020-02-03T17:19:51.074858+01:00 \ No newline at end of file diff --git a/tests/integration/api/cassettes/TestDatadog.test_graph_snapshot.yaml b/tests/integration/api/cassettes/TestDatadog.test_graph_snapshot.yaml new file mode 100644 index 000000000..11c20b548 --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_graph_snapshot.yaml @@ -0,0 +1,260 @@ +interactions: +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/graph/snapshot?end=1580746791&metric_query=system.load.1%7B%2A%7D&start=1580743191 + response: + body: + string: !!binary | + H4sIAAAAAAAAA13OQQ6DIBAF0Ks0s2wqIKhVr1Ibgw5qExUEbGOMdy9duOlu/uTn5+3QW2mGGlUH + JewVWLWsynlXQXl5hLz8jgrc5ryayKglkni/HhUczwNu4GZp3KB9vdoxDAzeG1dSaghKL1H3w0Ja + 
PdGzRt8v9aGI0flwkbEaqbZ9LXicx4JyxlnEeMQETYXsZM6TtG2Slqt7ljWsQ5GxRnCVMpHnRdwV + mBEz98EyKW9fbR38dguWfzIcX0s9+evtAAAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:19:51 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:19:51 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - fFk0sZgwwse+ZeEmqVGZPgcNG+SDXdM7Y74n6iOGuvoZenvaYEqZOvpOSMu1XDXx + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + X-RateLimit-Limit: + - '60' + X-RateLimit-Period: + - '3600' + X-RateLimit-Remaining: + - '59' + X-RateLimit-Reset: + - '2409' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/graph/snapshot?end=1580746791&event_query=%2A&metric_query=system.load.1%7B%2A%7D&start=1580743191 + response: + body: + string: !!binary | + H4sIAAAAAAAAA12Pza6CMBCFX8XM0kh/qIV7eRUxpGGmYCK0tFVjCO9uXejC3ZwvJyffrDAE48cO + yUIDawuBlhvFFFtodqecl/fRQnzGRBO7OoNMrvuthe28wQHibHwcXepu4ZoHxpR8bDj3DE0y6IZx + Yb2b+KfG7xd6cMTiA2Lhg0PuwtCpUv5JxUtRikKUhVBcYU91pa2UFrFGjdoaaanHWilT6aquxFH8 + a2J+HrIL3WlOXdYPz6yyz2SiFC79F/0+AdsLjFmHcv8AAAA= + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:19:51 GMT + 
Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:19:51 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - KKdI9UAf8fC5q7osIllxNui0A1CUm45w7mZBz+tu6Vlp/ga+Q6ZXvY0JoJlUBVi+ + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + X-RateLimit-Limit: + - '60' + X-RateLimit-Period: + - '3600' + X-RateLimit-Remaining: + - '58' + X-RateLimit-Reset: + - '2409' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/graph/snapshot?end=1580746791&graph_def=%7B%22requests%22%3A+%5B%7B%22conditional_formats%22%3A+%5B%7B%22comparator%22%3A+%22%3E%22%2C+%22palette%22%3A+%22red%22%2C+%22value%22%3A+50000000000%7D%2C+%7B%22comparator%22%3A+%22%3E%22%2C+%22palette%22%3A+%22green%22%2C+%22value%22%3A+30000000000%7D%5D%2C+%22q%22%3A+%22top%28system.disk.free%7B%2A%7D+by+%7Bdevice%7D%2C+10%2C+%27mean%27%2C+%27desc%27%29%22%2C+%22style%22%3A+%7B%22palette%22%3A+%22dog_classic%22%7D%7D%5D%2C+%22viz%22%3A+%22toplist%22%7D&start=1580743191 + response: + body: + string: !!binary | + H4sIAAAAAAAAA4WQwW6DMAyGXwXl0m0CAmS0aQ97kTGhNDEQLZA0SZk6xLsvBbXqdpkvln87/2dn + Qq1lpqsFNOiApgpZOJ3BeVehQ/Qeaq4HIb3UA1N1o23PHlu9YZZ5ba/DFXqrUBySYQq8h1WzIFZ1 + ZOq8aGV2jzmOFsL/Nq0FGP4YkQejjyv4tCK9Nk/u4jz0qZDuM23C2+lljo6XaBIwSg6Bm2dxtOmB + DZuQBTi+eV7tnb+oZc+w2a9LhG5rrphzkldonhfiKL/vTCWdDw0UIzcw4zrt67NV4VM77407YGxS + wTwLNt0pDTfj2xgeJXxhIZKb4BJjtcDatjUpcpoTXGRFlmRFkhHMX/n+lRJalkfYNmRHCQha7mEH + tAGaw1Yw3mQAqRlaNP8AR2gXQd8BAAA= + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; 
report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:19:51 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:19:51 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - F11u7JCZTPrHz8VfzL5YeXThxcQSR6CdLGgk2tF52+EbYWhXciN8nv9vA8oQ9C9A + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + X-RateLimit-Limit: + - '60' + X-RateLimit-Period: + - '3600' + X-RateLimit-Remaining: + - '57' + X-RateLimit-Reset: + - '2409' + status: + code: 200 + message: OK +- request: + body: '{}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '2' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/graph/snapshot_status/dd-snapshots-prod/org_321813/2020-02-03/c4c9483855be6f3783ed859e7e8fe81e6dacf0ee + response: + body: + string: '{"status_code":200}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '19' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:02 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:02 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - fGPsEOteKPqWrypJlOWIRpMZD2l0VjpTiFY5o5e56+jFb+ShdPzcenDH6s8Ah62s + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +version: 1 diff --git 
a/tests/integration/api/cassettes/TestDatadog.test_host_muting.frozen b/tests/integration/api/cassettes/TestDatadog.test_host_muting.frozen new file mode 100644 index 000000000..ab469b035 --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_host_muting.frozen @@ -0,0 +1 @@ +2020-02-03T17:20:40.738359+01:00 \ No newline at end of file diff --git a/tests/integration/api/cassettes/TestDatadog.test_host_muting.yaml b/tests/integration/api/cassettes/TestDatadog.test_host_muting.yaml new file mode 100644 index 000000000..8f48cca33 --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_host_muting.yaml @@ -0,0 +1,334 @@ +interactions: +- request: + body: '{"series": [{"host": "my.test.host1580750440", "metric": "test.muting.host", + "points": [[1580746840.745515, 1.0]]}]}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '116' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/series + response: + body: + string: '{"status": "ok"}' + headers: + Connection: + - keep-alive + Content-Length: + - '16' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - text/json + DD-POOL: + - propjoe + Date: + - Mon, 03 Feb 2020 16:20:41 GMT + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - SAMEORIGIN + status: + code: 202 + message: Accepted +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/tags/hosts/my.test.host1580750440 + response: + body: + string: '{"tags":[]}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '11' 
+ Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:51 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:51 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - zgs4/R8U39Dx88K274ycCG8gmotK2r1yjyecTfeITqBuGEc/zW9V1MMOyMl9URns + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"end": 1580750440, "message": "Muting this host for a test."}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '62' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/host/my.test.host1580750440/mute + response: + body: + string: !!binary | + H4sIAAAAAAAAA0XMQQoCMQyF4auErMvQyowtcwfPIMXGmSyagonIIN7d1o3bn/e9N+abcRNc8fI0 + KuiwtJcYV7pywTWeUoghprPDvalJrtSn9ZiM1KaRwpJ8XPw8+25JuvkXh5VU8zZMv2fZwHZWGA7u + 7QEZfj/4+QL+aznEiAAAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:51 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:51 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - WatxAL43AyqgfI4tyA152NzYM3DLdjL7IWr0SzhldiWriTsbw9vUaRZnaqhOCdUk + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + 
code: 200 + message: OK +- request: + body: '{"end": 1580751340}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '19' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/host/my.test.host1580750440/mute + response: + body: + string: '{"errors": ["host:my.test.host1580750440 is already muted. To mute + this host with a different end timestamp, add + ?override=true to your request."]}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '175' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:51 GMT + Pragma: + - no-cache + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 400 + message: Bad Request +- request: + body: '{"end": 1580751340, "override": true}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '37' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/host/my.test.host1580750440/mute + response: + body: + string: !!binary | + H4sIAAAAAAAAAyXLQQqAIBBA0bvMWkRLGfEOnSEkB3LhuHAiIrp7RtvH/zekTUpjiLAcQhkU5Hay + lEpryRBxChYtBlSwty6cKo20Xlqoi/7I+mDQG+fMeInH84udnXle4ljzUF8AAAA= + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:52 GMT + Pragma: + - no-cache + Set-Cookie: 
+ - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:52 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - o1rjyOSbDnvYaQgtO33vwWSNsIwHafzLqam2amG/PbTP69SVY965ZpWutdoYJB30 + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '2' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/host/my.test.host1580750440/unmute + response: + body: + string: !!binary | + H4sIAAAAAAAAAxXKQQqAIBAF0Lv8tYhGMuI9WofkQC5GF05ERHevto93I29ae0PC0uRQLjAo/Wxa + hddakGiKnjxFMtj70JaFvyyXVR5qf/IhOgpunh2eFwbcZDZQAAAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:52 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:52 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - vwiIwb5QepaQFIQrmPfIwwVWkQ/z0inFQwNEDjqDDy4v3CsF5qbv9dnyfb7UGzLf + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/api/cassettes/TestDatadog.test_metrics_list.frozen b/tests/integration/api/cassettes/TestDatadog.test_metrics_list.frozen new file mode 100644 index 000000000..8553b02a5 --- /dev/null +++ 
b/tests/integration/api/cassettes/TestDatadog.test_metrics_list.frozen @@ -0,0 +1 @@ +2020-02-03T17:18:58.118145+01:00 \ No newline at end of file diff --git a/tests/integration/api/cassettes/TestDatadog.test_metrics_list.yaml b/tests/integration/api/cassettes/TestDatadog.test_metrics_list.yaml new file mode 100644 index 000000000..80cb91c22 --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_metrics_list.yaml @@ -0,0 +1,116 @@ +interactions: +- request: + body: '{"series": [{"host": "test.host.1580746738", "metric": "test.metric_list.1580746738", + "points": [[1580746678, 1.0], [1580746738, 2.0]]}]}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '137' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/series + response: + body: + string: '{"status": "ok"}' + headers: + Connection: + - keep-alive + Content-Length: + - '16' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - text/json + DD-POOL: + - propjoe + Date: + - Mon, 03 Feb 2020 16:18:58 GMT + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - SAMEORIGIN + status: + code: 202 + message: Accepted +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/query?from=1580746138&query=test.metric_list.1580746738%7Bhost%3Atest.host.1580746738%7D&to=1580747338 + response: + body: + string: !!binary | + H4sIAAAAAAAAA32R226DMAyG38XXEYIyCsurVChKi0ejQZIlpipCvPvMYZV6Me58SP7vtz1BJE1D + BAnuGwQEjIpGj5yT6VFFDAYjN76C61WjiTtZUaXlxznLqzRNBexP5GUCtM2rXeafa1sTBXMdiFXk + 
NAvokfPboo+Rki1TneF41y3zinnGEoaH7kAWjCAd6KV8LotVuUPb0h3kScDPgGFUxjb4BLlA2zaA + tEPX8eebW+e5u0hyhS7RO8075i0mQF4uu48Nk6QiS9Ja/FXLfIFz9cTVWgA+Pe8sGmePR5r+xc88 + 7WANoze/jYm+06Oyul/PcLCmmQ2QeztLme9nYVdePTBs1rJ9R8eCxyZ7nlO3iyd23AY3eHUdeWH1 + /AuJ45aFRgIAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb_query + Date: + - Mon, 03 Feb 2020 16:19:19 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:19:19 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - wghLN3+Duz26oD9vYHaC2SJLnwdbTdfoUHj9aYrmdszUfV5MzRfJW8QR4Z2cYZBn + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + X-RateLimit-Limit: + - '600' + X-RateLimit-Period: + - '3600' + X-RateLimit-Remaining: + - '592' + X-RateLimit-Reset: + - '2441' + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/api/cassettes/TestDatadog.test_metrics_simple.frozen b/tests/integration/api/cassettes/TestDatadog.test_metrics_simple.frozen new file mode 100644 index 000000000..dc3a5decd --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_metrics_simple.frozen @@ -0,0 +1 @@ +2020-02-03T17:18:16.581371+01:00 \ No newline at end of file diff --git a/tests/integration/api/cassettes/TestDatadog.test_metrics_simple.yaml b/tests/integration/api/cassettes/TestDatadog.test_metrics_simple.yaml new file mode 100644 index 000000000..c4266d32e --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_metrics_simple.yaml @@ -0,0 +1,116 @@ +interactions: +- request: + body: '{"series": [{"host": "test.host.1580746696", "metric": "test.metric_single.1580746696", + "points": 
[[1580746696.581371, 1.0]]}]}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '127' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/series + response: + body: + string: '{"status": "ok"}' + headers: + Connection: + - keep-alive + Content-Length: + - '16' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - text/json + DD-POOL: + - propjoe + Date: + - Mon, 03 Feb 2020 16:18:17 GMT + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - SAMEORIGIN + status: + code: 202 + message: Accepted +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/query?from=1580746096&query=test.metric_single.1580746696%7Bhost%3Atest.host.1580746696%7D&to=1580747296 + response: + body: + string: !!binary | + H4sIAAAAAAAAA4WR3W7DIAyF38XXUUSnJV14lShCtPFSNAIZmKpRlHef87NqvVh7hzGc7/h4gkia + UgQJ/gsyCBgVjQNyTaZHFTEYjNz4DL5XrSbuHIoPcXwvRVUKITLYn8h6AnTtvV1W1drWRMGcErGK + nOYMeuT6vOhjpHyrVDSus5jvymVVMtE4wnDVFmTBENKB/mgXq7ZF19GFrzP4ThhGZVyLN5ALtusC + SJes5c9nv0508ZHkil1Oj7TBM8+ayJC6vvtYMLnIDrlomgzwNnA+0Xj3yv70L2rmyZIzjNm8tSYO + Vo/K6X4N/WkoM5sg/7CG49u+BnY2qCuGzd5vIq8knxvteVrdLb7YdRd8GtRp5ICa+Qf5gjtaOAIA + AA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb_query + Date: + - Mon, 03 Feb 2020 16:18:58 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 
10-Feb-2020 16:18:57 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - 7/EmKgpb6kECHfVLeiiBMh1Q6D7kF88h+ZOQdXcwWmiJ4IhzcVbd63A4lfLK3Djm + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + X-RateLimit-Limit: + - '600' + X-RateLimit-Period: + - '3600' + X-RateLimit-Remaining: + - '595' + X-RateLimit-Reset: + - '2462' + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/api/cassettes/TestDatadog.test_metrics_tuple.frozen b/tests/integration/api/cassettes/TestDatadog.test_metrics_tuple.frozen new file mode 100644 index 000000000..3ca8cca7a --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_metrics_tuple.frozen @@ -0,0 +1 @@ +2020-02-03T17:19:19.362064+01:00 \ No newline at end of file diff --git a/tests/integration/api/cassettes/TestDatadog.test_metrics_tuple.yaml b/tests/integration/api/cassettes/TestDatadog.test_metrics_tuple.yaml new file mode 100644 index 000000000..748f429f1 --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_metrics_tuple.yaml @@ -0,0 +1,116 @@ +interactions: +- request: + body: '{"series": [{"host": "test.host.1580746759", "metric": "test.metric_tuple.1580746759", + "points": [[1580746699, 1.0]]}]}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '119' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/series + response: + body: + string: '{"status": "ok"}' + headers: + Connection: + - keep-alive + Content-Length: + - '16' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - text/json + DD-POOL: + - propjoe + Date: + - Mon, 03 Feb 2020 16:19:19 GMT + Strict-Transport-Security: + - 
max-age=15724800; + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - SAMEORIGIN + status: + code: 202 + message: Accepted +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/query?from=1580746159&query=test.metric_tuple.1580746759%7Bhost%3Atest.host.1580746759%7D&to=1580747359 + response: + body: + string: !!binary | + H4sIAAAAAAAAA4WRzW6DMBCE32XPCBG1kOJXQchywpZYNbZjr6MgxLt3+WnVHJrcvB6Yb3Y8QSRF + KYIA9wUZBIySRo88kx5QRgwaIwufwQ2yU8TKofwoju/VoayLoshg/0Q0E6DtfuWq3mRFFPQpEbuI + ac5gQJ7Piz9GyrdJUvIG8934WNYM1JYw3JQBUTKDVKA/1uVKNmh7uvB1BteEYZTadngHwZLq+wDC + JmP457NbF7q4SGKlLqdHmnfMMzoypGn2HFW9YPIiO+RF22aAd8/1RO3si/TTv6SZF0tWM2WL1uno + jRqlVcNa+bNKZo5A7uENjm/7G3AuL28YtnA/fbxwfB5z4FVVv6TizH1wycvTyO208zcf024PNAIA + AA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb_query + Date: + - Mon, 03 Feb 2020 16:19:50 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:19:50 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - WGEt22UyFjacPJiGCGldkpXy7hlYR0GLzW8P2raGKCRis5TmejD07wafhQzZGMqG + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + X-RateLimit-Limit: + - '600' + X-RateLimit-Period: + - '3600' + X-RateLimit-Remaining: + - '588' + X-RateLimit-Reset: + - '2410' + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/api/cassettes/TestDatadog.test_monitor_can_delete.frozen 
b/tests/integration/api/cassettes/TestDatadog.test_monitor_can_delete.frozen new file mode 100644 index 000000000..f99a77bcc --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_monitor_can_delete.frozen @@ -0,0 +1 @@ +2020-02-03T17:20:29.656044+01:00 \ No newline at end of file diff --git a/tests/integration/api/cassettes/TestDatadog.test_monitor_can_delete.yaml b/tests/integration/api/cassettes/TestDatadog.test_monitor_can_delete.yaml new file mode 100644 index 000000000..6137ac700 --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_monitor_can_delete.yaml @@ -0,0 +1,426 @@ +interactions: +- request: + body: '{"options": {"notify_no_data": false, "silenced": {"*": 1580750429}}, "query": + "avg(last_1h):sum:system.net.bytes_rcvd{host:host0} > 100", "type": "metric + alert"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '162' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/monitor + response: + body: + string: !!binary | + H4sIAAAAAAAAA41SwW7cIBD9FcSpJY4F9u7Gy6Hqodfm1FsVIWpmbVQMCeCNLGv/vcOuq6hSpNQH + y8w8z7z3eCuNkHK0fQajYnCQqPSzcxXNesDvn08VNeAA238bLzPEhUqqz8Mnp1NWYvws0zzJtKQM + U+0h17+WDEnF/mzWMaQsy4tfyBciOKcVnSAlPQAOwYPFyWJ/aPfHA8fW7LKl8qRdgop6PRUUY+/O + ZoyEM0TC2NsSrL3qhKXrrpoXjCcaYbiQmDlaP5A8AiIKeSJGxmpk0UfQV5G04Q2/5809b3+Ig2y4 + bHkteLPr+B3n8sp/AyudC/WOP+wOXcvx2VohUrnehLUPx06Iio7aG1e0eNuPAVfX0wypH78anbUJ + w/hS92HC2Zvkxw1Gvl9h2IBJW/fhgEtFQxxUMbVtRCdatDQYe7L/q604qp1TKaMf6u3fWyr+6SKZ + x0C+oQCkl5fnIm+CkiaiHcSM1fCcbfAYpJX6kO1pUXo2Fm3bLtiF/ndhth2TdeD7Ulgp27zd811z + RFnW9242oG7BzHEu+YBXVe5eYUY1hhIvAYu3RT6o4u02+3L5AwYNVs7rAgAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - 
application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:30 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:30 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - qQjtInIx1/QKXFlq6Yoz4D/caW/S2oJqgJl91CEEpXrlxRmYHcLgIFCRCvW61KAy + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + X-RateLimit-Limit: + - '500' + X-RateLimit-Period: + - '10' + X-RateLimit-Remaining: + - '499' + X-RateLimit-Reset: + - '10' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/monitor/can_delete?monitor_ids=15635960 + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWSi0qyi8qVrLKK83J0VFKSSxJVLKqVsrPVrKKNjQ1Mza1NDOIra0FADFih1co + AAAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:30 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:30 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - nSRgqrrNNmPPT6VSGZq0R9QdtdJF1qxzho2//eboP+tsIQDRgfSx3bSVb1t6QyYb + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"monitor_ids": [15635960], "name": "test SLO 1580746829", "thresholds": + [{"target": 90, 
"timeframe": "7d"}], "type": "monitor"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '128' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/slo + response: + body: + string: !!binary | + H4sIAAAAAAAAA4VRu26EMBD8FbQ1Qj44ntV9QB5FyihCPq8BSxgT21ecEP+etQNdpFSWd2ZnxuMN + kHsO3ecGKJ2wavXKLNAtj3lOQZtFeWN7z0dHnK8UhJWcJtBtMPEFZwkdLEpMZuYu0w+SmG5BEc04 + fWfCaEhh4TrQ3g5a8hppBEjN1fyvwJ6Cn6x05IEhxQae21F66FqWMQLjrUfl1pk/SS6MGcl7peVg + f81rhJ3i++cqe4XQ0V44oB0wF/e7uIoGy7pibYFF2YgmL5qyaJkgmbMEFd0vZVWUbcVI7HiXl84n + Hy/vyaVsWH2tmrylrViUxJ5TzhMoYtqzSm1QDeovCoWkaIdvzC2tDaWHX9l/AEhse/yzAQAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:30 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:30 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - ucJMu0SEwqvJ36fqkYRsP+glKObktTtdBf6X17lKXJ4+xOn7nFKnx11beu1ycofn + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/monitor/can_delete?monitor_ids=15635960 + response: + body: + string: '{"errors":{"15635960":["monitor [15635960,**system.net.bytes_rcvd** + over **host:host0** was **> 100.0** on average during the **last 1h**.] 
is + referenced in slos: [9fd2cbbc4c8d576093d358c82385390c,test SLO 1580746829]"]},"data":{"ok":[]}}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '238' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:30 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:30 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 409 + message: Conflict +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '0' + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/slo/9fd2cbbc4c8d576093d358c82385390c + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWSkksSVSyilayTEsxSk5KSjZJtkgxNTczsDROMTa1SLYwMrYwNbY0SFaK1VFK + LSrKL1KyyivNyakFAKJAEo86AAAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:30 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:30 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - lyzq/AwBoYy31tqkvQ8mzBugAZOys447o2yCYdRfm1oPuJTtZy0Uz+ukzrgaZfIT + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: 
null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/monitor/can_delete?monitor_ids=15635960 + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWSi0qyi8qVrLKK83J0VFKSSxJVLKqVsrPVrKKNjQ1Mza1NDOIra0FADFih1co + AAAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:31 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:30 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - menB+JzZJZWnsBMzYDdvLqZLyJ1Z3XKvvLNUvAnnxCkhc359HSRPRWZhATTwUzcU + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '0' + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/monitor/15635960 + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWSknNSS1JTYnPzc/LLMkvis9MUbIyNDUzNrU0M6gFADeMeE4fAAAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:31 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 
16:20:31 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - L5yd3v29mZzDtdpTLB/OLdaP/nm856X8oKVK7IsHIbLmKRYkqq5Jv7+SBx/bs1VS + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/api/cassettes/TestDatadog.test_monitor_can_delete_with_force.frozen b/tests/integration/api/cassettes/TestDatadog.test_monitor_can_delete_with_force.frozen new file mode 100644 index 000000000..8647a6036 --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_monitor_can_delete_with_force.frozen @@ -0,0 +1 @@ +2020-06-25T14:33:30.290807+02:00 \ No newline at end of file diff --git a/tests/integration/api/cassettes/TestDatadog.test_monitor_can_delete_with_force.yaml b/tests/integration/api/cassettes/TestDatadog.test_monitor_can_delete_with_force.yaml new file mode 100644 index 000000000..c2349cdbb --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_monitor_can_delete_with_force.yaml @@ -0,0 +1,267 @@ +interactions: +- request: + body: '{"options": {"notify_no_data": false, "silenced": {"*": 1593092010}}, "query": + "avg(last_1h):sum:system.net.bytes_rcvd{host:host0} > 100", "type": "metric + alert"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '162' + Content-Type: + - application/json + User-Agent: + - python-requests/2.24.0 + method: POST + uri: https://api.datadoghq.com/api/v1/monitor + response: + body: + string: !!binary | + H4sIAAAAAAAAA41Sy4rcMBD8FaFT4niN/Jhh7MOyh9wCuSS3EESv1WOLkaVdSZ7FmPn3tDwOSyCQ + XIzVXequKtXKPYbodR9RSe8MBt7Z2ZicRxjo/8fPnCs0SO3fjdcZ/cI7Dtfhg4EQZTl+7MI8dWEJ + EafCYiyel4hB+v6q1tGF2KWPuLFHVgrBcz5hCDAgDaGDpslle2ybSlTUmk3UvDuDCZhzC1NCZdlf + Z2cZc1f0LMvel1DtDQKVtl2FSBjLgGC0kKnZazuwOCIhEnlWjllWEIveI2wiOdEQD+L4UB2+l1VX + 
110tilY0x8PpkxDdxn8HS4hE/dDW4nRqaJsQe8t53q13YVUjmuaY8xGsMklLMETmUlzg2St4UhBB + uWF8LXo30eRd8LcNxL5sICrjBNr84/It584PMtlZtXVdtmSmU/qs/1dV8hKMkSGSE/L97j0Pf3SJ + ylfHPhN5IheXlyRswpQjBgZ9pKp7idpZitDKrYv6vEiYlSbD9qc1rr8kZvsxaIO2T4WVZ7urbSVK + QbK07c2sUN4jGf2ckoFvMr26pHQCxbFO5u+LrJPJ13327fYLl4/bcOUCAAA= + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Thu, 25 Jun 2020 12:33:30 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=89; Max-Age=604800; Path=/; expires=Thu, 02-Jul-2020 12:33:30 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - BsieYxalcMaIS+cTbK9YL1FxnAIiDF/6CFe3/lefzTTUruWB5XaSb08KP3lTATlu + X-DD-VERSION: + - '35.2664483' + X-Frame-Options: + - SAMEORIGIN + X-RateLimit-Limit: + - '500' + X-RateLimit-Period: + - '10' + X-RateLimit-Remaining: + - '499' + X-RateLimit-Reset: + - '10' + status: + code: 200 + message: OK +- request: + body: '{"monitor_ids": [19694202], "name": "test SLO 1593088410", "thresholds": + [{"target": 90, "timeframe": "7d"}], "type": "monitor"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '128' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=89 + User-Agent: + - python-requests/2.24.0 + method: POST + uri: https://api.datadoghq.com/api/v1/slo + response: + body: + string: !!binary | + H4sIAAAAAAAAA4VRy26EMAz8FeQzQuG5gVPvrdTDHqsKGeKFaAOhIa20Qvx7HbrbU6WeItvjmfFk + A4UeoXnbQNHaO714bWdo5k9jYpjsrL11rcdhZcx7DL0j5A40G4w4K0PQwGrwi67JFTun8CnwKTuM + H0lvJ4hhximAzgcoej5A3KYJtflneY/Bj47W0RoV9Dfw6Aby0NQiETw8qlbpdTF4YzJuM7XXE13c + 
j+xJwc62/W2hVitoeCs8QKnEAmXeq16WsuukFGUu8NRnXVUgVUzzOF4f2mld1UUmMia7X+Rp9dH5 + 5TVKyzoXUhap4K0jIFItssvfQRq8PiKcrNIX/ReETbK1u+7hm5wLYYff2L8BM1KU6qsBAAA= + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Thu, 25 Jun 2020 12:33:31 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=89; Max-Age=604800; Path=/; expires=Thu, 02-Jul-2020 12:33:31 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - 1/ye/L7/S9djtmh0CDbapYOAoYP2Xz5NE904aTai4cgQw/Kmmv343hpHqBIP3PC5 + X-DD-VERSION: + - '35.2664483' + X-Frame-Options: + - SAMEORIGIN + X-RateLimit-Limit: + - '500' + X-RateLimit-Period: + - '60' + X-RateLimit-Remaining: + - '499' + X-RateLimit-Reset: + - '29' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '0' + Cookie: + - DD-PSHARD=89 + User-Agent: + - python-requests/2.24.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/monitor/19694202?force=True + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWSknNSS1JTYnPzc/LLMkvis9MUbIytDSzNDEyMKoFAJ8KlmQfAAAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Thu, 25 Jun 2020 12:33:31 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=89; Max-Age=604800; Path=/; expires=Thu, 02-Jul-2020 12:33:31 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - 
chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - kg+/Cls6zaJcT2blJLlU62BwgGePGdpqSwWrJ0xEIvzmSMWHXxGNsiyEzBPJ1a96 + X-DD-VERSION: + - '35.2664483' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '0' + Cookie: + - DD-PSHARD=89 + User-Agent: + - python-requests/2.24.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/slo/e18a4a83cdc858bb880530a7c2b64ae6 + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWSkksSVSyilZKNbRINEm0ME5OSbYwtUhKsrAwMDU2SDRPNkoyM0lMNVOK1VFK + LSrKL1KyyivNyakFAASik986AAAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Thu, 25 Jun 2020 12:33:31 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=89; Max-Age=604800; Path=/; expires=Thu, 02-Jul-2020 12:33:31 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - BsieYxalcMaIS+cTbK9YL1FxnAIiDF/6CFe3/lefzTTUruWB5XaSb08KP3lTATlu + X-DD-VERSION: + - '35.2664483' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/api/cassettes/TestDatadog.test_monitor_crud.frozen b/tests/integration/api/cassettes/TestDatadog.test_monitor_crud.frozen new file mode 100644 index 000000000..0eb22000d --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_monitor_crud.frozen @@ -0,0 +1 @@ +2020-02-03T17:20:26.734266+01:00 \ No newline at end of file diff --git a/tests/integration/api/cassettes/TestDatadog.test_monitor_crud.yaml 
b/tests/integration/api/cassettes/TestDatadog.test_monitor_crud.yaml new file mode 100644 index 000000000..eb74634c1 --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_monitor_crud.yaml @@ -0,0 +1,457 @@ +interactions: +- request: + body: '{"options": {"notify_no_data": false, "silenced": {"*": 1580750426}}, "query": + "avg(last_1h):sum:system.net.bytes_rcvd{host:host0} > 100", "type": "metric + alert"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '162' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/monitor + response: + body: + string: !!binary | + H4sIAAAAAAAAA41SwW7cIBD9FcSpcR0L7N21l0OUQ67NqbeqQtTM2igYEsAbWdb+e4ddV1GlSs3F + gpnnmfceb6UBYgqmT6Bl8BYiFW62tqRJDXj+8bOkGixg+0/jbYawUEHVefhiVUySj3cizpOIS0ww + VQ5S9WtJEGXoz3odfUwif9iFPBDOGC3pBDGqAXAIXgxO5vtDsz/uj9iabTJUnJSNUFKnpowqin/O + LgrizxBIUXwswdq7ili67qpYxjiiEIYLiZ6DcQNJIyAikyd8LIoKWfQB1FUkrVnN7ll9z5rv/CBq + Juq24m232zVfGRNX/htYqpSpd6zdHbq6ZYxtLR+oWG/CmvbYcV7SUTltsxZn+tHj6mqaIfbjo1ZJ + aT+Mb1XvJ5y9SX7eYOTbFYYNmJSx/x1wKakPg8ymNjXveIOWem1O5rPasqPKWhkT+iE//r2l4q8u + knn25AkFIL20vGZ5E+Q0EWUhJKz612S8wyCt1PlkTotUszZo2/bA1vcvmdl2jcaC63NhpcXm7Z7t + 6gPKMq63swZ5C2YKc84HvMv89hIzqjCUTX6CbZHzMnu7zb5cfgMOg0TB6wIAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:27 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:27 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + 
X-DD-Debug: + - qA85Kiicwd/s93AfT3MSf+l6IYc5FQ6tEbp4Kft/ri41UOumJ967MPQKmz3gwejd + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + X-RateLimit-Limit: + - '500' + X-RateLimit-Period: + - '10' + X-RateLimit-Remaining: + - '499' + X-RateLimit-Reset: + - '3' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/monitor/15635959 + response: + body: + string: !!binary | + H4sIAAAAAAAAA41SwW7cIBD9FcSpcR0L7N21l0OUQ67NqbeqQtTM2igYEsAbWdb+e4ddV1GlSs3F + gpnnmfceb6UBYgqmT6Bl8BYiFW62tqRJDXj+8bOkGixg+0/jbYawUEHVefhiVUySj3cizpOIS0ww + VQ5S9WtJEGXoz3odfUwif9iFPBDOGC3pBDGqAXAIXgxO5vtDsz/uj9iabTJUnJSNUFKnpowqin/O + LgrizxBIUXwswdq7ili67qpYxjiiEIYLiZ6DcQNJIyAikyd8LIoKWfQB1FUkrVnN7ll9z5rv/CBq + Juq24m232zVfGRNX/htYqpSpd6zdHbq6ZYxtLR+oWG/CmvbYcV7SUTltsxZn+tHj6mqaIfbjo1ZJ + aT+Mb1XvJ5y9SX7eYOTbFYYNmJSx/x1wKakPg8ymNjXveIOWem1O5rPasqPKWhkT+iE//r2l4q8u + knn25AkFIL20vGZ5E+Q0EWUhJKz612S8wyCt1PlkTotUszZo2/bA1vcvmdl2jcaC63NhpcXm7Z7t + 6gPKMq63swZ5C2YKc84HvMv89hIzqjCUTX6CbZHzMnu7zb5cfgMOg0TB6wIAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:27 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:27 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - aHo5LNOt81r33IYjVJvAW38EarOXxgJiaRes9P/xhrf7FT81LvjEnVLCvw9iPn7T + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + 
X-RateLimit-Limit: + - '3000' + X-RateLimit-Period: + - '10' + X-RateLimit-Remaining: + - '2999' + X-RateLimit-Reset: + - '3' + status: + code: 200 + message: OK +- request: + body: '{"options": {"notify_no_data": false, "silenced": {"*": 1580750426}}, "query": + "avg(last_1h):sum:system.net.bytes_rcvd{host:host0} > 200"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '138' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: PUT + uri: https://api.datadoghq.com/api/v1/monitor/15635959 + response: + body: + string: !!binary | + H4sIAAAAAAAAA3VSwW7cIBD9FcSpoY4F2Lv2csgp55xyqypE7VkvCoYE8EbWav+9Q9ZtVKm5IJh5 + vHlvZi40QsrRDhlGHYODRJVfnKtoNhPef/ys6AgOMP0n8bZAXKmi5jx9cyZlLU53Ki2zSmvKMNce + cv1rzZB0HM7j5RRSVuXgV/JAJOe0ojOkZCZAEnxYZBa7fbM77A6YWly2VB2NS1BRb+aCYuy/3IyR + cIZIGPssgrF3kzD0QATnNS8YTwzCsCAZl2j9RPIJEFHEE3FirEYVQwTzYZJKLvk9l/e8eRZ7JbmS + XS26vm2b75yrD/0bWJtcpPe8a/e97DjnFQ1x0sVRI0UvGvQTRnu0XxL3B3Ho2r/ExY5xTqeMYvTn + 39tI/smi0qdAHk02KCivr6VPM5RREuMgZoyG12yDxyleqA/ZHldtltGi5q27LgwvRdn2TNaBH0rg + QtlmbMdbub/ikPzglhH0bStyXMpw4F2XxmtcEIMb0RT/WyEf9Fik3biv199CtJVuaAIAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:28 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:27 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - HTCsbjwqQM0jTFHFq9ukWObBv4f/yxvHIxzrANPhzJkr6s3+rN5uCN3TcZuK2V2B + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + X-RateLimit-Limit: + - 
'500' + X-RateLimit-Period: + - '10' + X-RateLimit-Remaining: + - '499' + X-RateLimit-Reset: + - '3' + status: + code: 200 + message: OK +- request: + body: '{"name": "test_monitors", "options": {"notify_no_data": true}, "query": + "avg(last_1h):sum:system.net.bytes_rcvd{host:host0} > 200"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '131' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: PUT + uri: https://api.datadoghq.com/api/v1/monitor/15635959 + response: + body: + string: !!binary | + H4sIAAAAAAAAA3VRy27DIBD8lYhTH06EcWI7HHrquafeqgpRe+OgYkhhncqy/O9dEkdVD70g9jUz + OzuxABGDaRBaFbyFyKQbrM0Y6o7+b+8Za8EClW+FrwHCyCTT5+7O6ogqP97LOPQyjhGh3zjAzceI + EFVozu109BFlevi8eloJzlnGeohRd0AgFBhCzndlsdvv9lQaLBomD9pGyJjTfeoiMFS9dwZ9iDTS + BNAXRUxwwddcrHnxmpdScCmqTV7V223xyLm8kC3NSmPiqXm1LWtRcc4z5kOnEn0h8joviNy35mD+ + Aa43oqT5X2B/hqCtVRFJDMm7zV79+1MlpS9+9axRkyAcT2mpHpLvK20hIGX9CY13ZPnEnEdzGJUe + WkOaFyusbz6TsiWMxoJrUmJiD8tiO74V5UyOusYOLajrCTEMyUn4VukKiq6p6XxF2n8hcl61SZpM + rfP8Ay5W5OoUAgAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:28 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:28 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - o1rjyOSbDnvYaQgtO33vwWSNsIwHafzLqam2amG/PbTP69SVY965ZpWutdoYJB30 + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + X-RateLimit-Limit: + - '500' + X-RateLimit-Period: + - '10' + X-RateLimit-Remaining: + - '498' + 
X-RateLimit-Reset: + - '2' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/monitor + response: + body: + string: !!binary | + H4sIAAAAAAAAA+1bXW+bSBT9K4in1pug+R6Gh1W7W612tdp21eatrRCxiY2KIQWcrhXlv+8ZILaT + gGM3UWK3rqqqNgyeO9xz7zn3zny8dIu4rIpkWMWjsMjTuHSDbJamR24VjfH/j5+P3FGcxrh8feHr + LC7mbuBGF+MXaVRWIZ28DMrZNCjnZRVPvSyuvNN5FZdhMbwYXQ6unNO5cznJy+rK+dWhhLhH7jQu + y2gc4ynTPEuqvHBm56PI/giuRdVwkmTjcJR/y6oEtzbTSDADKiVXghncNUurxA2qYhYfuVk0tc8a + DDqnMBg4+UVcOAP7x/kWlfhfPRGP2EuZE+EqZuOMZgV+16kmMe6wpjl0Mhh4mNOwiOvZBS4jjBwT + esz8E8oDSQJqPCUFpeQXQoLauPbmMKrshH3CqBHUEELaS3nhBpdubY4QGKqO3EmUjVJrwlmRj19h + JaJRPp589Yb5FA9szGteSzyNkrT7xqsjNy/GoX0wZ9SnHKuUj5KzpHvijHuaM2PUYuJ2laI0DcsK + xob9Y3nAl9beGISJvU7josKkq/m5Naj2Fidqv8zPqyTP8EIv3SyvkrN5GM1GCdbpLEpLvMg0H36x + s20/lkkaZ0P7xSWMS7JhOhvFYeOZ7ZuPv4XWtUI4aQSv5HaV20dneWhXsn3a1dXR/jm7huPAPZ7b + 2Tk5oSTgFH89nwvBRa+zc/i7AbY6nJ1r41O64uxZMpzkwJk3ncXlcNLj9+7b9jbnn/o2+NY1CNY/ + YDM8LG3TniGaE39zPLRjEQd096CfHQ8WmoH9h3TF/vuDvdaULf2/jQqbRvvljz9e2LdvnAWSBQxh + X/nELL3ldtjnvq9Fd9jfXST02NaXGZqcdDsFvM2dNzbwLpLANLYU45AFGsqzmdvr3Qj71w5hOBH+ + D+rst2zrc/YF92uDgAhET664G/Z/FASk+bh88ckt81kxjAOQ/OqT+9JLslH8H74GN83sZwiJdHaO + L4b5LKvvOJ0vRtmPllzjCznFBysKLCQWkuBVmUbDL8ctCz62PwJafi9qFJGM95ClDx/+dF4Pq+Qi + qeaW8f/7/t0bPPAuref0hAoEdzB5TzFKKO9lOgLRX4DodDCdZ6D1zcS5DojvSWWU6GHoXbS+HesH + tEcLwJ/f/b0M53CCrWN5Q9etnMtnVThxAyzb/fS+mkCegiOOaskwLJIqGUaQP9QjYHdZF/mvBcfv + eXaWjO0Y67AxBEYjuFzONZWwpOUQ1mFdPMnO632UWUV66VZ5o9sEJJRSXPvQJckFLjU2QKJN2xsU + FtregCfUP/sBaQ6eGqziAz9WX/ur/APKbUXc1KBpxK21pZFDNzXLEdR5eyHJqri4sLZj4eIsOk3j + 0NoWltH03ErHZnLReFzE48iqLGtKE3YwoRqIyzd4/XFc5LPz36yah6gKG1S7Vw+TSkmGaSYjp6kT + PA6wObFitpH8N1nglsjmALfwKBWGyjXIBuu00N4Jwd6is2vifZlqQ1r2/TjudssugX4T452I7XH9 + 
Pid/mHc+TtXq/mREOeU7UKaC8zBQOBEQ4WmBlNWfzySlTHR7/Y7qlYVt0mNGEbNdyrPrgnTZM+gZ + KFxTrgptLjor6ooms8WsriR3GzI2+O8LLlBy3AFpA9/hAYpahHvM50QtCzi3dbykjBHUFTuywe7i + 4to2TbjmS8z3JYyltGnWhQV9/PGAi+/pcmySL+CEO4QLpj00WzQxvSzJ4oLpfcTFXdu2wMXGld4f + RfI/HWMSNt89e6+jjoBMBuiPMU4k6dcJEiQPt+xbZmhsU1T4gna3LXqKBHZddCBY96BDZviezLBt + /1tRLtgu1IYbmKiAG4+hKb+WQHFq9qoRcm2bIB5alszviflrYOJvURs+tMgfsB/E4oGLnUkb2uLB + lo3NOqHNmdxDPCD0E08bIlSPZl6DBxPIZR693S88bBl5vP1RFg++3BU8YMuIVJ7RXOi1NEpStXc0 + ytrmW9sk25pGoQwt+nstBzw8Lh7M7uAB7116BtSbrhPWXPly//AA27Rn571i24bCmivEiU1lxYEv + PYwvSY5NeLshs9Gv5sqjSkrTv6UQMtuXe5gfai5opDFi6wIsN0Ff1fauzD7g4WF4UHzZXH62/eS2 + UYd9pLxuSBB/PV8SGi2LfSs7WdsUGnWa6a31tJTYK3/ID09wvgL6AbWcXckPUtntGpLhyMe6Bp0w + /r41roF12KY8QozhPRXVfj0tD/WlJzpvBDwY8ux8ieHI0QnOFzE/4D4a1oqsOYIBMW2ovzf5YcU2 + 1JeExgmSjfPD9VhOtmhLHPjSw/gSTsk8d35Y8RkpPc2YWduoVobvj55etU17SivGe7aZ380PCzzQ + Q//hac6jKiapz3YAD6zWD3XbGsnBkGU55faGJuVrvUdta/j0wjbpKW4w/W4p0ImHZix6Fz0i/GfX + 0zeP6LFbx7Pv37jEJUocC/+/uTnbHt3AS6lPeJcd5y3wavkJBQ8Gs8F+I+3jaGnvfiMtlL9H+41W + bQNlUzgJ3uOBK2674cbtR9ta5A6akwxaEsHUQ85YNztQP/8P/pIS7VFAAAA= + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:28 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:28 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - PmDXJXCpOnq24qtagNCLPTUoILSRgi3DGaXUca70kUEAM8DZBLYkwSVilYSYEHCG + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + X-RateLimit-Limit: + - '1000' + X-RateLimit-Period: + - '10' + X-RateLimit-Remaining: + - '999' + X-RateLimit-Reset: + - '2' + etag: + - 
W/"0f053579909e411fe41db83a52c9b09b" + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '0' + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/monitor/15635959 + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWSknNSS1JTYnPzc/LLMkvis9MUbIyNDUzNrU0tawFACeJ/J0fAAAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:28 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:28 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - x4m73yTAj65OpCjnvpw3RBJyiFQpkDOBZ7rE/UM6Q4o0837nUb4ZsWFNJUD0Xh0e + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/api/cassettes/TestDatadog.test_monitor_muting.yaml b/tests/integration/api/cassettes/TestDatadog.test_monitor_muting.yaml new file mode 100644 index 000000000..30a5bd32c --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_monitor_muting.yaml @@ -0,0 +1,657 @@ +interactions: +- request: + body: '{"query": "avg(last_1h):sum:system.net.bytes_rcvd{host:host0} > 100", "type": + "metric alert"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '93' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: 
https://api.datadoghq.com/api/v1/monitor + response: + body: + string: !!binary | + H4sIAAAAAAAAA41STWvcMBD9K0KnxnWMZG82tg6hh16bU2+lCNWatUVlKdHHBmP2v3e06xIKhfZi + pJnnN+89zUYDxBTMmEDL4C1EKly2tqZJTXj+9r2mGixg+3fjNUNYqaDqPH2wKibJ5zsR8yLiGhMs + jYPU/FgTRBnGs95mH5MoH3YhT4QzRmu6QIxqAiTBi0Fm/nDsHoZjh61sk6HipGyEmjq1FFRV/ZW7 + qog/QyBV9T4Ea28qYuk6q2EF44hCGA4kOgfjJpJmQEQRT/hcVQ2qGAOoq0naspbds/aedV/5UbRM + dF0zDIdDP3xkTFz172CpUpHes8fDse86xtje8oGK7Wasexx6zms6K6dt8eLMOHsc3SwZ4jh/0iop + 7af5tRn9gty75ecdRr5cYdiARRn7T4JLTX2YZAm1a3nPS6Rem5P5X28lUWWtjAnzkO//3rbijy6K + efbkMxpAeWl9KfYWKNtElIWQsOpfkvEOF2mjzidzWqXK2mBs+wNbP/4syvZrNBbcWAobGjFutFmD + vK1iCrlsBLzJ8toSt1LhGmLqWLxROy9Lmjvb5fILMsVRe90CAAA= + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:34 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:33 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - 0pa1dtuadfHOUeVqLiK3mljtwHC7xKOrqXlG1EXfeExc1YyvZm51+jZLEiJ3YUs6 + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + X-RateLimit-Limit: + - '500' + X-RateLimit-Period: + - '10' + X-RateLimit-Remaining: + - '497' + X-RateLimit-Reset: + - '7' + status: + code: 200 + message: OK +- request: + body: '{"query": "avg(last_1h):sum:system.net.bytes_rcvd{*} by {host} > 100", + "type": "metric alert"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '94' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: 
https://api.datadoghq.com/api/v1/monitor + response: + body: + string: !!binary | + H4sIAAAAAAAAA41SwW6cMBT8FcunhhBkA2E3HKIeek1OuVWR5dhvwaqxE9tshBD/3uddqqpSpZaT + 8TzmzQyz0gAxBaMSaBG8hUh7N1tb0iQHPH9/LakGCwj/Aj5mCAvtqTwPX6yMSfDxpo/z1MclJpgq + B6l6WxJEEdRZr8VG3hayjj6mjTwSzhgt6QQxygGQBV8MUvP7rrl/6FqEZpsM7VOYoaROTnmoKP7K + XRTEnyGQIj/kU0Y8XTZULEOOSERxDdFzMG4gaQScyJoJH4uiwt0qgLx4ozWr2R2r71jzwru+Zn3T + Vh0/HNrmlrH+onofFjJlwUd2aLtj0zLGdsgH2q9XO83h4ch5SUfptM0WnFGjx9XVNENU41ctk9R+ + GD8q5Sfk3p0+72Pk6TKGAEzS2H8SbCX1YRA5yqbmR95gkF6bk/lfbzlIaa2ICfMQv7+9luEPFMU8 + e/INDaC8tLxnexPkEhFpISS89e/JeIf9WanzyZwWIWdtMLaTtBH/q/XqR1a2v0Zjwal8saIR45Sd + NYhrA/ciwKfIFRJYRonta3LoO7XzIqe5s23bT7Qj1RfUAgAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:34 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:34 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - j9H0Mt41m875GBjR2i9r831ZILGOU6+Jata5+JJkOQgIsO+SrMkmgWN80SCun0Sk + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + X-RateLimit-Limit: + - '500' + X-RateLimit-Period: + - '10' + X-RateLimit-Remaining: + - '496' + X-RateLimit-Reset: + - '6' + status: + code: 200 + message: OK +- request: + body: '{}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '2' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/monitor/mute_all + response: + body: + string: !!binary | + 
H4sIAAAAAAAAA2WPsQ7CMAxE/8UjytBQoKErvwATQpVJTRWpTarEAQHi30laYGHzWXfn5yd40tF7 + spqgtrHvBZBtv+PgrGHnG8YuQH2EBZwEaEzmnv5M5rdxvmuyKpdSyVJAawKep8QF+0ACAqNnqOVa + FdVqo8p1avWE+VTOybLaKikFjJjIeNrNbGwGejibWOGw34EA1GyuSbKPuVe7MYkP6EAhYPf7q3U3 + m/MN37OpEJB7q6WSldyohBDHFplmhHzu9QZdeA23HgEAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:35 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:35 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - 0pa1dtuadfHOUeVqLiK3mljtwHC7xKOrqXlG1EXfeExc1YyvZm51+jZLEiJ3YUs6 + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '2' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/monitor/unmute_all + response: + body: + string: '' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '0' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - text/html; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:35 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:35 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - 
TAg/qKywM5rz/AUGkmt8+wB4wzGMJfSiHOrBzxBctPLsV/erSD5TChi/uo5ZlVXK + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 204 + message: No Content +- request: + body: '{}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '2' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/monitor/15635963/mute + response: + body: + string: !!binary | + H4sIAAAAAAAAA41SwU7DMAz9lSgnCF2VtmNsOXDizIkbQlFovS4iTUbibqqm/TvOOoSQOHCJEvvF + fs/PJx4hYbQtQqdjcJC48qNzBUfT0/31reAdOKD0d+JzhDhxxc2hv3Emoa52tyqNg0pTQhhKD1i+ + TwhJx/bQnXYhocqHPLNHVknJCz5ASqYHKkIPS5Wr+1Vzv1k1lBodWq62xiUouDdDRgnxZ20hWDhA + ZEL8NKHY0SQKXXqVMmM8MwSjhqwbo/U9wx0QIpNn1U6Ikli0EcxFJK9lLReyXsjmpVqpWqqmKTeb + 5XK9uZNSXfhfwdpgpr6WD8vVummklAUPsddZUVNX6yrrCZ3d2v8WznKMczohkdE/f2dLfmWJ6XNg + TwYNEcJpn+c0QLaSGQcRKRr2aIMnF0/cB7TbSZuxs8T5Ol0X2o/M7PpM1oFvc+DExez2mezxrRs7 + 0PM+YByzLXDUeeSaVsPQLpB0Cs4tfNBdJjVXPZ+/AH4KKRliAgAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:35 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:35 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - i/tjaZJ1Vhpke5HNSziupF5eEnHtDP3NjcuF7Ija0/AGuxq0WEQiFfpqy+mDADxv + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"scope": "host:foo"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - 
keep-alive + Content-Length: + - '21' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/monitor/15635964/mute + response: + body: + string: !!binary | + H4sIAAAAAAAAA41STU/DMAz9K1FOULoq/aAbOXDizIkbQlFovS4iTSBxh6qq/x1nG0JIHMgp8XOe + n/288AARg+kQehW8hcilm6zNOeqB7s8vOe/BAsHfwMcEYeaS6+NwZXVEVR6uZZxGGeeIMBYOsHid + EaIK3bFfspW9zmw5+Igru2elEDznI8SoByAWehiiLm/b+vaubQiaLBouMUyQc6fHlJRlf3JnGfNH + CCxLh33qSLdThUIkyDFNKJVh/RSMGxgegDKSZlYesqyg2l0AfeqNV6ISG1FtRP1UtrISsm6Kttxu + m/pGCHlSfUlWGpPgndg27a5uhBA592FQqY+6KndlTV343uzNf4lTF9paFZHEqJ+/Zyd+oaT00bMH + jZoE4fyexjNCcpBpCwEp6t/ReEfmLdx5NPtZ6ak3pHmvbaShWt+9JWWXZzQWXJcCC08uyb33Z69X + 8sZ1dupBnbfhYgp8qpSoaDE0bUKdBnCp5Lzqk7Yz+bp+AesBGAdgAgAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:35 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:35 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - 7/pC7B9bYAY6HGz006Bg+ZrYGMZFiH1gxYQ0jMSpdzevd2r/Iy3Bkt2FGvLL5qId + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/monitor/15635964 + response: + body: + string: !!binary | + H4sIAAAAAAAAA41SwW7cIBT8FcSpcRwLbMe74RD10Gtyyq2KEDFvbVQMCeCNLMv/3seuq6pSpZYT + 
MMN7M49ZaYCYgukTaBm8hUiFm60taVID7r+/llSDBYR/AR8zhIUKqs7DF6tikny8EXGeRFxigqly + kKq3JUGUoT/rtdjI20LW0ce0kUfCGaMlnSBGNQBWwYPB0vy+a+4fuhah2SZDRQozlNSpKZOK4q+1 + i4L4MwRS5EU+VcTdpUPFMuSIQhTbED0H4waSRkBG1kz4WBQV9u4DqIs3WrOa3bH6jjUvvBM1E01b + dfxwaJtbxsRF9U6WKmXBR3Zou2PTMsZ2yAcq1qud5vBw5Lyko3LaZgvO9KPH1tU0Q+zHr1olpf0w + flS9n7D27vR5p5GnCw0BmJSx/yywldSHQeZRNjU/8gYH6bU5mf/1lgeprJUx4Tzk77fXMPyBophn + T76hAZSXlvdsb4IcIqIshIS3/j0Z7zA/K3U+mdMi1awNju2kbMR/tb7/kZXtx2gsuD5frDQHRZy8 + v8YNfRnX21mDvAZyzwV8ykyUmE2FYWzyH+ydnJd5uHvxbfsJ2EEZu+MCAAA= + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:35 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:35 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - QgRGXxkxV9A4PZPYRoesCGgupw+m7xaD1r9nbJHgAaPeprYV0FnzI0EYYO7x6f4+ + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + X-RateLimit-Limit: + - '3000' + X-RateLimit-Period: + - '10' + X-RateLimit-Remaining: + - '2999' + X-RateLimit-Reset: + - '5' + status: + code: 200 + message: OK +- request: + body: '{"scope": "host:foo"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '21' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/monitor/15635964/unmute + response: + body: + string: !!binary | + H4sIAAAAAAAAA41Sy07DMBD8FcsnCGnkPJqWHDhx5sQNIctNtqmFYxd70yqK8u+s2yKExAGf7J31 + 
7IzHM/cQ0OsWoZPeGQi8saMxKUfV0/7tPeUdGCD4G/gcwU+84erU3xkVUOaH+yaMQxOmgDBkFjDb + TQhB+vbUzcnCdhObDy7gwp5YLgRP+QAhqB6IhQ6aqPN1Xa4f64qg0aDmDfoRUm7VEJuS5E/uJGHu + BJ4lcbGzCrS7TMhEhCxThNIY1o1e257hAagjamb5IUkymt16UBdvvBCFWIliJcrXvG4K0ZRVVueb + TVU+CNFcVN+apcIoeCs2Vb0tKyFEyp3vZfRRFvk2L8mF6/Re/5c4ulDGyIAkRv7cvSbxCyWlL449 + K1QkCKdjfJ4BYoJMGfBIVXdE7SyFN3PrUO8nqcZOk+a9MoEe1bj2Iyq7HYM2YNtYmBcKw7Zm7EBe + 47+lAGcZ85P0ExRFX0bHN2rrZBfFXNmW5QvVf6R7UQIAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:36 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:35 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - NclXS5F5t+kukUaODU4jY2oSI1KBdPHFdFhJZNfbXLWDOThxbCLlKKmYvikjdDSg + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '0' + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/monitor/15635963 + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWSknNSS1JTYnPzc/LLMkvis9MUbIyNDUzNrU0M64FAPTfVWUfAAAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:36 GMT + Pragma: + - no-cache + Set-Cookie: + - 
DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:36 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - RL7BSOiWXeq2P2iJbmiDo/2BPpcpoCDzQceVuBkp6yO348trcqTrfm/pm8rvZRoT + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '0' + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/monitor/15635964 + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWSknNSS1JTYnPzc/LLMkvis9MUbIyNDUzNrU0M6kFADNJFCofAAAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:36 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:36 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - /Ib6MMQTHlX0/jTb6tlEMzSZs2crLqjkGjYkoQ/zb0RHtMaXT744DZRFpy23W0oi + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/api/cassettes/TestDatadog.test_monitor_validate.yaml b/tests/integration/api/cassettes/TestDatadog.test_monitor_validate.yaml new file mode 100644 index 000000000..40ee0aa01 --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_monitor_validate.yaml @@ -0,0 +1,161 @@ +interactions: +- request: + body: 
'{"options": {"thresholds": {"critical": 200.0}}, "query": "THIS IS A BAD + QUERY", "type": "metric alert"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '104' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/monitor/validate + response: + body: + string: '{"errors":["The value provided for parameter ''query'' is invalid"]}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '66' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:29 GMT + Pragma: + - no-cache + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 400 + message: Bad Request +- request: + body: '{"options": {"thresholds": {"critical": 90.0}}, "query": "avg(last_1h):sum:system.net.bytes_rcvd{host:host0} + > 200", "type": "metric alert"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '140' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/monitor/validate + response: + body: + string: '{"errors":["Alert threshold (90.0) does not match that used in the + query (200.0)."]}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '84' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:29 GMT + Pragma: + - no-cache + Strict-Transport-Security: + - max-age=15724800; 
+ X-Content-Type-Options: + - nosniff + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 400 + message: Bad Request +- request: + body: '{"options": {"thresholds": {"critical": 200.0}}, "query": "avg(last_1h):sum:system.net.bytes_rcvd{host:host0} + > 200", "type": "metric alert"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '141' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/monitor/validate + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:29 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:29 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - PcnVfOcEtqolY6fi98GEVSGXOZZkwQSBbl/twLr2TucYRfYyGCLXvKm6pTUNQt1l + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/api/cassettes/TestDatadog.test_roles_crud.yaml b/tests/integration/api/cassettes/TestDatadog.test_roles_crud.yaml new file mode 100644 index 000000000..3e4250ee7 --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_roles_crud.yaml @@ -0,0 +1,513 @@ +interactions: +- request: + body: '{"data": {"attributes": {"name": "test_role"}, "type": "roles"}}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '64' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - 
python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v2/roles + response: + body: + string: !!binary | + H4sIAAAAAAAAA42QQW7DIBBF78K2phpgbAPn6K6qoiFMVKQ4tmy6qCLfvUMSRV1VZfeZP088ripT + JRWvqn4vrKJa5zNvqlMlS7DBjph61DiQ0cYwaRo9aXB+8ARjkrF0qda1pK8qiwK60NRAkuqh0aRw + XJkq5wPVBgULGqxA3swQLcTev1rnbB9eACKA9Kc5l1P598LeqZXPVMt82T7LcnvFwutUtq1dtXi3 + fH9q/h4/ZHOA0+Ad6Oyyb7JBZEfSySEg9xyYrdq7vxCIaAb0R519uv3XA2GSD4EAkwNU+8cu5wcB + gx0DeQEAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:58 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:58 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - pNNj5PhODCVJlRBPEhZP3s9KL9kvFYv//TnGsiPp+3AqL7R5kIW2JlCWtfMcXeFn + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"data": {"attributes": {"id": "29274b54-46a1-11ea-a78a-03868a07b274", + "name": "test_role_2"}, "type": "roles"}}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '112' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: PATCH + uri: https://api.datadoghq.com/api/v2/roles/29274b54-46a1-11ea-a78a-03868a07b274 + response: + body: + string: !!binary | + H4sIAAAAAAAAA32QwW7DIBBE/4VrTbUsaxv8Hb1VlbUEoiLFsWXTQxX537skVtRT9ja7M08MNxW5 + sBpuqvwuSQ1qnS9pU43KUQR67Cm0pKljo41JrLl3rMG6zjH0Qc7i5VLWHH6KBAV05amCRJWx0kYU + y2lNXFIcuVQsIGhAwXyYbkAYWveO1mLr3wAGAPFPc8zn/CpACEB4BPZGrenCJc/X7Tsv93csaZ3y + 
ttVVlY+en8+i/89H3ejh3DkLOtroal0vdXvWwRJQapNPCdXevEIQkenInXR04f5jB8IE5z0DBQuk + 9q9d5g9vMjR5ewEAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:58 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:58 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - zDaLXgTwOglSG/+LeCisOhDwAOr7D4UzTY02i97kQg3V5W3f2nMLfChR6yLoaPN1 + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v2/permissions + response: + body: + string: !!binary | + H4sIAAAAAAAAA71ZbY+jNhD+K1a+NkQ2GIzzbdW7VifdSqt2pX6oqsjYJrFKAAHZvfR0/71jQwh5 + IW937afsksnYPPPM45nx14kSjZjM//w6abalnswnpa7Wpq5NkdeT6cQoeMRjKvxEUU8FCfUI0bEn + SJp6gqVRhBUNsM/BVjRNZZJNo+vJ/OskF2vrTqi1yeFLZeoyE9tF9/ilMm8m00ut0JOUurZrKV3L + ypQNLA0/fF2ZGu03g5bmTddoW2xQs9JIJCYzzRY1BXoz+h2JXCGtTIP0m662zcrkS2Rya16hD/CG + qliiolqK3Pwj7ALgRDRIFeAyLxq0Em/gM0f6S5kZaZpsi5ROTQ7b229hhtyeTC6zDewVwRYyu45d + e1OLpZ7Ch66m6G+9nbqnByuuRQ42a503naPBy+3c1vCSqEiRyDL0ewMuRKWQcAANNlLPAC1ZadFo + Gx4fk9gj2CP8lYTzIJz7wYwFNCDsJ4znGIPxsio25Q77X3WuK5ENgtKFvgBkK3hc6RoiKZ33VGS1 + /vZteoUfyk8xHvLDh3+ThDKCueY8ZmP8qLu3PKVI//4/liCyWJdFDlGwkbxOkD09LnKjPibH08vz + FH18s+t0XLDggrfcg/cpNnmDnntCoHSTS0tLYUl9Q3TDiAXR/xTdUEc4jELfU4r4bfZzHQZewEJ4 + nMScYT0W3axY1gsgqlqYXOkvCyc2J2LwGazQb2CFPlkrl7DW6kgPDrLeOnVpUlSoLtYawVIQUPi5 + 
hlj8DLmcaLSsRA4stoEWKDNrY/+RohSQ4lubUO0vUFoVaycrbicGflOlQkImVujp5RP4+5T2vt6M + cKa/FRlowLGt+2qQ2CtRo2VWJCJDtSxKfT62AXklwTzgc8JnzGc0xOdja7d3mrYWigeyNvKlxEEk + D+MKIp9wzsMIVJ37F+O6LpRJt21kQfPPh/XZGbWBbY2uRxWUs/XtAtwFtc9WkEObPscJd3QquNDv + Y3rELqfOLth/VEAK9PELaLo989AvJoPoD0MIR4wTRlTZiE9RumMcfA72dyrqoJxFS5ueA7Bkx3G3 + oW4Xp8u3uzLr3Wl0hTY+mxEaRyS8hzaPqn0Eh36EsT7kTeixNMApSH6oWXCRNxkccotGmOEJ1JUF + LiSf7SH4Ct+PlwanBYANv3WMrGOUakh0GygboIEyTG19kCOTtiF9N7VGdamlSY0E0y3aH31XIKfx + zOd+zPg9kD+YqUwxyWNxlKmRxwOGccjjyPetAJytv+Dl68W7JflC70i+SB3Jx1L2fEqcq9AOwiBU + S2i5EvlSQym1S6lutT4eP0KxISl3cWvzEsrAVpfPSviBgvdqf17Cp9+j4RTPCZlxP4pIcA8zXIQe + EPGYECGpFIfJyLyIRVCd40TrdLQ0H1CjNKWGWnZUxVtKvAytLsn4ERVsAu5WQLLIU7PcVK4MBzlt + y2lbR19W8G4LVWGL4aI66A2uSXT/ejeL9Mtune6AuE2KbfTZjPMouE8XHpVi6MtEIDSc2cPSDLqz + gAUiJFRKZav/a8JQ9qCePcQ7CHpI7lUCV54NtLhf7ogLd5RtPZu+Rwf+u0oOeAA9GI848+86kh9W + AZZgHOAjHnBPJjoJU0ZS5qc38EBUcmW77EsseBrY3CIBrim3AtA5h79lm/qXDlgLIJ9hilk80uZY + mR/s8zs7WCK4r5X2vUgmrsfhHucy8vwkEFRgqVJsO5LxRCo3CQwOFq24LURpzmJYoxdnh352Imib + C2t3Ece2+beV4g7K+ihv+qMP3J2Ayj0ceth/xfEcWgxMYDIALbl/z9n0qDpxxhUwD3tCKzYAVTCd + xKBQJAovg7p0gwqoXNbaziNGeNmOM6CIf95bXUK0HZwguakb6Po6120LaE/Ecwgyzw9fiT/3GTRp + swhoGcb/B4KK4zSKAztYUTDisbQUjAkvCSimOtRc69EWTYl6lRQwP2ob8FM+fugNrnHQTdj2/s4h + xGEA9Uqo7WFDqH/CkLAR5TtctpsKdul7pj5uqs218VOHEjlGiabKD7BSiYxGeTZAaSe+R5PK22Hq + iGVTtauB78Is8jml51l1CbNHz4wONRjrHHErjYUfYZbGER2TvAFqnfDVK1HZ+nUUPPT7zuJSbjo3 + A6pBFwGNCozFstOpGCjbEesihvkDCD6qb5RSEtEY2rI4IYPsJDAT4wLTBE7kMQTXRW4aqGLdcOwU + t+fu65syc+drJC+jVxLbvMRsxij1+chUeLjkj8hKGmuoPDR0Jofo8CClOKYMi/HatEdnJCdvhadN + wylaw6VEO4VVOtNwVMBMCe4f1AbGcjeiR8JZyO00+3yGjqP3aH5SFTMVpvExfjrhMg5CrDAdrel6 + /FTxnjcGLmJOMnO34W4KjT50lleL+1rbe5PWbe16enfJMrzqmKEndxOC3k2zgrbu8CJHwmy23pQl + jFpqmM/oCibxriUX+XYXDfi5u1npV9oPbqBLhMuZYj+XH0xboRtwRVKzKmCws0NhcKtzJUWgaA8w + qMhIeTQe5AsS8te3fwGEVceNZBsAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 
'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:58 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:58 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - aswcxBk1J00Iy18+kQFKF6EQfzLy4sWD4ILciesVMX5rWDYniffEYH6qbK0qwOgw + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"data": {"id": "984a2bd4-d3b4-11e8-a1ff-a7f660d43029", "type": "permissions"}}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '79' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v2/roles/29274b54-46a1-11ea-a78a-03868a07b274/permissions + response: + body: + string: !!binary | + H4sIAAAAAAAAA42SQYvbMBCF/8rga+MgxXJs+VZY6KlQaG6lLGNr7Ig6UpCUtO6y/73j4O1m2U27 + J8No/ObNN+8hM5gwa749ZGk6UtZkRwoHG6P1LmarzBou6VrhpjUqN0WrcimpzlH2fY5Vv90Kowqx + 0dyLKQXbnhLFrHnIHB5mOTQH6/jR2Hgccbpfyl+CPduRBjLwsesozrMMxS7YY+LR/ONubyM8m4HB + ninC5E+Q9gTY2tGmCZKHs6WfgM4AGZuAzhSmtLduAOvm9gB3vKHxA/gwoLO/cR7AIpjAeJZ0PsEe + z6zpgH4dR9vZNE5gqLeO7T1bWMPFk3XdeGKvwBbGec48+xRxoBV/KKzgB02rS/XFxAM67jmQS4vQ + 1XJPspGXBN8DjiN8TSyBwQBeAF0ZiWum1QXCRPN5NkLWuRS51DtZNkXZbIp1VahCVh+EaITg5iH4 + 0/GJ/SdyFHC8Ospyes9kA5cDRb5kd1HvcYz0+Lj6Vz6MFv22LgTnw7ATSZqjUWHeFkooKkkTbW7l + w2Dct57XjPe8kHmdlLu/DW9E5HUQnvVeQdK50MxpJ1VT6KaUa12WsirfhvRy7BLehdNi9BpTCqf/ + UVJKya2qu9zUrbyiJNtaaxSqLYS6RengnU0+3GL0eXl+F6EnrRt8tjtZz3xEta6U2ugbIboe+U46 + 3x//AEv3RHxuBAAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; 
report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:58 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:58 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - YCJuwY9AAFMveejFq3DmCuXNgWrXpDBQxqXi3LxQxaHO16MK3yMSWa14TOuRlDjy + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"data": {"id": "984a2bd4-d3b4-11e8-a1ff-a7f660d43029", "type": "permissions"}}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '79' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v2/roles/29274b54-46a1-11ea-a78a-03868a07b274/permissions + response: + body: + string: !!binary | + H4sIAAAAAAAAA42Qy27DIBBFf8Vi2xANNn7AOtvuvKuqCMy0QYofAtzKsvzvxZXbZhFX2TKHe2fO + TIwKisiXmYRpQCLJgK613tu+8+RArIlPRsBbUWVATWYqyhgKqspSUZ1x4JijQEwjq0JwVo8BPZEz + 6VS7xhnlL7pXzvizQ2UiZqwfrmo6b8DpF1hn6BtnhxDb49/6gonS9mrDlIQ++bD4mfzlHSPfxMyA + 644pMEFBUAY14zITMmdHkeeszJ8AJECE310/Dvdrt5U2BduiDn08qPnOD27EZTn8Z4lzzgpeNdRU + mt1YYroSQgHXGfA9S23f2dC7PUfP2/ghQz9ZO36KmlWrHyiPJeepKO/7ua180M7r8gVVsPQtTgIA + AA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:58 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:58 GMT; + secure; HttpOnly + Strict-Transport-Security: + - 
max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - nwn8Akm+cp12Jtby9xyfYjHWK2KZDWf5LxY+SMa+2NK6hVIBcKsVHXjynaTEG+o0 + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '0' + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v2/roles/29274b54-46a1-11ea-a78a-03868a07b274 + response: + body: + string: '' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '0' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - text/html; charset=UTF-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:59 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:59 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - J7vOWsxZd7Grxzg2TIaQpn2nGjrOScgI4Kwzur8V2oOTYInX6xbVT4leinNkGLPk + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 204 + message: No Content +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v2/roles/29274b54-46a1-11ea-a78a-03868a07b274 + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWSi0qyi8qVrJSiFYKyUhVKMrPSVUwsjQyN0kyNdE1MUs01DU0TE3UTTS3SNQ1 + MLYws0g0ME8CSiuk5KcWK+TllyikVmQWl4BZafmleSlKsbUAJx6o2VYAAAA= + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 
'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:59 GMT + Pragma: + - no-cache + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 404 + message: Not Found +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '0' + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v2/roles/29274b54-46a1-11ea-a78a-03868a07b274 + response: + body: + string: !!binary | + H4sIAAAAAAAAAw3JTQqAIBBA4asM7gVHLa0bdIZoMaJFEA5MP5vo7rl68L1XFRGWU40wq6k+dOwZ + hI8CrXawwafOa98TasRCmkIkbVzsI5mQ2oaVBVg2cBYjOqh8NbprVsv3AzAdUstdAAAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:59 GMT + Pragma: + - no-cache + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 404 + message: Not Found +version: 1 diff --git a/tests/integration/api/cassettes/TestDatadog.test_screenboard.yaml b/tests/integration/api/cassettes/TestDatadog.test_screenboard.yaml new file mode 100644 index 000000000..af678df84 --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_screenboard.yaml @@ -0,0 +1,938 @@ +interactions: +- request: + body: '{"board_title": "datadog test", "height": 768, "widgets": [{"height": 57, + "query": "tags:release", "time": 
{"live_span": "1w"}, "title": false, "type": + "event_stream", "width": 59, "x": 84, "y": 18}, {"height": 20, "type": "image", + "url": "http://path/to/image.jpg", "width": 32, "x": 32, "y": 7}], "width": + 1024}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '313' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/screen + response: + body: + string: !!binary | + H4sIAAAAAAAAA31Qy27CMBD8lWqvdYjjhCT4O3qrqsjgJTFyHtgLKYr4924QcOxxVjOzM7PAfjTB + NuTII2iwhowd2w/CSCAgoLHNOPgb6KPxEQVYjIfgJnLjAHq4eC/gwCxCy2ollUykSmT+lZVaSS3V + pixkneWfkoFkxwHnxq3cqsQkVKek2td87kfrju5/k93bpEPXdgS6KmsBs7PUgc6kKh6gRYqgvxd4 + dnoGf2m21Vuy3Qk4XzBwOyDTRh3Qo4nIecj1PMcC3l2xiZPhspDNcBfA5Iy//oKu+R/dpnU2vOJA + TSReomfSApfg+dwRTTpNJ0NdSmPqetPi5jS1/OAVR8l3nFw93Dkfm6/gaf6Qwf1HwDrcriyLQt7/ + AFRh86+5AQAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:02 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:02 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - kg+/Cls6zaJcT2blJLlU62BwgGePGdpqSwWrJ0xEIvzmSMWHXxGNsiyEzBPJ1a96 + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/screen/966440 + 
response: + body: + string: !!binary | + H4sIAAAAAAAAA41Sy46cMBD8lVUfcgkzGIanpVXyA8kptyhCDe4Br3itbWayQvPvaTPMHKO90XZV + dVXhFeoJjaqcdj2BBIUO1dS+OLIOAjCEqprG/gPkGXtLASiyjdGz09MIclz6PoCGUY4Us2MRi4OI + D+L0K8pkLKSIj1kiiuj0VfAgWHGka6U9Ns/oYPK3Q14XfDxMSp/1/0XKp8i+sarZ1wpKW6x7z909 + djiqLc2om27q0R6HhW133/d03fuxmQZvBgcP+7nDXn5sML7Y67jn07ZCNWjO68zCFZjJi9/vsGnI + 2up+BAxj8oXMnuWOpwF1/wk3uvGdQufcbGUYWmoWQ8fW4IV/ivGWw/tnGKVNUtcoMMvyOj2VuSjj + tKRznKozlqn4Zl+T4ot6NeTMBLcAOtJt50DmWRHAVSvXgYxEnGxDS86C/L0+Uj9a3Dlp/qSkZQDv + CxmuHRy2VhrqCS1tjfkqV+j1hSo7o08SXf1uBke89S/Igve5j9lXThcaXWUdP52BQSssxlfks3P0 + GV0XuinUA7Z0fJtbXvCIEIunnVO8qbM/FvfDLr7R4PYnAP/SyixLEnH7B4TDdTvqAgAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:02 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:02 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - u2B27rQjtu8TuEjzrroc8ae3xeJMLmsxU6SiAszW1tH+EI3X0cOP819eGNRqlxzl + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/screen + response: + body: + string: !!binary | + H4sIAAAAAAAAA+2byW7jRhCGX0XQIZe4qd4XAoOsyCHB5BLfgsBokk2KthYPSUmWBn73KZKiJFsc + j7ZYQEKfjHaTXeT/saq6uvy5n4eZc5NgarMo7/t/f+5nzkZ308lo2fdjO8rdDYzk01kWur7fH9jH + dDAng/qqgZFMcda/6UcORtLHIp1OYBYMFGkxKi9IkiSKExiAZWzhIhiimGKECWL4lhIfUx8LjwrJ + 
DfkeYx9jmDxxi7u0nFvwe2SHY6THQxguh+olN/e7C8DOz/0ozW0wKm+/tnloJ1FlwCQNh9ORzb3x + DGwc/hjZwkbTZPjJC6fjcik7Lqf9uZ7W+1hN2z7BZDYawcr5nY3GKTxckc3KVzItb17/zYahy/O7 + eqgP0+DiucvSOC3tqee7sU1HB1iThtULHBbFY+4PBrkLZ5nzkszOweysNHlQ/zogIuRBYLGVUgWC + GYUNFcbFVESxNQL/kH/g+rvoQ+aKbNp/vumPp9HapH0FGPUEk0Y2CjzfHMeBNhJjvsdBKWNJxg4P + 5VDNxkskiEGEICJuCfYZUCA8w7AiYh+JJHtCw/slmjC6RqJe/XAk4myafIWDWtE1vJeVvnXVw/Wm + 2hIZxY5Jp1ggcUgiIbSiYYBjQhR9W+/Xr1d7BDNGT9XbYGW0PF9vKm+J8cELcOUxwYSQ+3pTRZEJ + V4g/lhxVLqBa/XC9wRVkbuGNw2CWTdzyK9L3f6qm9T6GP1fT/i0X8C1rDkfCxIxFlKg4lrEMQqMC + 5XCgA6OpFoJHhyCxVcB4hBEjGwWOdQHgl4USr5F48T31b11ewIzWUAAccO4T7jFJONH7HCgZo2Wk + kNbNd18veTgHXSh4MxS8UIDytQLHcyA5L2P4i5TgJQfrGNwr2nmgCLNbIn0KcYB6kmNNWBsPDmXq + HqlAN35Blkt3PJyfGrQoYE7mgUtOXvNQuYLeb5B69kZ2OZ0VvcjmwyoL7cXTrPc7ZDm9cJS6Sekw + mmSyGv2lGu1tb/Brc2W7ayG3RPhM+xRSCoqZbkHJQnjJ6AI9LNwGpdLqw1HKR3buHrwHG2SR/VqA + +aua1PujmrR9rMumGW9bcnhwiSPnMCeKsTBmOo6jgIiIhNKGJLDKBm8Hl3WG/+rdG3w6REJcHSKs + fKw9KanQLXlKyCiaPzmU38+2EIHVh0PUxac34lOF0lYBQ89Aie7Fp/f1R9wX0ifaM7BzoS3+aKE4 + EswhmpZ7yXrXywXtQttldr2A0q4C7ByUyPVRor4gHnhqTpoYvVNASech4oKj4nGygxLpULokShsF + 2BkBjitz3SyJ+0xC7cUjUkFRbz/hFjxFUZohEay2KIHVXYA7P+GGamjllbYKiJNrM7AJkuWW6GVN + 9l0TbngQ5mMGXgkz2YKSNhZlyxwtk50AB1Z3KF0QpY0C6iyU9st8740S8QXzDJFQ5N73SuE8QveL + BQoe1I5XkkeUB7u0++20Gz7mRgGOm4L88WUh8Epl4e66Xgn7lHrcYEGaSL2TK60Sg0Q2QUkc76JE + O690Qa+0VaCp8R6PElMthw/v55Wqc0sKuRLxKCWEtRSrrXlCNkrQkH3aoFRa3aF0EZReKcBPL1Yz + pa4Z4OoHgVNw4WnDTdv5VxrEaKKfEKfl/qA5AlcdSpfZwb1SYJOtnuKV1DWLAdWDEO1j8ErEiM3Z + 7k6AE8UKRUGC8qQsP29Q6ooBF0RpR4EmWz0FJXl1r0SYL7Snob2EtXRhzMEjOQMlSrMtBjDVpd3Z + JVHaKMBP38Exha95cAJeCftcQLrkQX8R/Ozv4Fhg0ELPEX3cHpyUVne50qVypV0F1Ok7OCavnStB + bxhk3sJThsIxXAtKYokoC9FytuOVwOoOpcuhtFXgjLoSk/Ka1e7KK8EODiuPQjMZa+k8nasYJWOG + VnSxzZXA6g6ly6G0o8AZuZIUV0cJkj5KPMV1a10pLVL0afiE8odgByXRoXSxXAmXG59GgZNRwpi1 + oHR8E7Op+tohVhmPU2g4ajmWXT0t0CpaoTDeVK2r1Q/3Lq3txHUz+3+8ibl5vRw2upIYdWrHqjYM + atJ7Jenj9YZmRVM2rUPzKljDcUtp+QEiyXjCkNBNPbBe/XC9v9U2vP4/hv9lEzPZKCA9BbV9stku + /fP8BaQWeELiMgAA + 
headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:03 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:03 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - Z91NUpPIZnIQ9h7lBFWBkEPGVUEsn4/i71imPPwrChu4RPI5uNM5HGuodISK1HBR + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"board_title": "datadog test", "height": 768, "widgets": [{"height": 20, + "type": "image", "url": "http://path/to/image.jpg", "width": 32, "x": 32, "y": + 7}], "width": 1024}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '172' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: PUT + uri: https://api.datadoghq.com/api/v1/screen/966440 + response: + body: + string: !!binary | + H4sIAAAAAAAAA3WQwW7DIBBEf6Xaa7G9YBc7fEdvVWWRsLGJiEF4ozSK8u/FkZpbj7OaebO7d9hH + m93IngOBAWfZuji9Ma0MAjJZN8Yl3MAcbVhJgKP1kH1iHxcwyyUEAYfiYnIlrVBhharC9lNqo9Cg + qnWHg2zfsQgsxIWuo9+8vaYq96eq3w9lfI7OH/0/kLZuB/kh1Qsyk59mBtPrQcDVO57BSFTdU0zE + K5ivO1xyKD0zczJNkyzPDcfGn+1E9SlNpfQPo/BFaZWAcmwv4AfMJviWtr88Y/D4FrAtv9O66/Dx + C58hH2Q9AQAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:03 GMT + Pragma: + - no-cache + Set-Cookie: + - 
DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:03 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - u9VEJv4YNx+Fl9tRGJNbGm0+76jyym0t+mec2t84PhoJYEedil3ajyEhP7U3EneZ + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '2' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/screen/share/966440 + response: + body: + string: !!binary | + H4sIAAAAAAAAAwXBWw6DIBAAwLvsdystsvK4jFl266OxAQViTNO7d+YLMdEh4yoQ/DAY87hBbnFb + eWzHBgGWWnMJSuVOqJKkedk7Th9VopqovK9LIs/nybrxvTdPTUhs0b8IxSFqK856Z8VqnHr4/QEp + PQZ4bwAAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:03 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:03 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - o8MFmk+4Ge4vq85ax+5C1nfQs0lbtaPPYZrpqzeG6IsYGNLGMu/G7PbJElpjPS5i + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.22.0 + method: GET + uri: 
https://p.datadoghq.com/sb/fasjyydbcgwwc2uc-3412a5ac759ea5d85527d87987d725f3 + response: + body: + string: !!binary | + H4sIAAAAAAAAA618aXPqSLbt9/Mr/M6NeNH3um3EDNWnKh4YMBgb4wE83PeC0JASAk1oYOqu/35X + DsJKoVPV0f06ossHkFKZO/ew9to79e3btx//q/d48/ox7V8sY9f5DZ+vrr4t2P8uLvjfxU/+XogL + 0r+Lb//APy8u/u/FP9iN7BP/j/h8+p1d9w/6I/3lG7++xC8Wt5xuptex+/F7Og7/++3bt9ncM6ZD + I17Pe8l81e2Q107w4RmTblvrW4N4eXO/9W9777Px/S15SpTJw6ZyuXuqv273ml0fGeptxZm4u+1N + YxuN9rX+zXK0thvu+PLxsvbZVluW95x86luyCmrkedwtPb5M6/ezVxJ3HH3Um5bb1cT2VvtDLxmt + Vo2nWJkfxq24HiuDo1b2DENTdnHHu7vRb6vOc6VLplq/7M6Hvvm2H5uVm47v67WoHpRmk+fK/WRz + 93Lw7m67u9fqbjN4cNZvH681o6NGL23r+bDziTHaJ0dba4zn0VuzHr/6x/tGd1SrrEn1ZXi/e2o8 + 9h+UErnxLPvtodX3tvsB8YYvnZve3cYb2I3Xl8780h82thN//LbRJiX/cdwYPQa7XnM0JBXTvY0+ + Xv3O23HuDR5WrdrU+LhT42Q1fHu8Hx+quvn0PjXVfi9uOq2n59mWvPXdz0bF/nzv1ubT3brvdj8/ + vLA2Wz02++TOcLx77bYzKe+d8ms4b+/u/PJb9VhbObv7y7i0vlOrDe1+5F3udvjXfSku26O3lbVT + PrT2w/vwzX7s2v7HxAqbz4/vSxIOZ+Nuo7R2qz1ybyh32JzdfLDvq52xW7aWny/RbV1VtWlveF9/ + 7ijbj+iOXLb64Ybo3Val1X3pR/vGQR8oj8dtr98wjfWb82zuV+XtY/nQMNdLdf5y2+oHtePo/W1e + 3ziD7nSvBNHr/masldabwa010Bvhx6uxO0xeBi+zbjm5ORyf9mYQreef+uBJ6fer7mXd3b33no/l + qtE/2rHz9EGi3bAz7/fKa3s3bd3ote3u8PS2bES7sjPT5k/h5PGtv9sv/WlpOGuFu2Wl87R9XJUv + x08vyvagWPOnSTxrzAc1bT63Bs3Zqn0MJ0rt5XKvKqPRXBsumzsl9ieNsP+y2unGg09q23GshuWk + 5e+XQa1pd3qPq9JyuHkdtFX7fm+ZtXYwdsZWq6Q8Jh93Q3IbTz63uydS8dVw+Vnq3u8acf9z3BxV + 9NJ8Po9J83M3/dhudlNnG93V5tpuOQ6742HVVczdx8fDvuMt97vXUfsxiPztw+X7ZNIYuJaudFv1 + ZLLXSvZo0n5ckvvOXdtfvbUH63JiWq2P+/Xx45J8LG82d/ry0TKfb6ehpXZettXXxqe+0Y2tv060 + 6mB873daL/54fRk/zh5J1zWHw/bTdP7RaLY3/beGMlXdUav+Pm/cHkdKvT9z9ivl2Zrd9tej46Vd + 7R7uK637Q2PW9LFc7Vgiwe2s4lUbh2jQVA52XFV3nZdRxVRuhu/PvXLb2HTazsfIv2/U3fGH/XgY + T97t+5dKvf1gadbHUzJZ7efNzXCtPDUqJSd5eTx2lFm96m7edW32dLhsNEy94j6HjWQXtt5Vcx21 + rAel+ekF28++ue3bmmpUnvfNQ3BoWxP9c3C4G09e67q5K++G/ueqPl19JvZ4+3R7+/jgPW/89k1S + arcr2mTQWvrLl+HNoKQ/bQJtqh5Jff+s7Faj0bH7GNdGjkkMZ0vGSbnxeOhMWrXOdDRq7z5Kphsq + 
l81u23icRsteGJrx893xw9pVw6bZj/f3+uOlpdRiYhnj6frzZl/ZxpPeltRG6qDddCuzl6G5HYXb + dXXS3L+ZRE1eKtZ20+7X7fbbYHzT7e7nw96l0/yYboNgOC/NH+Lqw3J6XNXLy67+urZft55dfTNe + N8/u+8xvzVpPxCzfXJJXd7ImQ8vue41432h3gm1of9haYJfGyqQ8s1zjaT6y7zqP3abuvL22+p8v + n7eH6n7aPQzq+9mh7XsJ+fBbVdWL1fZQjVV9NVUe1c3t42wXquOlEfe2Wu/jslL1DTIKX7xg8v7o + jZfbVbt983DcrHvtw3ZXS9rdeb9Ufuvc9cr7KN63BuPxyGg+x03ycpPsHz7DO7vjr41Le3rZebU2 + hmV7b4dOpzce79Rlr/WwvtESZTx676uzYa2TdF+9eLJSn5aVG3e087XnT9d46B7V5sx7iEvmJvq0 + zP7bZU8bzJ4+ZrvqanVM7o2ny3jSn9Z3D9Ym/nzvhWpUCab71f39uz83799u3p7vVPKkl2pzUksm + JdXb3M/qG+v4rNUn66A6OvhbxxtVnM9uzRtVO1o5CGKvXNn0w6ozM0YruJr98jl5Ue8G7WRkffjt + zWetu9y4W+9xMxxOyW23NByGdft4/2aTY3vQqzcPD+pNa3xja6Pd7UPVne7NTrtZ6Q6O7fGr6ajv + b+t2zWrsjnGyfLWmbnf/tB1Hg+HhraOPKsr70+vNwH37jPvEDD/8u/G+nrwqh/H0ttl3HZiuVb5r + 3EzvLyPHmq7M5wa5qW/vys52Xn/SrJ6hepVDpdauPVTnmpL47xvNar2Rxmy0Is/PD5NaqxEc3pTO + tKyNngLbcg/3id7p7EvPSc14rj1uXw/2sT1zq5+lw6wyXnUOPVNfmrP7wdOxVD4uH97etrOD1ZuF + tfl82mq/OK897WWtz0KzZjeC5mhrv7yU/fbtON7onSev349WD1PNaaq958HTZvw5TuIh2Vv3RuNt + eDmL6/cvhtLY1artwCRu8LZqlnc7R7u1bOduQsyHwUDRnw/Ed2Orr070/XT52tjWP9fN7qM9HLeC + 6Yv1dHupvs/MWmA8d6eP9mvl82bbrb7Br18a7dhsjuYPL+qDP3q7n4y7N4bRqT1Vjdlh+vG6MWbb + 3v2wDhWM9O5RmT1ZzlwrqXpjFPUGT54zVDof8a7yuH+fOb59Y+laC2Fn6yztl9tdcNO9XWk9z05q + TWUysldmt367v911X6uPowd1GU+ipHtc92rrwfgtnD3ays3lclXXu+2h0jDmb8F+RaqN2uukrlaa + neFWe7gz3uz9QKuPmo8rs/y2eZqZq/Xd7fP943QZNadYafxeKQfWxuvd10zDDof2R3lS9zYvl0m9 + 1/18fq7OHP31oO4Gnv3wUmk4Rn+oNV7U6PJYGXjq62f78aYx2JZmtYFVbz6XfbJ5ffLVnevctO26 + SfySoSzvN6RbvblvBx/Pxt1Dbx8eZ8lw3nLgmnev1nslHkxH991Rp7sceka/+tK439/0nNKmr46a + q17PDD7m7TuduJ/O+ObtAVY9KWl1c/sUfpRGq6fD/Ph5p/8KSHp1BRTNwTTFsD+WRDV++8YQL/0Y + AxKQr88XFwZ8leFbFzGJ4ot/nC7s8a+/bizl7vzh2N76YhkS89fvyzgOol9KpShWY1u/FkMuN9e6 + 74ovS9tStX5dKVdrbaVaWkWlkKh6fKX7Ibl2be9aj6LvFy4xbPXX75EeEuJ9vwiJgw/xwSHRkpD4 + +0V8CMiv32Oyj0vs+lJmYS6J1QtPdXFB6Gt+jJ9134uJF//63fNtzyB76YbTyr7+Aemd5PRvrg4L + d33v317ZvytkQ42WGhCV8W/NpEBUTJdKX7r1Q/ONw4XuqFH063d2vecvXOIl375nlQ03Gfb2gu3p + 
r98NOwoc9fALrvXI377/9sP2giS+sI1fvy/UJF5i72wdGuV7i9hfU43g+/uTH7l2LG3DoJduVSfB + M+ie/yjhoV/TwDb/gIrZQcy/26rhhUEwkYtfL8qVi/+6aCj4T1lRlL+xlUQkfrVd4ifxX8zE0+l8 + /vKfF38/SWUH5fJ3147P53oNtfVV4y//ye/+/a98cHz8UUofixlQOQh5BapF8G+ihqa9h7y+/XBV + 22NyYJt3pQYBpFOi39Jf+XKwDDHMn4uTWxQbbOEHdAUwDy7Mwp8KRfn3//0f1drfmN2+kDi2PSti + 3/xywX8h3jb94r/5N/QJ4hL2B1f8QveaffjrBfsTOYmVvebsdxJubZ0sdD/xYnGhlzjO7/9PDGAQ + U02ceIHBFz8dDLvA7jVJrC+naoiJxSSU55+VBNO37LRMNVodDoamW7udXkn0q2qtXFHrqt6st4la + N1r1eqVptJrtVtNoVupmld2cPnapeoZDNDUsFFyMEO2oMVkEdGLypP4/TUQsn2mAWBbfIq4TzLdn + l5uNCux7MQB2HxrkOQdxsak6ETltBFdvaFdmn8SPcOlYYfps9ntFqShXSuVKqb6WG79UlF+UynWj + prTK1UsFH5Tsc13fsE37z+6vXldb5Xq5cn7/ktjWMlWfZqMlZrWzjXgpJltWKrWvry0S55Q5CZ2s + hGjMQ8gL1HhZiv2S7cKIr1cB12UxjvTUilLw0GpFfJkKtCk+78WzThdQk8w+nz2QfXGyBDsVb7vR + qNWU31PtM9IoAAU7UN8kxhFG6se2eVg4dpQ1r39mT6mP9sNF3sontr70EQkuHhIS6Vy8YriTpsPn + 2qqGwC6mQi1aXGNHiz/SstMi2Z3NBrkKm6urptZin9Pn/LP6nNvU0klUpczI+JZhpCuKkbKPEUq9 + UFPJsR//Fb3+0yF+rtpiF7iLEeJkfzyxD9cu24f/I8Gy7DJgBefqDrdqezYNFWJMri3/ghX8geaK + 7ULsRXSVnvOn9pA13ZOR5Azu95Np8Ccs8lMxATTZY1NTSSISLgKAWhISTz+p59/T37m7tD3Tl2Zr + Ob6mOosYMEF8n1Fo4WJz9hst1VD4sy/Fz4yziIhD9JgaCUIb/ZOaLfO5v//+ewbaZIBrBldsEeEo + ePISVyPhRfoRIIMY33+78YNDSL3ihcD6f70Yefr1BVXfi6uLL7R+cXWCOgz2qTnUv9vtcpDfIZbq + lILQ3qr6ofT9tyn/18XUd2z98KOk/vavjqn7/tomEca8Yf/KDPmF3QU+opNlE86CvfRLAdp6vYUQ + y6/fv1bMESy79wTZ0hu/RM2KFRe93sXdywXNujim+3MwhrTEtK0ral0pCJO++gPwpan62jdNSfcA + x+xtqnZcN4RCbe0oUR37yCB0dG0i19LVKIZmhZDhYpOQkPp9105NrwrMe/LUDgFGCg/XsWottMPC + QBBZnJyvFPf9HYC6S6JrV/UQlBZAw/yLReA7zsJGFhYCjotZN9JHRHZMgJqtCHPYkkWs2s4iSjTD + pzhXXMz+0J/ZheyTmCCNvxU8cU/VGxEDq4lOIRvgXVxGp30N8EZXrYb6csGQvrTqSj29GDh7UeDg + aGqLn3JKnp0MewqdS2Dra3gQ5H7xMlqoVuomyvXiPUGaSX6+H42v/eDg9Nr22Hbb8YE5G3jNxQkw + tnG1vG6+wyGJgoXpxqmmC9GmyCOnJfBK6s9nVP96AhWqc7j+ibxy0soKi+ognBoVthzMq612u1xp + thqVZqNdlgK6Fvo76pqR+hOPOrXrxAtgDMTIjbFWrcBAKmb5rm4hZhuBt/J0VSeGr3ku0bMTya0c + 2WzsqsHPF1+uZAR8PiM9CRE04vyE1pqnm4FhLA13bboqnLrlBUt4SN0iK1/NTghxBVxrHCZ6nIAX + 
SQKEbGQFMFffwLT45H9BflqsTMgvoj+YPc1o0zupZsAHXGsQFRUiNXVbT9EYm9MfSCpCpgubDhz/ + D5wJl5Zkh2mqhvhAPANhDQ89xdgvm2WO4dwN/BS/cOuzPeZukJbmPU5229i1CL0GXAaFwgvVxEqE + SVRPniBG8OXuL1KRnJFFiJ0QVynX5WL5g8/AgrSEMRY/dbKSFsFR2LEfRteE8hWc67BCNVgutpWF + nsRw9vC8h3Rj0uwAKuHaEbUDOVtMh2MIWkyX/anVauVGraVfGS2tfFUuk/aV2myqV2UNNqcqNa2q + 1LIbTsXEcAsH49mhjLZiNlpV5cqoGq3MUFq1ptRInbQJqWSHYh5+F2JP4R0DQj2epGetclnVa7p6 + ZRjlCh2vddUm9eZVo9lQm6aiEWK2fzZe6AOjRZBfdoKtmqpWVVKTB2xdqdVmVa2Xa7puSLmlaoCA + zA7QxggVzcAAVa3GZ6SWTRMSMxsNxahVlcr5jIJEA7gBTUGDO5ybnR2xrLYrxCCVq4ausTW2r9pt + vXFV0apqTVV0w1QkR8BkBhshVO2KjLPdbBtmE/mzSoxmZkC1SbQW1llu1KUBT4qRxubs7GrgLYy6 + 2corB9Haegs2YSg1DjmE2oPm9YwvIoENBZkZKF5SrTjJDHZ3pWm1ZllpkzaIkbNdZHkeo2ah5DGf + 8C+cqKmThlJv1CvyJlavqs06vobWNhWJRjK+FJbpWnZ9QmPLeY0F2V+pKmiT0BuSsDJjiV1lYL1g + SEwvZwRmS600lKbZakj2lDECstedhOFy03YyJBQbHuSR3m6psNOsMTSu2tWmotTbCI0VbtdiKzLj + UnQDoCSbQlNTFJi2PFz7StcIigfNMlRI2tmTmpzJsNYiuJ7ASmUH0q6aoGxQqlFlo2ITY6zNge+w + PLFGRdeVaiO3TsxTa7fb9QZMrH3uRE4wMbsTDdgkcBKRl1i/appVxYT21Umzyq7/ogDRUxKSBYqG + JwLzv9kVrr0PVI9woCoEDBqbqibWsIBvtyzkhcYXVyIuIvvAD4HEuDNnY4lf4A6iRBpQaBStLyBY + Q68QsLJ3qK56hG64KnYT2JCywl7+kiOdP61uACgDFNketcg0WxVP3qlgOgGA8Ax/t4iqiwhxJo22 + 0uxSnwV1lEwq4N6VDh8gE13kEbjKphFtnAUgdQSqHvHXl9Z6mgMDNgswO5ZkaJbvW4ituuMnxgKC + AfrPigIkkWdHEoEE4wHRgRwD4RWJW/ZqLTEMzhoUSsAlrk/THaKui0SB0G1zsC7u5qtje28Cc+Pe + 7MOiAwA+wZqxJJ77nxithWsjgp+IEzGcEOYV8iMvvqKFsiS6OtM2iiCQPlk7IgnilLmZITY9Ow+D + ZmUAxZAum2rkJ2FOEVQPCZVDEz6P7BYYyl0UDiNoCkY4cEZIWvAGpENe5Es/YogZWCWG8kj5GZef + 7keuHxnSavgvSBWdQwSVoMOC4JduZt4jdWg0+wz93PAwHM2X9p9sKfimWRGFGNm5Y4qw2kSXvqS+ + UrI7ps0eiXd+uOYIEvsuXXJS51Nk3i0Rak7MqaQ5SG+ABwSiQxTfqbICUfArf8N0gsrStjyk6gvV + sXwAuRBJXs4BAKovFxwmBihtpgBWPD/xkCAeFwYkwVIliDiQnpRuW7bgAfFGX0FYjLQ0TGlbbM1d + 7FTpK267aV0Mm8mSbGQiWdmGiSsrbazSGW7tMGYEBVyqMOtzxYNqo/qz9F34iSXR13lpM9dCvwS5 + jWYYWmRAYmEwuWQfSufubqRphInIP8Vy1wc/9uMDOCYOB9P95B6ZrfTMcoQTZMuW1InfBHLWXyBd + omlJ9me+ea4aUrYg4VgxfRzy2qWkd3EkoAKoDkdysCJYWChnObYprVezeS6EypK0Gs4JIA0NBVuS + 
7vWXLafikzZa+GpmttTvZVez0nxoz852DFNUhsSgjFxK46LQWJ4YScLIhoEIThFBTdo6j5EFtKYu + qCox/NfeM2iQTWLFFVx7DB9+mv9TKGjhMqRwZNrgTeD2pZ1gfmkJZUMoxaZCg8lOkkTk+KCPfTgb + aTPwbNDp6wIFEaEzv+UCSix4hQFOIXvrFxChlePcj9IiYjVaS/Nb2gEsTZpbgnowjItbHpyOQ+FC + WnSlFofvoCzZCQRLEEqBZNPphAvyWOnOBK0csm+w1bWsTa6/pUk8XL6T0F1H3Rf6zwtiYlv1BFsA + 1g98qCRT5sMNjUa5hXsANMn+yjaPE+rwPtlfOEpboKGQIkNmbXKsgG3QKtfCCP2AZlDSQ5nExIyo + MxbRn7srKL2fBHJk5K4h2si+1ZY+msnxeCjS6FMQAlZgsTry1AAYL8aT0DXjpHVLISnjVPuLfN2G + ytKeDXkHUsuGiRG0bsux06PVQRtOKlUJRCMonTRXGWqqO4RuRysCWXQy1Dxo0p7FK1lxshIhiQGB + RRikqIVGLOmRVGU5YMEEi/AKZa3DBFGNhgUZDSS4QZCn2QdLhqMhh2eeUpqaj5COQIMAK8+GBwEO + QKxQDiowDMECix2RnkNBmwn9yT7G9rZ+HhCluCS/GMp7w1EtdJ3pfBJA+Qxpudzfo7EKPmsRAMar + liV7V9fFgiTsy5EegwS0QragHVcitP9UYpRMQw0MeUg+1GW9+zb4aWjlnlkef6na1AbUEzuQuvUd + tCT099L2iGBIZG+Sitt30VtES2l5DefFtQUq1sB9R8Q+ElH8Jyp8Z2YUBw6gPq0Roi9I0kkQikSN + gCGhkdIPImpYS5ppcZZYLwRkwH6CIs2KgScLTI9zvp4+SUMdLHu1bke6v1B1CVNEgicUq+H6unUZ + MqG5lOTdkXwsMai0BDlTQ7MKzSOyj02nwsy6IAtZovwZbqXdteOlnO/xCO3jQg6bqcul+UGm10gs + IIA/yIWE0+5LodI4uT+GmVnWnZ0E2msSVwotYhw/iGgiIF2cPkJku5iuB1nnvKnsDGM9AIRnVy5W + iYviwnkWfuYQzmDHyg6lvFmeiEYweqw6khrQMhdMEZsLMGkwR5iPairKQvArWXGg/E2pY3iJ2Ldo + ag4yG644e8ka/iz7me43BfPCw4AnMSPpghQZ0OYJahsIJzwtKpKtG0mroMa6R6UUoS2K0CyKnAmo + QA5fkboF5KcRAqRJkJ1adIgQTSS+TOSeAdwKq1UiRZIEIO0FkkY5KKNbFUbOy6kQrB9QnIJI8wfR + UPeWRXuOhgz4SlYogI5HuRD1lWrgXlwFWtmQNoHqP5WJpMqIeCk+EbVdUFwybjugYUcaiG1KAW8l + NAyuoDA/p8mUoUmW5qjojVyCFl7LKQAIqCTlY4X1nnwF1U1X5cWBIm0A6kdFhaiSp1EDlzEZRX6B + MRxo56cYLJDyMRzi+bIFKf6xX75USyiGdEkagljkXziqRhxputHOBtm2YM0j6B1hdJG0NSyHFakH + LAQ7x3ia7DUuehhtDi1EwwbjalBoK2b/GALm5B9t4pDm46LgKaeQGeyLuS5h3nAY2cdzw8ia5wLe + WRo1VDXNjuUcOgf9OAVd4DWMLxhqG6koss8/6QTNiSnEps2QkKckIs6vQqFcXHVIUxY5OcJKj4sc + lWdsFxycibJn0aDUf8E9fTUvSNKhGRD6HgVNkf2JukyRWtJsEWSmnfMpJxnRSymnhHwe9aFcLBcW + h+IvnBbKkgaq07IWcRhBnICJJo+yoJpwTzK6yvHEAsKfbzujDh11jY3JZ7xydsiVtMBfsLkh1QG3 + yuqlkuKg11p6pgt6SPpGJMHnE2MVWo4qZVVwqM5rAGuh5PFldUyrGdlxD+CUz58DAp9GGOScwIjy + 
XBkb75EgTuTESLVDLYTIslcbJy1PkzOudjREncNbk8j9XxCThMK/CH4Eb1oqi6RGMOFMz5Mv4wDg + 5CMLLsAaAM0UBoC4iGwwROgjyc+f+dY0RxOdgXAXLGM6p26E1iJSRsscASUIdLIFlMo+5ChFeOZ9 + BXHDQo28zyQMUd2lz4/OCFP+ABuZbw5Hiknhl+xzgyQIZLQjrtN9ixbhz7ViTQ6LLe0tl35C+IFY + inwYnxBDDYszD6zFoUym8v6ZAmVziARIM6UG1lGHJCY9aiDJFXsGOS5JIkdSZOPoHElQEz0ILoJK + M41A0rWgm2EIyG6yk2KWzeqftABEufhskMheycAaMicG1jIRNXsNGmRYu3HBsvM5CmdJdqpUoDSh + ZGA5UGPx0jQe9K/ouhQWwWZM/0Ni4CAEuuzDIgddQ+dbTatY1AtqSOCyv4LpusoxXcJXZK/KNn7A + SE8FXt5dI4mTV2MO9AiLT13KeS1GtEdlmaSVvKUZPnoZSdhIJzLqSRMFOY+hmWRoJFAIGykCTsvI + 6OEPaluSaHjDg0E5HbQPgyQQpxigQJQ3AD+nyQgHpxvy+BOLjdBMIKc3bAND4odIrTLSzJbLxVYD + zZIDzjFjPdmpgTMsZONpf5CkerLzTFF1keNkc6KukdInaN6CFXJ4ln0uxwEQBsVsFLqH6ZGHNPEW + 1F9MQKCKs0RFeytKrnL8T0sN/DyMJts5EjlAD1nVxMN2kgWxyBkHUlYU2VYkEwYsY2W1QmRJtg+l + ptU14JZGdr0nljqbF9Ec+1yrUTuXVxOt6VEcWmjLBVGhs4boa5Qlx04IIjAiTEhembNc7MxXSsXq + vpM2BqWsgWAeGZOiOqjIUc9G6e6vfjbJNYgaDrKQgycRzSH4kbRjXQxOJ+VLjptrg0rvPfnbs9YB + vqnQLiR5SHeyj/eRNSBUF7klKU2llYq838peUESD55lzOe9NCiASCgNFdDSwuE+drS3Jh/JcyHCA + d2W14qtNaL67gNlz9yxjRdYLcLKNtEmZWh8lOJlMIxpkkLgie87K6xwJecU0tMDBnky7Cr1zz2sG + Fkgcqbsm1UnEVgPsJAAVao54lgGmJfjqpqRyKdAnOYOSt7LIJ0isg4kKIPSVUv3ZoaVRUCiDKysk + +Xm5NoeMMnQvMw0gCYoP4VtRly1mLEVdKnUAcgomJIk8XDPkxqqDrFjcvvahXBpPORrWiXvWGrDG + myykQU/R7CwC5I3iS0FoO8dPEEoaMh2pZ0GQxNlAX7Cz0Rnve8xDeroztBWyKDc5wWCR39IaBg96 + uWTP4F0frOkILJQU0iS+n3JUIPFoozSs7TwFkdQGNQ9WHcsuLGX3OClILVAU6rmrpR3AP1XDOEiy + v9HgSScu2kpZJcX32Kfc8rgcTmk+ZV/QzcDOChcJHZtJtZbG5FyTBRUQWFzJK1PODr3FP501MvOi + iCzK6T+9jWZVZ8kzt9MF4qZGu7HOK0ro86C9OUd4ecJOuZ7O0uUAw9ImjhSy0VJxBSSKEJdjQ0+N + SdmpUo8AHpFnfLwwWSBJRnij+yDXyMOtNI28HlrBsvfGoOf0XB4paoS0/0/SJgweSuA78mXIEthb + SkwtghxLL9NtPLDynrKCmj8/EAONwXEXaugFKw3JmiV9uQgibJ+GJxeoRObw0o5wlglzllwaeZPI + HD0jXyx/y1xu9srAkJhJ+goEQ67PiAwGtQlwPWmPszTECcGf6P3sz6lbFpUj2Tufh0kaugq4Aigk + xJfL3k93MykzGEx70qh78VSpvrNSt3LmBToRFAfS/Vy0YIKiDR7C3dOT3DYGzXPzaQ00dZ35cbie + rml/uWzCMrihFI+0s7JAeBYboEW3SCbZlqnzw3o5vM4nRPQKGrBkP5kyD1Q1oM22hDk0CmJB/OLI + 
aQGLzCg4eiIh9gN0M26Jw5yGTAX4OyR4KDnBAoDCioahbTa8uEA7N6W7T/I4tblxU4Px4pz46cyB + 8FBhokkWhi2+kriUUHOowebMKZNkMnoCSBFmgBwDPQqc7TqdMhcPOp8WmlqRKJ2x39w9hGgboYZa + 5CAyHo2it9y2Ue6C9pwgxNmIf3kfwUlVSrCfeKuvs7Jy6JDaUeiJZ7kRUtJLaoC5MpDQEorvUd4q + 4iWBzvL+OEJDgORu+EnHU5+l5LMgvVzAF89El4gjTsYL8TOfRDvXYT2A4TB4mTtCBxxKarKjoww+ + Jw4pOXjqA6NdU9lpZHSBndSjBVNJpyRBUUhCnWJ2BJbwy8JlicnJNxUCVDQLFqEGrj9svRkSJvu4 + dD6nDuzzrgIOYlnYlyQuKHkWfNCyVoRwRZ7PCl/Ikgr67QDqPMeQi52ZWVOdzj70ZDkIRdiQI5JX + giI7BcBUe2gRkp2CWqx8DV+y/pu81nOTQSNdQt2yLGrKnRfxp6C/eGDJ6TXvCuS1GOMnxDJVMMyI + HWlDxUxXva3ceEqJ4xAhAdngHxfvhEZDPh7IP5ne50JL48558ynWm/NbyGkoZZVzzXzLlvQwC/NH + 51sumtJSiknuTEuJ4xzATVl4fkKGFQFyhsFv5D1tBQ7krAWOKg5AgcyAsg61tJj2ddIlk+Ty01tS + MQYJMeKLSlu9bHCmRZBZ/jLr8KOiJlbiyuwVhwWgSREKi7UxqmblTKtrInNgcyq0AEb30A41qcR+ + djAiM1lxzjM7Wqo1rNW5KJGjLZYUTGdvwqs3dpRDQfk6+3UEwgBJLmK05DpZUzdMMXspQybZL059 + 3HRR9NQ43Al9Z4Acz2g/U/Yu2kGR68+WEZARUmddAAhT7pq2e6B9AeAlRxyLvLUg5Kb1sBNhgBDA + +4GzU2ONpmmGyRjH7K/bCMdm5X5cYdvAqIuzbh2eCJ/VYdIDcMzvoLYnhdUvp5J9sMVjm7QX4pwA + BSnIYtGgjqCIAc+KEZl+WHbyPDsItcYCWaVmzz3x/jxrTIWYblt2zOzRXPhN45RwS3Lm3pxVO+uS + EYHbdkHgZi/m2i73qPPE4txfwvA01FC4dWUHSfvq8s3HZ1zPIt+vnkJlpJIGQb5bBAyYS4PaghWm + nBvsROLdaChHU5oUiE8qz6qlIF9ZA9EJ4uZGWFLEIXcbnlwNVdm0BzffqyGfjji9ciFlMwXGKOAy + 2S+c+6ElN3SBsIPFcj2Fndn+STeqpL/0jJkfUxpCHAqgnSoFCkFyFUokIoj10lDZZk6ZKl23FxGK + STnXkioz7ys/lbQl78wqpkVnhLaS/yvK0wq8VGodeBEc7cZAtIaQ8pBTaBUjNeh7BHIIBT2k6NTA + i4BliVvy8QNT1bABWfHsab9O+l4xEUAj8MzQWwnxfrUvC0QtySPyXKkQIEJH9kFUrMwx5E70QZQo + jUpqgjKLnA+kBwORctO0XaP0Gi2wI27QU2Jf7wITC0BHc059+AkIyRLjzYmcpxCCUnpUcbnzkBMS + qaDNGyT8MMCxPpQQc23ezAws4tuSPHgqwo8AFPhPKVsQgfQsTeIkkUzF87moGliptLgMv+HK7QsI + KLHczsKzvFMLTSFiF+d9+FsWgBJNHBPJHzXlvhCbivS/AMxtPRnvqLSaiYr9WUOz0O20DSSrNhQj + Aawtgh19/0Gu71fcR+Sur8DSkA3rOX/B1iyOstqojhgGLfpnH8XrhtI3CPlon5Qthja/U5Rd4I0s + uJ/c5NFeRw8C5k9M8XBGW3G1BIeQCsbCiwyy38quhHdsFLgSduBsYZunzBVpCOyFvkMGiYf0HIRS + NH6FxVVd5ARI41C2QWtYMbu6lqudK+KBx5KdJM7JZJcgxR8IP9emglCIEo7l4XWPxFse4Kik6Xo5 + 
Zragfvr1grk0E2CbztqHCtRTaA8q/KiVZCeaZle55ly4WNbaKq2JnpWQA4Hx1VKFpkCWguiRxDMK + LaQddWjYyQ7HC6iSNrAlcDvPNQ2hvJB4MjkN6xKnomT6II1p1Jr4YV3WBFx47JA7wOI6xzmjlT2k + XERtof9SS5CSygw8XnUj9/nSjUdKhz4FmtvzpCArBtCCPhQE7hnv70VK5MjWIXLvpfuTzBsgglGH + oEj5oYRydnCKoTOt2le80VqCGwINZu8KcDAs+1mHTwh9sGw5XMBNnW52Qb0nE1nQP0mPxJ9ed5od + W8rfMu24iy1fSPpm0XR/rtP31tCOvWvETDYYXlv79dLC/EuUTmeuof3F79rKvEnJgI5f0w2h+aF4 + 6U+mifiXi/K19Bod/orbwiuryunS4tlj30TnhDR/lJOu8X8AWZt2yf3sLVjSq96uRGrMz7Szdz1T + bo6+KJFWp5LoWu5bYssUbzADM0fj6Onlgsp1JX1D1ukAGF6kS19wkHrbU/daqrNiF/CmDUXJvgzv + n3zFMPcbOIMfL9PXsElfFb6GjT3zj153zS7IvJnvn5wMb/oCIrON0zvh2GuyOkEwwleFk8m9L+tf + fWr67jQapOj5R3kCz/zNai9fvxbOJfsywvRVeeLFexdRqP9r7winJ37FW7RXeGExfzB7/TctLvHX + +tEXI2df8PdvP5SqKatKs3dm/1PPLXhBtvzVjxJ9PzZezF7ib2b/H9Ckjxn6aAAA + headers: + Accept-Ranges: + - bytes + Age: + - '0' + Cache-Control: + - max-age=300 + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Length: + - '10422' + Content-Security-Policy: + - report-uri https://api.datadoghq.com/csp-report + Content-Type: + - text/html; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:14 GMT + Pragma: + - no-cache + Strict-Transport-Security: + - max-age=15724800; + Vary: + - Accept-Encoding + Via: + - 1.1 varnish + X-Cache: + - MISS + X-Cache-Hits: + - '0' + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - kg+/Cls6zaJcT2blJLlU62BwgGePGdpqSwWrJ0xEIvzmSMWHXxGNsiyEzBPJ1a96 + X-DD-VERSION: + - '35.2134903' + X-Served-By: + - cache-mad22038-MAD + X-Timer: + - S1580746814.868192,VS0,VE157 + status: + code: 200 + message: OK +- request: + body: '{}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '2' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/screen/share/966440 + response: + body: + string: 'null' + headers: + Cache-Control: + - no-cache + 
Connection: + - keep-alive + Content-Length: + - '4' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:14 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:14 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - dPTJBBDv5jeY1gnH1FisDpda5Hi0boOGbsHxIOi4qkMt+QLOH7F7P7MeSr40vXZ0 + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://p.datadoghq.com/sb/fasjyydbcgwwc2uc-3412a5ac759ea5d85527d87987d725f3 + response: + body: + string: !!binary | + H4sIAAAAAAAAA618aZPjupXl9/oVmpyYDrvTSu3bc9WLzrVy3/eeDga4SZS4JRdttv/7nAuASoBi + lt2vxxF+WZJIELi4y7nnXvDbt2/f/9fRzeHj2+1xbZIF/u/4XK9/M/j/ajXx1/jib01eUPw1vv0d + /6zV/m/t7/xG/kn8R37e/M6v+zv9SL98E9c3xMXyls3NdB2/H78X44i/3759ezi7OG02n/db/u3N + 7CW98b3F8UH+dDk6H3jny3frKJtMd0e7zf2X8/blyZilF+768PpleHvZmoz3l+vn0b37uGSTwat3 + 2Xi/ebu/Ojh1VrGb3jiT46ufzYU5efCOD99nT7fJ4UXHP3vuxrP1x83luR2chXezjt++Hp6c3r9P + 2Xn3cLA6DON22POu2MfLx337Kl/MvYP5HWt1j053D55f3Kf37PV4OHp++Bh6bW/yet2cr66fo/Nh + 33ntP+3e3oQnA5u93wVu8P4ydjM77zwdLO37+5OHJHhJo95wfnOU2idZ49lqPLmvDwft3rJxtjo9 + Tvz09OB10Lh++7g7HDye30Wr8+D6admO3sym9REGb/ur5KzdXdxOWNwKowvrabRusuDtrTGKZ53h + 09Ftenl9u5q930S79lN8cu3Fb/Ozg4zNZubt86F5tbr7+TO+uulNnZn5dr/OWsdnzZfkxby9c46v + WdxoPbrp+9n65PqDZecLO7r4uT5af4xP7x5P3y4u3qNwfXW6vOgOe52xe8oerDv7/jF6N2/eg9P7 + /fxjNI0sNjpb2V7jJDWfuu3FW7YyF52gMTxZDrNkdjtut+duz342w6vHYdNajK3n0yfferrMbz/M + 9e65dfw2u3k6u9u9OHvprMft53x8cz15Pw27q1Xc7LXOu4EXn/9s2Skb5P3gyjs73c+O8lW0euyM + k9lD9/UmWzStpmUfxKuxtXt55JuLND6+7U96w+HHUX/2cdN4PLPWR/3gcGoun8ej4Ck8bU7Obp7s 
+ 18koOrx9vfbabxdv4e5Bqzl6uXkfuC9+6yq4bzUPVqHXv+ieTu8tJ7pqOFN20Fs3ejdZ27yz2vPH + Vbcz2c9Xu4N11w/tQft2spxMr/afb+NZcnt57+7v95q7Z42rj/Pb3dX+6PDVeW0O3nYbaeYNB73r + g4/zYLLs/zxLLt8u948Cz2wu/Fl4eBXdXnZnl4/DxnVr7Tpm53piPbLuvB/f3j/MpsPz/KOTWK2z + lRk2r4+OHl/vfDM7j0Ynp8PzyHzYv5lbjbfp4Pmqz97bx41JfP2x7AyPz57vJv5F0++8J1bn9uhl + cv2WRFeXty8/+4fR0XS1+7Qce4Oz/tti1LTn0dyeji4eorS7dC8OB4tkfhPFl+H07eLn/cH9+OfV + 6v5s92L6nLwuF/nKeXXfnXhwZV8fLeZv0eHLoxmOR295ut/3L0/t88Hs2vGtaevnQfrCOqvs7uBw + cnoyvU86+4Oe/bQ48R/XkTtMRg9H3ebliT1YnvTOlyfHhy+H9z/PmXe/+9x3Y7d3fNl/bA29i8u7 + j4PDM/dqsYoOdgd9t3t90eu4z5N143Y177PVy0Pm3bXenpPZWfCSXOevH1eDmTu8Gs3Z48QZZx/v + 50+7NrtbLz8edm+GB4cd1p4OJuuPu8fLZD2fh8vTVnf/eDw8O33bzZcvOYvP/fnobunMD/tnl8fd + 8/FjLzMPu/lD1nr2nbuH+Uf/pLVMns5Wq9P++c1rr7k+uOwP9/30Zrr7uOwtJhNz2eoGnU508u5f + r4Ytd33w3syfs5/vYWL+fDm/Wi1Os/Xkun9nvvqjlzN393ByfBo3Z+FH43rAgtPL07UfZE03Wu+2 + Hz9ubg/3x/F6zG4er+P3ANvtPV3s7jutwJ/8vGgfHbnuyfOjnYeL6SSf7j+9TO+y0cnb8OK52Z6P + AzM8bDy9xGEnHlxEq/ZycHA967XZ9bmXnXa6L7v7h93nYBqF++2DQXz01HVao7eP+UfzOLMO3szg + dJXN1573Zj48nARvB1G/MTjPnq6DaDJIFkc3p5fN8STfb+yeX827l88/z3vvTnuYmAPLPJxG7zc3 + Z63Tu9myfTMad0yowpUXX7Tu4nv2mE5ujp8e1q3meuyz16ubs+vr3cbRYe8mbFnhZfZ0eZhM4Ymt + 6XEyzbvxggWHi/PUPR+es7Oj2UuwuLsODk02vnZfrVu/eXrVGQ2d62OnFfk/F1d389PT92487C27 + P+PdbNGIV8t3b+CM5uPhrP94f/TwctjY/3nmn+6Gk6vxZHYT3a6uvIvpWfdxkSyG80un8db1Wi8P + l63Lw1F4OWJhdngZL7Et71OPZcOQXd0PF+2by3Ryl08PB37D3E0OnNayFXTmS/P56bm5np6OnXR9 + e3h6xl5br9c39zft/R8/EBrrdURzEdQpln6fOMz+/RuPvPQxcDJWC1ng/NiZe84ijpJsp2ZFYeaE + 2Y+dhWdnkx+2M/csp84//KXmhV7mMb+eWsx3frR2lMHSbOU7n5/pAf9RjFr72+ahxcP5gL/V1OH/ + unWR9rjfai39in9o15uRvar9rRYz2/bCMS6Ol3+t6ZdsXb9ns2RWDyLbqcWeleWJsxewWWTIDzUv + GGtfGL43nmQVq7G9NPbZ6rdaGIXOP5vlb2GU/enz0X/+F59Nc/3vPlpb8dZqaMS//EoouOM/08T6 + 9x87RuLMnSR17J3/qpiD6/mZk/xWMxOST+ik6Z+G7b1Rt9Ua9LuD//PnryTyHaFL05rvmZfpWoTp + HSdJlNS6zS5g4BHLmB2Nv9X+vpn35qtCrRulQb77XjirTRLH/bEzybI4/a2B57LMs7AJfLjJx54V + BfLLxrzR6e21W53uqNlpTNNG4jArq1sRaYcX7llpulMLHNtjP3ZSK3GccKeWOD4+0FrSiePAirJV + 
DKvKnGXW4Nc3Pi1jM/Ev/vG98Wmm37lWWz5L0x873xRzg3F/+2578+I3h2RkxGzs4BuHJa635Fdv + X4M9leN8L9Rcjq+q/uZR33F9jfnwB1LOtctoHNX+9MDs2oGXpbVs4tQgwj/Dc4hpqsMIg9mpQYn+ + mOxTZtdNPGYvnY93asIl7bQH/Z3axCFd+7HTHvUU8f4Pp0sW8f9hthtr+dem/b0hd4LvWAPbWr11 + Qfq5dYW0sdU16LdTbzVrpG711Fs79VFffCBnnrA0qw/lr2bk2+In6bJcZjk7v+8n7h5mIZT0+6RV + jL49cne48zuZ4nWU1U6iPLShri15X47kUYYW3/scYu7UydxymIFn/9jhqlp88/v3hu8pd/3+nUlL + bez8XmjcaRQ4XLX/LWFJ8tfvDfb1bYWB25GVlsx74vixOio+/isjNphlYaFZI3WyDKEl/ZzYg5Pl + ce2ApZ6VVg71vSFkIjdV/uEZNi0I8bn4CvZchEaybR6fFfsm5+tFoRHmgekkteIjzBwu+ffDKF5x + 11v4x7/UzkJrr9Zutpu1eu3TndXqm6dsBF1IbLFYlATmO2PmN1y4uHqWUNhnY/w7ADqAGE/o60f6 + urZffE0780ceESfenFkrjHor/lW7jXzPWv3R8awomnlOivEO+b+U4YRkhY2pMt8InHvxHzt6RN/5 + /bsXxnnGFbgT11kcp3VCJzsSP+nfCe8/8WybgsOc+TlG/Nu//e9O968ppJesDDsN+cffavxPsQeW + Pex2Wu1Btznqddu9EWtaXYcNhp2BbbeGlvsfePDe2MnEKDxo9Zqdbp8P8hcxlnxCyoLYd4yEZY76 + pObeoKdebdsGPITlGNPUsHwPszOyaOZos4tz03E7zHT7TmfQc9oda9hvD9pNu8f6Vtc0mWt/NaQT + MtN3xM9yrVmSixnJ+XrAmwlWYuSpkxgTlk7kfMPc9+U1aye0nXRmWBOWyV9pmPIIkI7haQ9bjvLm + R3/W/mp+eCThW1VCC8es595XN+SJr15cbBxEBIWFmUCUdfoS2gHaxNnzo3HJD6kjRz4CzvaC1E1x + wrn6QA2xfDVJ2zHzsXqXy/xUE7pwyOSDPMBd+qOJrTXrL9ZxGk/d0YiP8w+KsRtf9empyJPVjo5q + 5w81SjYEJvnnJoS45HpjzYK0r35hQCazZpHrytUJowI+8+bFJvK1/kOqxtxLc+Z7a6C9KEz3XIA4 + C/HQwL7DQRgfuQNj9L3AK9Sq02w25b2Z48PZwVr3MjY2TBgtdPNTv/hz5KV2tAgzL3BSpAshpGls + vjDiyPcNruPwA3LW/eIRqZcJFcEc5o6RMc830ty0o4B5mgnSz1yX1C0nPWvjiUsKC8AcWE2aiSuQ + /2zWQdPec8WqWWJNDNtBrqKtut0rZkQm9IWOk+/RAbOmf/QUmguwzAyGHAB6TFKDjSO56Favek8I + Y3y9H/3P/RBge88L+XZ72cogkUd5ZgTFoke4Wl+32OHESWPDDTJDBk45pXb1jBgQ/9cz6n0+gYTq + r/a+kFdJWqqwSAfhnLf9VWc4GsH/k3/tj1pD9R4ziRbkIQHynJDAwF4exjAGxy75vBkbx7YfWeMo + sMaA5nYcTkMLjsmOzDBwLHXQkn0g88gCFn+9+FZbEfD2jKw8SSiE6N4E7HFoubFtT+xg5gagDqxx + GIM3Y9bYmUZMnZAXugRYk1yk43kMjXOMGOYa2ZiWmPxvtY0FlRZgsiT9xexhFBv1IM2AD9gzISoS + Ipk6cJxUDf7nF5ICA5LBpmM/+oUzEdIqTMsmC7Edl+V+ZgBbIaYZhAYwCfnQT5vljmHbDXzt/bn1 + eSF3NwCpZY+jbhufx4QlNlyGHzHbYC5WIqfQ2XiCT/e3DSWae61q0wFswoLMnNztL/ZBnQ4chZdF + 
SbrnEFDijtoYJyyeGPO2YeUZnD0876qQ0UA+GCoReCnZQfGLiAXFcFgc02Jat9tt9btDq24PzVa9 + 1XJGdTYYsHrLhM2xZtfsNLvqhpOYzAhiSreGskdNtz/sNOt2xx4qQ5kdpEZOzxk5joY4yHEbiwR7 + Cu8YO+TxND0btlrM6lqsDpTXpvGG9ZHTG9T7gz4buE3TcVwRhuXa1fGSyALfAvnJDeR/hl3GOszp + 6gMO66wz6LBeq2tZdlNdK7OhO+oAI4zQNm0M0DG7Ykas5bqQmNvvN+1up9nenpEAQYaI5HBuAkRJ + 2Ndio7ZjO+163zL5Gkf10cjq19tmh3WBc223qTkCvkbYiEMItso4R4OR7QKF1pljD5QB2cAxAZg7 + rX5PG3CjGEVsVtfbtYcDu+cOy8rhmCNrCJuwm10BOeQOAD+FIAs0BYPM7LbbJK3YyAyKXjfN7gCl + IGc0Gg5UofMVkpbCWG1nCSXPxISlxHpOv9nr99r6JnbqwOD4Glo7aGqYTlFYrmvq+qTGtsoa23Xt + dqdp26bV14SljCV3NYXHqBoS0ysZgTtk7X5z4A77mj0pSussLT/n+axgDjXdHdgDazRksFPVGPr1 + UWfQbPZGCI1tLd1QxiV0A6Ckm8LAbDZh2vpwo7plOmbPHbSgQtrObtRkS4bdoYPrHVip7kBGHbfb + HHYHTaYbFZ8YyGXPXYkd1ifWb1tWs9MvrRPzNEejUa8PExttO5ENTFQ3tw+bBE5y9CX26gO303Sh + fT1n0OHXF6jYRZgH2W24PpOI8rfaf/IrAm8Zs9ARQFXqehhx1cQakCp647GTIFr6cPSqMjtLKh8Q + 6iNnrv4Cd5Dm2oBSo4iWRbCGXiFgqXewgK2hGwHDbgIbEp8Zli9Z0/yJ3gJQBijyQrJIeEJ1nAXL + rAkAEJ4RLYy0Y6SYWhFt5eLE7AqfBXXUTCoW3pWGjx0LUbuEwBmfRvrhG4DUKMNaiL+RttbNHDiw + MTz4NM3QxlE0BhK0/Ci3DQgG6F9dwpyWLjNiOWMYz4SFyDEod5VxW/5k5ra9+loCgRNElO44bFYl + CuyoJ8C6HE6sju89aELIThs7XQHgO1gzluRHJvMNTNajdB9a7yGCUyRXJyOFCRYJqEcSgKDBStpG + CALp0xhpuHrzJnNzE2y6+otNWRlAMaTLp5pGeVJSBBYioQK5kRqhszAwVGBUDsPDPbIwrMEA1wyi + RX0Qtnlb5JMo5YgZWCWD8mj6J+RnRWkQpba2GvELUkV/lUIlJAuh3cy9R+HQKPtMotLwMBxTNzVM + BLaZSz5D7iJ5RHUZ4tmhky2iZCZwIpHAlWqzib+LCQJK2eblKuIY1IfEbYjVC6arCSe8NDlyMpgk + 5o1DJOQG88cRAmGCVK5k5gDkEwkG44gn0epAeYg0cG3YyAJ5QgRBxtqTis0RmaPYXQgx/Qy1hYxs + VxO+ZwbGgmlfCQt18xDJJ8e2PJVGvqHOKMkDXTUzRjOce0nGaQg4Tmm82+olaLgJuG5c41izsrS5 + A6Evia9BEpxhMKQQXC7qQ2nuwYc2jSSXCZlc7mwVZVG2Ak2qGaj0u3ylW/YhXR1ftqZOwlkzAHWD + ysKlHRRIPgDNBS+t02oM2etE07sslYAAhIavuVE5tTGK1b7naus1PZHxoACrrUZk/kg2E8mJFHv9 + abGF+LSNlh6ZGyd5N3WxUzOC9iw833Z9TdM4hVREP6mxgmNSb9ecfQrXh9ClbV3IKQGQoQUhJef8 + ufccAKipqrxCaA9VPSSfKxW0chnaPFwP7Aicu7YT3PtMoGwImNhUaLCz0CSR+uANkwjORtsMTMNx + PUFpFlFEKIgMkOUtl4DBQCGVcP5cgzyfcIMoytKP2iIyls60+U28eEMWy4nkMfFF0rHD6fgECgT/ + 
KywO30FZ1A2JJ6CNYs2miwlXZKvanTkqwbpv8NhM16YgmlOqDsfu57TrKDJB/5eadXGAYZsUtIxg + hRCk/sp3SYQruBn1FwlrJNDjZqXtL3lG6vww7CSKKSHSHspFY+XY/YAHCBnMpZTGcNSxHujEFqcf + uhP1tI9uvl6vqlR3A5EQ+nnoTUMWA7JlsCNU132UO9TZ2UVejkhvofxkULlcF7WwD7Gdm9HKsQVd + GJ7rwSsVOoDwAy3TnqUjSLZARPbNKuxEkyJ7oFxchSHqxDOHKjIZgoeMewRGaCO0R5KOikiFCVbB + ECKjk5xQFhmF+oAgxw2SE1W/1yzF9MaC9tamFiGGI7IgouqzEV4fvR9QzXGiRxFYgiR3pYFpzyEs + 5kKP1Md44TxCtUVbbwFEyoshOhueybAsrvt5DCW0teUKB49GDDgpIwY6Z+Ox7k6DAAvSIK0AcBwD + UF3ToJK5jOVfSow4MsBbpBfl2KZi93n8ZSwVrlgff8I8sgW2SfoLP76AliTRUosuMvo5uvsoxB0F + qEw6wAJABZqARPHN4EUmoyihIZRoEgEH6LDUMUjbtI2RIWA8oeRIELtWJboCkJOsprpEUWHiOlpy + 3PQkE3VZ9WrLS63IYJZWd0sltVcEEu4L5wGHGZT+aK4a+cIEg2pL0JMrn2UE/dXHFlPhJluROEzQ + xpOI+luh41420VM0EXkjXCgwMLlVgvSiWUB9WgxbL/t3GRwtLe7ZGxfHATBPlNWBUKHNAz1OiHGi + OCVUr11cKI9MUDHdELIueUzd0WVWDDzOrzSmeYB6wHbivGXsWxhi6iVaqqtPxHQwesZ8TQ2oMgUz + w+YCGdrcyZUjF0MlBz5DFQcKx8SjwQNk0ZiyafDPcLPqJTP4KvVzEcWh8xmpPiKBSGGqRBek2iTJ + zpaoXSI6pSn6wpDfIILrEShlc8Bzcu6gMWL1yekqRSDQGKwij4JH4NVD6rBQb9FEjQRPj6toTIMN + iwIn5BbFhCkQJH4RyKxwUrWlDhmWUGKocFqKLp9pAe6l7jnDsvX8HqslmWiaimBVQAxZbQXppGOs + VTbRN4tvSgWTJBUIll6ZMVPiY5uaIfkM+dqEurp0uA5KKC8YUmnYG1dAqhcwQddXaQP1fmRQN82R + sDjg3EKV2XPOwYdX4LmQppVorNiouha6ZvTLp2pJxdAuKaIHD9qGz0zH16abLjzQX6KrAr2PnMDR + toa3fcg0Adwedo4zJ5qZoFTlCVQgW484e4LSVzUfxyGjoOOoHUmbT4ASpJ7uKfAVc53AeuEP1MdL + qkIxTwPOVxs1YSb6A/V8t4TaBClc4RTsTyTp2YUo1OdvdILyV0LJCGdogtW3QTCeUKgAV62K9EJP + ZLDStVFJrjlz+GxtzZSDGACJgihQf9qsixwecTbIl1FlKYVXaSUoocLRoLhno0Sq77yI7Gh/48sp + gxqoE1yKDmZKbKtEzNtbxQk4H30v3L1oqqRnXwFXrAob53NDhgGGkjcwabKxEg0fsAD0i/aNTDK3 + J8brnALE6dvHO7pNEAGJ5qV1FSpqAuq4KzCz288BDU5RATkdIJn2swjQoRNnuY7SmJeYCUSmXm1v + NLPIiQQEp7CyjSZdR3ZYSVcGMWkQ75MmRzylglMKsPjZlyXv2s517BWwTITksyL8A6NSZAYxkHpg + YNCNUZ4/94dFSiToLiSrEU9QtqkRqbWIbumkRPCstejLPaMkQHgY0PeTd5fy54g+uO0t8pBYliCc + fDh+US+P8zjWgYa8zorGVOXYHnrmrECCQ7W1nxAasPwq/yL8G4/oxpZ3NLNEJyVFt0mFUvmOhgUV + Yp43ViI3QIuDAAja+hL4alD4ue5/kOSizyJHBXFFhohUn6RZRAftWtC2UHjQ/uqw3IJ5tZDKJcRc + 
q/hKvZIDKRAMHEgp0U69Bu0k5P6r0qFyeiAsbMG0cp4LZQKJgIpEWGTHoFEDzdr4jOk/TgaMgiCk + TiD10WOzvdVU8yFvZyJ30oQ6ieslxkj6BPUqtU0Cxrgph4peFE2conaxMohgJdexXbmQzUQqUTOt + Ai4cwE1SDbdYjo5ICoyupxCUxCV2DoXwgM7R1KpH9l9UgjTR8B5JSA1UCUoryL3h4DnpR007IJwD + w9TRB0NKVcKGWGyK0rueWfANTJwoQVajSFMtLksnh9GclclQ2tGEDO6tktWmbhpN9XQnSQkdKW+V + g+RzIhdIrARanbBOAZ1UkYjcEcIgPEWwOkHI1pRNMmuZAyJS1h6q9lYWKPU4X1D2vMUV+Ee7ETkU + qo66qsmHLTQL4hEyi7Vppd441XN1nizyyhoyGC+CUhNnBHyi9SajfKxPMZ05BFJFs2zFwm3Z2ldQ + AGKG/PQNohp8v+ZqBSPE+60L+tKK/KI3Ro5RhCTOTDAf5SpyV8QFf7Z0VcyEYP8q1JrnEvANiPPq + xTSpSPPGYosZ3btxolvVc7FTUBnYBx6kjhgBpiPOVvmaLRq/7IzUC6qo4wTNtOqz9EQzr8A3YM2r + KFyA34g8qKcNR7wRUgqAVV1XxGpzSjAN2LLwuTrQ4+XwjcIXfbpkUkQGcpmmFDmQKZaqzNswJqym + bCWIDXWKUjrAYJtnH4MU0RpMCp1EwLTB5AENoSCHZ9lgLuLPhkKSS4U+6SmLvpVVhq6l+S7KY9BX + ose/3D9UkeCfdD+nPaYEdxRqlJsG7ITAHRwmipbVDKAs2hRkip7zSEki8TVtvbdoVcWULhO9blyQ + IrwZlRyYpiAz5s60QTchSrdGuPWyUXwqCHU0fAE7ijjoa2V7Saiq0btiZ9MtHnVdxuO0M9QNWJVY + bLCtTCiJ7xeRrJSp2aLxgffdgPbR4pTGjRMpBP9PvcKwtu38QdMK1Ad4RUldGDkfqmMLko0sUFax + haulJtgv1TCLc/U3iog0cdlZyasOUcg/lZYnizhFXk10B0r9/JxtldCxmaS1FGhLHQgkILCimlcm + kgzttV/OGml1VZiVteYvb6OUaCvzFXZqIBia1JC0XX1BEwS1p6zh5flpa4RQHaBKbURm5PhaHEa/ + QR3wEiGuRD9uenPUqZJHQBog0jVRzKuQJCeQUZov9bLokTdEN5R6bwY+zColgbKeRi1wmjZhcHno + RXrQNNJxSOzNiQky4hLrrfNbIrCKtippM9pq+JkQaAxOfJChV6w0cWY8kytFkELaxMQiL9dJs+JM + CE9jhUFoI3/kOufNmZNxNOfgW70ytjUqkI4X23q9Q6Yl4PpB1BRtvtoQG1i+ocvVnwu3LCsxunfe + DpMUuioSfSgkxFciqTZ386MYHNtSWxa5l5Bp9ZIpm+vpFPg78BPI4UvRgguKuh+kuwdFgbaZCAcW + dDK8qBcWrrM8jtDTGbVY6yasgxviZ7Sd1QUiUtMYXapVMlH7iWTvG1eHFNpRplmLSGK10Z2k+8mC + TiDVgDZ7GuYwCcSCacX59AralvNndL4vi2I09M0dnzsNzWfF0QJZG0o4sACgsKphqAdFsPnUvKjd + vZHHpgdMmBqMF+frN2330oST3NQsDFtc18uMpk8GWzInJXPknAOQIswAO466vqCqovCLBGwzLfR1 + IvvZopuFe0jQU0E7U+UgFI9G6K2UOxEhQQ0ZCHEe4l/ZRwhGlBjtDRmFIih/YUVxIqHAh1oLB7VI + 6r2Aml6SAZbqLlJLqE0H9aQqUhHorOyPUxTPNXezQI85zg8V1qX5LEivFPDlM9FZ4Weam+I+iZq3 + YT2A4TB4TWnGaA9DDUt3dESZC9aPmL1NkxS1FKnTUHSBH1ajAqSmU5qgCJKQU1RH4Fm8LlyemGx8 + 
UyVARSddFWoQ+sPXqzAr6uOK+WyakLcr8ALE8rCvSVzy6Tz4IEhVIVyZvPNKE7KkimY0gLrQt/Xq + ojJr0mn1oRuDRijChqyRvDooWhMAJu2hpkZ+EMiYRia+5L0qZa0XJoMus5zcsi5qIr6rSFFwWiKw + lPRatMyJU1houq1khUnBMCN+qgslKouFc70rk9jgBCEB2eCvq2VSoyGfEIyezs0LoRVxZ7szE+st + +S2qPICHKrlmsWUTOs/B/dH2lstGroI30ru5Cja4BHALvkIcEuEMfskwxI2iD6zCgWy1jZHiABTo + tCaneovq1edhDyXJFQeYtEoKEmLEF0ZtUR6I0CrIrH+pOvy0qsPTCXRKSsACcJ8IhdXamHZUOVNp + TGYOfE6VFsDpHurm0mraW2cDlMnKo47qaIXW8D7gqkSO+g8JTKs3xWy1IA4F9WL16xSEAZJcxGjN + dfKOZ5iieilHJuoXmyZnWhQdnK6EIXR8Xr2LWhZKzcs6ArITctYVgLAgpCkhRL8AwEuJDZZ5a0XI + LYpZG8IAIUA0y6pT412YRYbJaUT113mKk6N6aVHaNjCqsdX9IhLhreJKcQaM+x0cNtHC6qdTUR8s + Y5u2F7KJnkAKsljHmCAoYsCtCoPSLMoPX6uDkDVWyKqQUbEr6i2iGEhPoqOEmuyEh+blx55mGCCh + A1ytXiw0WG/KFsnCtg+EMZkodgiLUQfZejWDBntUQF9u0C7gL9JD20EOWxXsuZuCKoLpJR4Nuq9x + aRSe0bilBdeNGvPyJQhV3oWzga2lEfAmzHK3nYzrNPXtiK4fA9i8QaBgJiVeqOAl+S+Cx6GaGFoo + +DlZveDBjyB/0YWp6SIdmYoyohRk9zu9PKNCEZxSCRFJBeK2NpTaxKjTnrORkaLaU3ITRTwSDdSb + 2rLmaXlJs+rIy1zzZVU5V4XHKUwBL1Shk1KIvBBSGT5KbeKlAToWX0Ib6J1Ey0RC9JC2evniB6m0 + LsMrzDRMuaRmF7x8R70pBWcMfdXQKxmjaNuV6Lh0R6CR+jIMqIOSWDk1VDqgBlGidqlNGiUTHdsX + 59ygspSCm0SVUQVcdO3BJeFlA79WH9Hqr1lg9rEh2gkOED1Hiiuchp5caBVn4ZyiJMYpNdT4Su3N + 3AzGTuRp8hBphWg4r/CFOm0tguJWyiNeAqLT6mIuzATDVFR/4S8CveMGwSHT+0pExrbpZalE3/Jg + i3hpABCfS+9RKZ1lE8AMm4pUvgKYzUMduzAqN6KkvtXIK3W76MdQt5LwDoCXES/oOH+pJ1be5+gt + U/HYRGZrlfwFX7M8memh0mHbVJVXHyUKe9o3CN/oPdQthpq+CTGrF8qJjOF+SpNPxLm28tEgEcao + TdXMcdqmYiycy1e/1V2JaKmocCWIizgHmFTXUgHakWehroLXIFXTnzO9HDl1QhBNuufDKQ91Xhss + SpgGEi01hyCuocYyDg1CAZMVvI+21rBEnVYUOL2gzBfwneTNORU6J3cCdXUUM9SJFulPqV0VfpM3 + e2prosZ/3bvbnw1LaJPjOYKVakSgVC3qV0ObjDqcqHBqW8yXIIy31KoD/j8PNQnTSSSBV0pUYBGo + yETEgVLeFlt5aE54tepCxDYTph6kreKe0JFo5sgZdYocr2PRO19p45FzoTuAkm9BHqpiAG8XQUHg + c/HySuQsvq7yMjme0BmdqtQYyIBze+AwRRd+Sx2cQK7SvFwXrccahpDQTr0rxrEm9bMFQ08i0GCl + YC/slza7oiCjhAt0J9KxbRv6v0WcaQmW0qBqzMVC/ksG7WJ/Nu9WoX64PQRCPlG8k/XzPWKlt8V8 + nguG9le/D0p52w/p+B5tCCVw8sU0SlstXrm0p73qRbz/rfLKTnNzafXssW+ytUGbP+o9e/g/zjR5 + 
1Jv21ZuatFf51WXuKs5d83fGEXkGi6hT+ShP9/RuIb5M+ZYtUGcUHGV79m+15l67eIvT5jTTHvgI + HMKnt0qBh7E3PWOFzspdwNsgmk31NWL89Yb//FVh8s1qKFxNipftaV9VviqMP/NX73rlFyjvNPsX + JyNarQCzPLuYjPhqP47P8FXlZErvdPqjTy3e70VBit7eoE/gXrz96+Hz18q5qK9x4688pGJMnP3x + F7TykvDny3GneMmn8u5bqv6IJ+ANig3xL7wlTryEUX48OsIrvbwMEvzTn/Fuz81Fny+Z4xNt0Ktw + 8Trrhnif9f8DWPeId7hiAAA= + headers: + Accept-Ranges: + - bytes + - bytes + Age: + - '0' + - '0' + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - report-uri https://api.datadoghq.com/csp-report + Content-Type: + - text/html; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:24 GMT + Pragma: + - no-cache + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + Via: + - 1.1 varnish + X-Cache: + - MISS + X-Cache-Hits: + - '0' + X-Content-Type-Options: + - nosniff + X-DD-VERSION: + - '35.2134903' + X-Served-By: + - cache-mad22049-MAD + X-Timer: + - S1580746824.287085,VS0,VE163 + status: + code: 404 + message: Not Found +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '0' + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/screen/966440 + response: + body: + string: !!binary | + H4sIAAAAAAAAA3WQwW7DIBBEf6Xaa7G9YBc7fEdvVWWRsLGJiEF4ozSK8u/FkZpbj7OaebO7d9hH + m93IngOBAWfZuji9Ma0MAjJZN8Yl3MAcbVhJgKP1kH1iHxcwyyUEAYfiYnIlrVBhharC9lNqo9Cg + qnWHg2zfsQgsxIWuo9+8vaYq96eq3w9lfI7OH/0/kLZuB/kh1Qsyk59mBtPrQcDVO57BSFTdU0zE + K5ivO1xyKD0zczJNkyzPDcfGn+1E9SlNpfQPo/BFaZWAcmwv4AfMJviWtr88Y/D4FrAtv9O66/Dx + C58hH2Q9AQAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - 
dogweb + Date: + - Mon, 03 Feb 2020 16:20:26 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:24 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - mGJe6qmS66N9ddKWdHwHEzQK9VHuaMNr7+EsVTKliCkGq+ayJZmadUyCSwID4him + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/api/cassettes/TestDatadog.test_search.yaml b/tests/integration/api/cassettes/TestDatadog.test_search.yaml new file mode 100644 index 000000000..beff6f516 --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_search.yaml @@ -0,0 +1,108 @@ +interactions: +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/search + response: + body: + string: !!binary | + H4sIAAAAAAAAA5Va7XLsJg++l/xuPObL2O+tdDoeDDhxs2tvbW9y0k7v/RXsZi3wx9Jf5+SRBAIk + IR7vPy+jna6neXr53z8vZzuPnYb//v4y22nOZvWW3bCX316MyUynTvPQ22z6sLN+r9Wb7edselej + yYR4rlPQ5zqiSNBhz3V4wlycJIzzXIUlLJ0luJygQsvn7pCUlefPx5HPVXjCyouEqUTCslj13B+W + EDyEPx+nSAiMhKlEwjAs4bRowtJpwrJowlwJp5XgTcJ5FgnhLhIWJRI85gku8wSfWYIOTQgMmpA3 + NGF/aEKAkQSfSYLPJMEfkrAuluAzi89Unew418P4VqtLd79xSB57HWg9z3KWUOFEQuQQpzN9T7M9 + Z92QfY1/nesphD6aGFn9rb5UNwdW17k7BcD0qedzgIzrucbVXGM011iv59pAPt/q8a96+juYUAEa + gu2Udf1g7JRdJ2sW5QWfh1mhhSyCrq/BZsukHS3CTTd9wLZ2s63n7mzri0Yb5YXh1B6KZvXYaJXZ + GyJ2xhvc/XjW+6QEUkLtTcgenjAVj/Nio13jCZWVJ2Q8SxiHJvhM44zf8DlhB0nCVCShAJEEd9ww + 96y/XlxcLn9PX+oSxaSHopj0GARzHYa7hzcgrfQ7zrDeztnVXIpMD31v9dwNPSo7dyk/lGZTb+rm + 2tZ2HIdxbZ2N+vNQPlzn2ih4KIzqvGHeD/VlGOF18diqu1tQM3bnBNHBkCDV0/W8aT1r2A1fIsJi + 6ib1suFi+65/C93xklMH9XtHBm8h1YDCave9pT4N09aY/MATvusJP/CEZ0eewEnveAJ1bx5VP9WT + 
fYuOAlaQuTPcltw2pR4+7diehq8N27uGGYfLhhTOanvgVnUna+q7X+cujhDnVqP0x2l4qzfGdgEP + K9If2Vn9Cg9zEdlfF0iK+kBDD9ce3SRnuL/Di+SGwNmj3HZYlMcOmk6qWTzxCLyLcbo6zCX7dVoP + eIGndO2CzKJddAZhGXCIHs6wXzPsnop0b5L61IE8dCWuHG4cyPrWBg6eBmWyfhjPmbuI7qVtAX1z + t0YjMFZaWaGyCZ1AC3FQv6vewMrXDUQgDQ4yNo1v71iuTqdBK7dr137VpwTTPDSXhenL1fk2hsht + KyJsHk62D7FuCIuRG62D9YZab1fIbYAc8VJfvuf3oWcZ/qO+jMMZLgD9AVq4IUFdL+JjDrtjp7dM + 76rjBZLJzlMNtSDzd8G+OMoabN31R8Yg3bBtvmGVUCZij2443D+op4Qt0HaaIFQ+hxPkrhq/az3/ + qqevDvgonDuL5vzuuj6UKotovPa+G8zOHTqzLTmuMhty6IqX/Vrk/fXc4KhZJC4BP/GFGYrGadNf + ZwWLQb10aBb0xZHIFZhWOabPrXYY739sue0LFdZXf/4nff3encxo+/q/TrQYPpmx69PP36XbzsY4 + kat36tT9DZXhpuVaDzO8uau2O98qxgSbcSdCp0xfp3lwr7A9xfdh8tfZnhxuqFl1vfXt1o/SjUq9 + 5X0G1+0EzdzW2bgGpvb1Cl5ceuwuc9C2LYcOVcc/mKKMWyvc8ixNbcujbvCvqoMBvPzpNIsWzNLP + 0Ja07WRdmQr3CJL23qZ5jhqOCvqy0+lWK29sda11WZBCmLJtGRGcG91qoWxFy8LqpnHvdW98V5+v + l5PNiChzycrCP/mx2PU4D6l/zGCp6/0Wa+F5uH0585zznpyTspRZRWVexU7gaUBNsqwQOS/ddXsw + msiBmGfU8zt7akxW5UO+vZ1u06xlnBYqz3NrZCNbYRqwk4bb2Ae8nbLyfM/Odsoqdw+tXc8KnrtH + 3461kKtdwlPDccZngU5SFJ6A3J1aCHl80lUZLyyIBEFF7Hooz1feBXJeeRJ6zz9OKv9JYV9eVkfz + uxByxOC+PeNHR8MJO/SfE1ochzB54h+RcVyFKUD8l6J9/8mTTMv9F6Jd+7wqn8g9GbxvX66CM/A/ + L/PD88v58f7lxH9d2JufVaWnivbllSd09+VlHq8PJ1eRyzi5gvDdkKPk25AG1hX1/JR3brMiCalY + 23LCC8hTm1cNbUXe5JTBrpqmjSMvKAtQD48W7uplJMdlYy0NPc89BYW3NZgcyufh5Gs5nnwtDSYv + SfE4s81to01hYGuVtI0ppJbUFLoVtGhJA+lWqSPXYLOPVrYhRp5vSAPPJal+9gXIsAH4c9/bHPkj + Sm/y+D69HywE7gnVllIY3RStrVijOG8sp8QawqQbZfru53c7w8fv7H2G3mOaoKH4ofRCEXwvvwDR + Zp/KJ2gqN4b+sd8Qtd04za5R2hvbDF+9f5Dv+Gb6aU905wg3xO6LCnLqFrrw7aF/y1wrC37+qT4V + kEud+9rvxb5VomXpv6utpI8jeRsCqwce7zawEN0IXM2Pc5HdfTYpXPytZrtLc3/dblrKSh6WSkIO + mzZaVZRlDBZbuaK044AQ/mP4pgOCHd4FVEpP2e/VYioLeSgvKv+FeNe+IN7z/QyBDtnw0pSlIIxV + tjKwYs4kIZaTgsv2aHAhDy8qCvKj3Qf5sfOCV3HdwaWD8upWl05TNlmIcjPVJ9v60gGQUd+Pv1HY + /eR3PQ818ITd+A36kfwg0Z35dh47iaOzgPz72EpjJ0dZ2g7D4cHknDM4CgMtOmlUYfKWQ19mODSX + uZKeW9uMx8LlprsLNuIRZHC0h9OS1irFKtLInElDddlWUouyhSCvODt6dBTQdvov8jvBCHLi83g/ + 
GGlRGJMXhAtVKF1WjW2syqVuREFLruLLGQUDrKz0n833B9eWKs0UafO8rYgpTZ5TaCA0AHBF6Hjw + 5e4uSpJXB4G8KX/cgKF0824utJClKQplNTVA2BalzhnTstWMg7/uKsF7ijyTcNax43hXtuSLZzfp + H7+93HgL+O3X2/B6K/avbsLX+yWwgb662uvCyf924kCB++9pewqSCuab/QMFiPhDBVn5u2h/BOgB + jpyE20zeC/hq8X6ZTqFyPjiyYXg9df3116uGG3GYXt3iMHxtgJu8ethl52rAPfxnP7n/HdOm6UOl + +LkMV6Pf3RW3B8T+KLDtUO4PJ5IUCo8LraNRyttL9lBF+sfmoUrpPwGfv28dhgu6W3TBO9w5uRJU + uSy3BXAluP2LLSSj3P8YDQuYaHNKcs2NotAKq9YaSwXXVDQWypCLKayuIBKA4lGVNUy20EQ30HJL + oqCxpk2l3C2L1bXgQJAA4SQbq3PIUluUpjKsgfvVEv8hGqtblleiMU3Jy4qrttDgANWNNFXJqsL/ + SM6XALc5vgcrgtTGMOKhFjgoYQiW6K2MYFxMA9j/1jPyJKjrSBtfBw9YPm7tYDkQkeiyR9q4Rwjg + pbUI4Uc7hGDcBWEYNU8Ixj0XhlGrhuCy9D8CCvcEGpugc4ykDPedy1jAEC4PLQwjQgjBAr0JQ/jR + 8mEYsVIYRq/aAF44MAwj6gzDiI7DMKLhAvjx4lviWTLMDCBtTAQiGD0b8SCYVsTaiIsM4IX+RDB+ + TGMYMbgY3ko4xyYvfDHWRiQzhhGniOAKcQoYRiQJhhFxE8CbUVUhGmjR5phdwjAipTCMuKwAXp49 + GK4W5iyAF8INwQTxdBjeKlmSE8QKBtoLmRjAS9+EYURdYhgxngG8EKUYLlfceyBdEfihdF3rYXHr + jwOB0fZiEE/stX+aT/h68PRtsVSsh1UCmbJllfigCD1MfQlEVokkV2iV+gQOrVJ5yNAqtdUPrVI/ + akVWiQ+e2Crtiw/88AsYs0evBjdamUGbDHX85zaMNCg0i9C6CLLU3EjDPTkzBkTuUjkjDWDyeMZo + IZeOKNBwrToTGZOyKrb9gEtc+Gbehapz3/378se///4f/genyA8zAAA= + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb_query + Date: + - Mon, 03 Feb 2020 16:18:16 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:18:16 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - F3738B82Bnl6EabXfiP4n/DlxbFte7MGU1+K+9g4F1uwykMhpC6CLYsVkhnNCJlZ + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +version: 1 diff --git 
a/tests/integration/api/cassettes/TestDatadog.test_service_check.yaml b/tests/integration/api/cassettes/TestDatadog.test_service_check.yaml new file mode 100644 index 000000000..007ee260f --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_service_check.yaml @@ -0,0 +1,47 @@ +interactions: +- request: + body: '{"check": "check_pg", "host_name": "host0", "message": "PG is WARNING", + "status": 1, "tags": ["db:prod_data"]}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '110' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/check_run + response: + body: + string: '{"status": "ok"}' + headers: + Connection: + - keep-alive + Content-Length: + - '16' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - text/json + DD-POOL: + - propjoe + Date: + - Mon, 03 Feb 2020 16:20:40 GMT + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - SAMEORIGIN + status: + code: 202 + message: Accepted +version: 1 diff --git a/tests/integration/api/cassettes/TestDatadog.test_service_level_objective_crud.frozen b/tests/integration/api/cassettes/TestDatadog.test_service_level_objective_crud.frozen new file mode 100644 index 000000000..c618c0420 --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_service_level_objective_crud.frozen @@ -0,0 +1 @@ +2022-11-14T12:55:56.691827+01:00 \ No newline at end of file diff --git a/tests/integration/api/cassettes/TestDatadog.test_service_level_objective_crud.yaml b/tests/integration/api/cassettes/TestDatadog.test_service_level_objective_crud.yaml new file mode 100644 index 000000000..f1cd97409 --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_service_level_objective_crud.yaml @@ -0,0 +1,350 @@ 
+interactions: +- request: + body: '{"name": "test SLO 1668426956", "query": {"denominator": "sum:my.custom.metric{*}.as_count()", + "numerator": "sum:my.custom.metric{type:good}.as_count()"}, "tags": ["type:test"], + "thresholds": [{"target": 90, "timeframe": "7d"}], "type": "metric"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '246' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.44.1.dev (python 3.10.6; os linux; arch x86_64) + method: POST + uri: https://api.datadoghq.com/api/v1/slo + response: + body: + string: !!binary | + H4sIAAAAAAAEA3xRzUrEMBC++xTLnFRKaXf7f/IBBA8eRUpspm2gSbpJelhK392Z7SqI4nHy/c18 + WUGKIKB5W0FJaCArkh6zKjtVVZ2nVS7q5Jh/oDglBZZl0kEERmgkZkAfDq/PL4e0KKrsWNR5QWAQ + gyc3CJcZG6bAewTaGhWsa28gvYTRoR/tJJm8QlAae7f7lvJq4wYM0NRJnLApT61Ufp7EhbLpGTa2 + oRQaNQaneDWeW74jjUCi75yag7KGKASeF3QkXgkxVisjaCVC/KIbfYm7xQer491qfdxi4dvOLibc + P5DWLBrdf4LrvYO18odwi6BzuOvWW3FmmaYIRmHkxLv3zg5P/AfSDuM57qymONRCTX+DX5YoW0EF + fZdfcs1S9eoXwEWhc3wsZ293nwAAAP//AwB+Lez+9AEAAA== + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Mon, 14 Nov 2022 11:55:57 GMT + Transfer-Encoding: + - chunked + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://logs.browser-intake-datadoghq.com/api/v2/logs?dd-api-key=pube4f163c23bbf91c16b8f57f56af9fc58&dd-evp-origin=content-security-policy&ddsource=csp-report&ddtags=site%3Adatadoghq.com + strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '200' + x-ratelimit-name: + - slo_create + x-ratelimit-period: + - '60' + x-ratelimit-remaining: + - '196' + x-ratelimit-reset: + - '3' + status: + code: 200 + message: OK +- request: + body: '{"name": "test SLO 1668426956", "query": {"denominator": "sum:my.custom.metric{!type:ignored}.as_count()", + "numerator": 
"sum:my.custom.metric{type:good,!type:ignored}.as_count()"}, "tags": + ["type:test"], "thresholds": [{"target": 90, "timeframe": "7d"}], "type": "metric"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '272' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.44.1.dev (python 3.10.6; os linux; arch x86_64) + method: PUT + uri: https://api.datadoghq.com/api/v1/slo/460fe48438895185a9025bea306e770c + response: + body: + string: !!binary | + H4sIAAAAAAAEA3yRy26EMAxF9/2KqVethBDM8F71Ayp10WVVoZQYiEQSJgkLhPj32jPTbvpY2vf6 + +LWBFEFA87aBktBAViQ9ZlV2qqo6T6tc1Mkx/0BxSgosy6SDCIzQSM6APhxen18OaVFU2bGo84LE + IAZPNAjrjA1b4D0CbY0K1rU3kTJhdOhHO0k2bxCUxt5duaW8YNyAAZo6iROGctRK5edJrNSb0rAz + hrpQqDE4xaNx3PIeaQQSfefUHJQ1ZCHxvKCj4o0UY7UygkYixS+60WvcLT5YHV9R2/1lfjUY61Du + sfBtZxcTHh6JYxaN7r/iS+1grYz+xuwRdA6vlO12UrNMUwSjMHLirXpnhyf+jrTDeI47q6k5aqGm + 38UvJMpW0Om+31LyA6Tq1Q+BT4jO8Rm49373CQAA//8DAEZ/bx8OAgAA + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Mon, 14 Nov 2022 11:55:57 GMT + Transfer-Encoding: + - chunked + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://logs.browser-intake-datadoghq.com/api/v2/logs?dd-api-key=pube4f163c23bbf91c16b8f57f56af9fc58&dd-evp-origin=content-security-policy&ddsource=csp-report&ddtags=site%3Adatadoghq.com + strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '200' + x-ratelimit-name: + - slo_update + x-ratelimit-period: + - '60' + x-ratelimit-remaining: + - '198' + x-ratelimit-reset: + - '3' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - datadogpy/0.44.1.dev (python 3.10.6; os linux; arch x86_64) + method: GET + uri: 
https://api.datadoghq.com/api/v1/slo?limit=100&offset=0&tags_query=type%3Atest + response: + body: + string: !!binary | + H4sIAAAAAAAEA+xUPU/EMAzd+RXgCaSqypU2aTvxA5AYGBGqQuP0IjXJXZoOp6r/Hed6sPAxMzA6 + z3528p6zgJJRQvuygFHQgmBYF4JxXUqsuNb1m1JCswIFF7oRPWTgpEXKjDjF6+fHp+sd53VZ8Jrv + CIxymIgN4umAbUqB1wysdyb60F1AOon7gNPejyolLxCNRR02XqHONGHACG3DcpZIU9QpMx1GeaLe + dAxroqEuFFqMwaTRUtyle+wyUDj1wRyi8Y5SCDzOGKh4IcR5a5ykkQiZZtvaU97PU/Q236iWm/P8 + ZnA+oFpzOXW9n128vSMeN1sMvxWfawfvVfYzzZpBH3BjWS5P6uZxzGAvnRrTrXTww0NSR/lhf8x7 + b6k5WmnG78EPSlSdpKf7lKVIAiijzRdgzS6ql5xpLOvyvq6baldXsmFF9YbynnEUgv2ielNxmupf + 9X75U6o3lfhedQLS4mAIyfyb48jzcvsFFjjIgbxHG+mjHDfTQ0sO2mJtxoi0EZ/Auq5X7wAAAP// + AwB/jsc7QwQAAA== + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Mon, 14 Nov 2022 11:55:58 GMT + Transfer-Encoding: + - chunked + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://logs.browser-intake-datadoghq.com/api/v2/logs?dd-api-key=pube4f163c23bbf91c16b8f57f56af9fc58&dd-evp-origin=content-security-policy&ddsource=csp-report&ddtags=site%3Adatadoghq.com + strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '1000' + x-ratelimit-name: + - slo_get_all + x-ratelimit-period: + - '10' + x-ratelimit-remaining: + - '999' + x-ratelimit-reset: + - '3' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - datadogpy/0.44.1.dev (python 3.10.6; os linux; arch x86_64) + method: GET + uri: https://api.datadoghq.com/api/v1/slo/search?query=test+SLO+1668426956+AND+type%3Atest + response: + body: + string: !!binary | + H4sIAAAAAAAEA7yQT0/DMAzF73wMS3Bpta78qaDShCZxRHDgOE1R1rlrwGu6xK2Ypn534gXEEGc4 + RfaL7fd7B1hr1lAegPcdQgke3WAqVIQDkrKrV6zYDKg8alc1yqHviT2koJmdWfWMXqY92fAuluOY + 
whbjxk5vTKvZ2FZ+2Lr2yFBOU6iN86xOGp3D4bRu8f1bz8ME6Z8DZLYm7BLp03fcpqIQupY1hWPi + h0z7Fk0i1QGxYe58mWW6MxOhX9tNs5tUdiudbMizAJNF3vtdj24/C5CcvDw+J3lR3F5fFnc3RTJ/ + ekjk9vnVXOSQiJiEsu2JUhCCvzp1EYLFRQRezvJprI/kUgYnkueXk2PY/2Pll5NxPPsAAAD//wMA + ckUGrmECAAA= + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Mon, 14 Nov 2022 11:55:58 GMT + Transfer-Encoding: + - chunked + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://logs.browser-intake-datadoghq.com/api/v2/logs?dd-api-key=pube4f163c23bbf91c16b8f57f56af9fc58&dd-evp-origin=content-security-policy&ddsource=csp-report&ddtags=site%3Adatadoghq.com + strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '1000' + x-ratelimit-name: + - slo_search + x-ratelimit-period: + - '10' + x-ratelimit-remaining: + - '999' + x-ratelimit-reset: + - '2' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - datadogpy/0.44.1.dev (python 3.10.6; os linux; arch x86_64) + method: GET + uri: https://api.datadoghq.com/api/v1/slo/460fe48438895185a9025bea306e770c + response: + body: + string: !!binary | + H4sIAAAAAAAEA3yRy26EMAxF9/2KqVethBDM8F71Ayp10WVVoZQYiEQSJjELhPj3OjPTbvpY2vf6 + +LWBFCSg2UBJaCArkh6zKjtVVZ2nVS7q5Jh/oDglBZZl0kEERmhkJ6Gnw+vzyyEtiio7FnVesEhi + 8NC8Aa0zNsEC7xFoaxRZ195EztDo0I92ksG8ASmNvbtyS3nBuAEJmjqJkwANUSuVnyexcm9Owx4w + 3IVDjeRUGC3EbdgjjUCi75yaSVnDFhbPCzou3lgxVisjeCRW/KIbvcbd4snq+Ira7i/zq8FYh3KP + hW87uxh6eGSOWTS6/4ovtYO1Mvobs0fQObxStttJzTJNEYzCyCls1Ts7PIXnSDuM57izmpujFmr6 + XfxComwFn+77LWV4gFS9+iFwBToXrhBa73efAAAA//8DANZQGsIMAgAA + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Mon, 14 Nov 2022 11:55:58 GMT + Transfer-Encoding: + - chunked + content-encoding: + - gzip 
+ content-security-policy: + - frame-ancestors 'self'; report-uri https://logs.browser-intake-datadoghq.com/api/v2/logs?dd-api-key=pube4f163c23bbf91c16b8f57f56af9fc58&dd-evp-origin=content-security-policy&ddsource=csp-report&ddtags=site%3Adatadoghq.com + strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '3000' + x-ratelimit-name: + - slo_get + x-ratelimit-period: + - '10' + x-ratelimit-remaining: + - '2999' + x-ratelimit-reset: + - '2' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '0' + User-Agent: + - datadogpy/0.44.1.dev (python 3.10.6; os linux; arch x86_64) + method: DELETE + uri: https://api.datadoghq.com/api/v1/slo/460fe48438895185a9025bea306e770c + response: + body: + string: !!binary | + H4sIAAAAAAAEA6pWSkksSVSyilYyMTNISzWxMDG2sLA0NbQwTbQ0MDJNSk00NjBLNTc3SFaK1VFK + LSrKL1KyyivNyanlAgAAAP//AwCI/vbUOwAAAA== + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Mon, 14 Nov 2022 11:55:58 GMT + Transfer-Encoding: + - chunked + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://logs.browser-intake-datadoghq.com/api/v2/logs?dd-api-key=pube4f163c23bbf91c16b8f57f56af9fc58&dd-evp-origin=content-security-policy&ddsource=csp-report&ddtags=site%3Adatadoghq.com + strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '100' + x-ratelimit-name: + - slo_delete + x-ratelimit-period: + - '10' + x-ratelimit-remaining: + - '99' + x-ratelimit-reset: + - '2' + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/api/cassettes/TestDatadog.test_tags.frozen 
b/tests/integration/api/cassettes/TestDatadog.test_tags.frozen new file mode 100644 index 000000000..42bd5b23e --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_tags.frozen @@ -0,0 +1 @@ +2020-02-03T17:16:35.483962+01:00 \ No newline at end of file diff --git a/tests/integration/api/cassettes/TestDatadog.test_tags.yaml b/tests/integration/api/cassettes/TestDatadog.test_tags.yaml new file mode 100644 index 000000000..e1db5595a --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_tags.yaml @@ -0,0 +1,438 @@ +interactions: +- request: + body: '{"series": [{"host": "test.tags.host1580746595", "metric": "test.tag.metric", + "points": [[1580746595.492079, 1.0]]}]}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '117' + Content-Type: + - application/json + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/series + response: + body: + string: '{"status": "ok"}' + headers: + Connection: + - keep-alive + Content-Length: + - '16' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - text/json + DD-POOL: + - propjoe + Date: + - Mon, 03 Feb 2020 16:16:35 GMT + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - SAMEORIGIN + status: + code: 202 + message: Accepted +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/tags/hosts/test.tags.host1580746595 + response: + body: + string: '{"tags":[]}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '11' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - 
dogweb + Date: + - Mon, 03 Feb 2020 16:16:58 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:16:58 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - vG5kxpR47Wd0uZGIzWkStfMxs3cmVIjKYEHLQf0xQiHS0P2BwlwJHwTESUSKlcdO + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"source": "datadog", "tags": ["test_tag:1", "test_tag:2"]}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '59' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/tags/hosts/test.tags.host1580746595?source=datadog + response: + body: + string: '{"host":"test.tags.host1580746595","tags":[]}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '45' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:16:58 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:16:58 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - sAPKocoLMDEnM5qY2PL6SCQ+dkENYAR/6IistAQ5iiTU/UnJHAba158nxOvVRvKJ + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 201 + message: Created +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/tags/hosts/test.tags.host1580746595?source=datadog + response: + body: + 
string: !!binary | + H4sIAAAAAAAAA6tWKklML1ayilYqSS0uiQdyrAyVdBAcI6XYWgAMjxlTJAAAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:16:58 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:16:58 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - mIWJPPM06xs5rSGFgggpdD5UbOnt6ntntAO8/8YDsVuXnSmp/k0aZ5dEUtAKB7Td + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"source": "datadog", "tags": ["test_tag:3"]}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '45' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: PUT + uri: https://api.datadoghq.com/api/v1/tags/hosts/test.tags.host1580746595?source=datadog + response: + body: + string: '{"host":"test.tags.host1580746595","tags":["test_tag:3"]}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '57' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:16:59 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:16:59 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - rxkz+JB0yzarEINDeNWQGs9dk7PLNAMnAw2wV8MNkZOhKDtz+JOpGuIyyBUaWwyF + X-DD-VERSION: + - '35.2134903' 
+ X-Frame-Options: + - SAMEORIGIN + status: + code: 201 + message: Created +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/tags/hosts/test.tags.host1580746595?source=datadog + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWKklML1ayilYqSS0uiQdyrIyVYmsB0eZOqRcAAAA= + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:16:59 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:16:59 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - gVnECQ7ifaEfJ6BNPsXSglLjlU41ay4U8jXHC6V3+oC4U6gHkBb20H5zrSJj1zee + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/tags/hosts + response: + body: + string: !!binary | + H4sIAAAAAAAAA61X7W6rMAx9l/6e2wAJH3mVqysUErdjogkKYVs37d1vKOvU0aaXfqhSC5Vj7HPs + Y/O5cGLTLfjnotYOrRYNPJvOabFF/roFlaGkKs1BMoVAI0aA5TmDPI2Fv5CFpMlSLpVwQpkNDD42 + Vrja6A4aUS0PThf8z+IZm8ZAU+v+HSRqZzogi79PC9G7w997k6ltX/Xa9d72KehiH+6x//HMKsGE + SipSWDMZA5WCQaVin0OaElpJkYmcBB/nQ6tM91zWWuE7D9uFwzrNePqwYxz2D1PYNma39fDwB+Ai + LQqHqhSOx8TnTSKIExdlnBFO8o9pNDOQrnXnhJYIteIxi2mRkzjL04KxnCYkDnr0YL6Y6gxHl07U + 
LY/IcvjE6Uy7bGp3jPAk+IRGSZpkeUKzlGTshOFzR8uNNX17ZR5jGSmuPETFWhFfhIoCFRWFSlQM + kDGBKhOkKC4G4bBz5VAWFhsUHfJu1zncQmtNhcsw+OEK7Tu05dC9vPRXr/6Gl0Mz+Z+Z3V9a3NSd + szt/BLVqjW95f8lXK/9FSqW7b5c//qO0WMaMLr9/LxG2B+76umlhODh1PKO6VW1Renj5zR5GsbiX + 6A+jkWNvTYvw5kmPoLohG9+i9wZyqqnBOA5yOaVr7KLpsd+9davA+gzvlfhGNII3Zhw7GwOyqb34 + wtBq+0k4TKh9GYqP+xmZSjpcNyFOxtyI4upels12A9pA3U5ZmtEyB227l4fRz0zBnxpfVP29cQC6 + acLHZelV9cWrAQ8tN9OzM8DS/RZtLctv16Wv3yhKc5LkEc1ZeMaFBfxnpLldi1z2nTNb8KsOKW7x + dn3vPnZfQf1aW6P364/HSPVyWCUHoAOdOcrtQ2rvpEJGPoMse12QjenV2vRa2V3QLsydNQ3yN6zG + yXghy1F/lLLYdfM2orMnLnbJOPHgToW6vE9kkcwzlkeAVbQe1vEUqPK3Yi0YLRRbS5k8fJ/4acsz + rxq/9Pf/fJ99RZqT1C2vSMdxB8i5oeYOenFmj/3/oA6EcRWKX1//AIsDAeRzDgAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:16:59 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:16:59 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - /Lq4EjXKMzRKp9qa/TaJTTVqSY3uTwQpdi8SFIU3firYrLG0qdPC+ksTJBROerQS + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '0' + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/tags/hosts/test.tags.host1580746595?source=datadog + response: + body: + string: '' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '4' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri 
https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:16:59 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:16:59 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - 11g4TM+MO8VJV6iUJTOff4hAGEXsIqbG4IMv2YuWygOleCGxCxx6NihCkVtjenZN + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 204 + message: No Content +version: 1 diff --git a/tests/integration/api/cassettes/TestDatadog.test_timeboard.yaml b/tests/integration/api/cassettes/TestDatadog.test_timeboard.yaml new file mode 100644 index 000000000..80b801a43 --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_timeboard.yaml @@ -0,0 +1,322 @@ +interactions: +- request: + body: '{"description": "my api timeboard", "graphs": [{"definition": {"requests": + [{"q": "testing.metric.1{host:blah.host.1}"}], "viz": "timeseries"}, "title": + "test metric graph"}], "title": "api timeboard"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '201' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/dash + response: + body: + string: !!binary | + H4sIAAAAAAAAA42Sy67bIBCGX8Vi0U19wT5xEiMdtS/QrrqrKmsMOCCBcQC755wo794hdtXLpt3B + zMfMP/NzIwKCIuxGvATRu8m8EjaCCTInFw+zCoR9vREhRz3pqN2U0FW/EUaitjJIr2UgOb6+LjLE + jb6mLN70dCmtjF7zsr4pFyIbDKgyncr6Tu7f7jlWiUbufLbB2aMxpjEr7Wwgyn4Fr2Ew2ItNizE5 + Kgrc63mTROxrBrPOkqTBgReo6Gfhv+McB41SYMuGNrSgTUGfvtRHVp9ZXZe0O7Xt+T2ljFIsMsnv + vU7sy+GliPGt4NOC4RSq2+O5o11O9oL9gJvDTemQdCKwb1HBJB4TTporZyCUFjfF1UcBEYS7qGvJ + nU29wCbs845lnx7Yr0m2uXXoQViNPkS/oEnepeJbDjiXIfRbiCCGj1d0aNRJz8ZLC9r8hxrNk9dE + xTgHVlVB8sXLEp1ZUbZPkqvtWNUtPwwDUDgeT0P71J1o17SdHJtWjNC19EN4PpzfiWePP8ERtNw6 + 
sUv6twWILz7prdI/xV6PpVdoavG72V4Gt3ie9pdy1Vr/wZP7DyTlsljoAgAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:18:11 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:18:11 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - SaHvyR/hQzhMjBxXmmuM76vwlwfocpgL0LhX3u6R0CFONYqUGm7Xe/7/HyTliTFX + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/dash/1568909 + response: + body: + string: !!binary | + H4sIAAAAAAAAA42Sy67bIBCGX8Vi0U19wT5xEiMdtS/QrrqrKmsMOCCBcQC755wo794hdtXLpt3B + zMfMP/NzIwKCIuxGvATRu8m8EjaCCTInFw+zCoR9vREhRz3pqN2U0FW/EUaitjJIr2UgOb6+LjLE + jb6mLN70dCmtjF7zsr4pFyIbDKgyncr6Tu7f7jlWiUbufLbB2aMxpjEr7Wwgyn4Fr2Ew2ItNizE5 + Kgrc63mTROxrBrPOkqTBgReo6Gfhv+McB41SYMuGNrSgTUGfvtRHVp9ZXZe0O7Xt+T2ljFIsMsnv + vU7sy+GliPGt4NOC4RSq2+O5o11O9oL9gJvDTemQdCKwb1HBJB4TTporZyCUFjfF1UcBEYS7qGvJ + nU29wCbs845lnx7Yr0m2uXXoQViNPkS/oEnepeJbDjiXIfRbiCCGj1d0aNRJz8ZLC9r8hxrNk9dE + xTgHVlVB8sXLEp1ZUbZPkqvtWNUtPwwDUDgeT0P71J1o17SdHJtWjNC19EN4PpzfiWePP8ERtNw6 + sUv6twWILz7prdI/xV6PpVdoavG72V4Gt3ie9pdy1Vr/wZP7DyTlsljoAgAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + 
DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:18:11 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:18:11 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - WatxAL43AyqgfI4tyA152NzYM3DLdjL7IWr0SzhldiWriTsbw9vUaRZnaqhOCdUk + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"description": "my updated api timeboard", "graphs": [{"definition": {"requests": + [{"q": "testing.metric.1{host:blah.host.1}"}], "viz": "timeseries"}, "title": + "updated test metric graph"}], "title": "updated api timeboard"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '225' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: PUT + uri: https://api.datadoghq.com/api/v1/dash/1568909 + response: + body: + string: !!binary | + H4sIAAAAAAAAA42Sz47bIBDGX8Xi0Ev9B5w4iZFW7Qu0p96qyhoDjpHAOIDT3Y3y7h1iV+2ueujN + Zr6Z+fF93IiEMBJ+I16B7NxkXggfwASVk7OHeQyEf78RqQY96ajdlKRX/Uo4idqqoLxWgeTYfVlU + iKv6kqr4p6dzaVX0WpTsNroQeW9gLNNXye7k/uOe45RoFOqXWUJUMkt92dqUPQBQhiplZ4P17gpe + Q29wJ58WY3IkC8LreUUj9iX7PQhmnSXE3oGXSPh+0fu6QAMQAFFqWtOC1gXdfWMHzk6csZK2x6Y5 + faSUU4rDJvWz00n7vH8uYnwtxLTgcTpizeHU0jYn28CuR0fRQR0SNwo2d0eY5OPmkxajMxBKiw6K + 8TP6ANKdx0spnE27wCbZ102WfXnI/txo9UGHDqTVmE/0C4bnXRq+1kAIFUK3HhGUYfMVkxt04ln1 + yoI2/0GjRXoDZIxxDryqghKLVyUmdUVsn5Cr9bNijdj3PVA4HI59s2uPtK2bVg11IwdoG/opPO1P + H+STxxfiCD4F6+SG9M8I9s2O1c0WAcoXn3ir9H5x18P0agu/wHCLv8P3KrjFi+RjhbXqyt70kfsv + LETj7AgDAAA= + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - 
application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:18:13 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:18:11 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - yL2jMz/muX5URjcdpTHzlehf0qi0hyVxH7uShvIhWEeYrIRwdt0CU/7wzCTakK6N + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/dash + response: + body: + string: !!binary | + H4sIAAAAAAAAA7WTy27bMBBFf0XgolnUkkhK1AsI2nWKduVdWxgjkrKY6OGQlJvE8L+XtGSkqBHE + XXQjEDOXnDszRwckwLTSoOr7AWkJYjMO3TOqGuiMXLmIGSfNJapQDDsV70ns9TFhWZqnJVohIQ3X + amfVOKBqmLpuhayynb9xp35MmLDSfbEUNyZYq17WI2gRJDi4gyGgmOKA5BVJ3UvclbdSuIs+HGIS + JnhNMpesMI0YoRnBHzGuMHbiQf7aKK/dggiTl/swf5xc+BR6Nbc8ualdRwcklIG68xWW7loYxMnp + vdIqepgGDvqzAAti3LaPER97Xwn6y2aCL9Pgm8MCtNMsHc/tK7MB0Ss3DqsnP8PR15hzwLk0ZjOH + kJO5y3upVaO8rVkve1DdO6YU9+NGrbU7U8WxkXzSMtpq2Dv32juP52OcZVLmDW0gZwRYzdIiIUlR + AymoJIkoP5nbtPggbrW0ekTHFepHsdhZ9kBDnKxJUjFasTKitEjyctnDcfWP0BQlvoAG9c/BtHNj + lyJwjAX2TMnrYNFb+b+Zmb06ZoqKkAiXOWPFJTNP6VNo7UvIhz+ZWbxdy8ygeDt2YKJ+cr9A+xY3 + 3xZZ8PUk+1+wvOfmemAI42ldA4Ysy2uWlDkuKStlQ5looGT4WmDOS0hZQig7A/Pz+BsNgzUbcgQA + AA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:18:13 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:18:13 GMT; + secure; 
HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - NclXS5F5t+kukUaODU4jY2oSI1KBdPHFdFhJZNfbXLWDOThxbCLlKKmYvikjdDSg + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '0' + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/dash/1568909 + response: + body: + string: '' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '4' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:18:15 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:18:13 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - wB7h0Rt2IYxDUBLtoJ4y0ZOq10ZaMdDZiRuFZ3d/FUUtC7gfBEZWTs0Y6dZhoLZS + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 204 + message: No Content +version: 1 diff --git a/tests/integration/api/cassettes/TestDatadog.test_user_agent.yaml b/tests/integration/api/cassettes/TestDatadog.test_user_agent.yaml new file mode 100644 index 000000000..6990a10a9 --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_user_agent.yaml @@ -0,0 +1,51 @@ +interactions: +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - datadogpy/0.38.0.dev (python 3.7.7; os darwin; arch x86_64) + method: GET + uri: https://api.datadoghq.com/api/v1/validate + response: + body: + 
string: '{"valid":true}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '14' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Wed, 01 Jul 2020 17:22:12 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=89; Max-Age=604800; Path=/; expires=Wed, 08-Jul-2020 17:22:12 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - SaHvyR/hQzhMjBxXmmuM76vwlwfocpgL0LhX3u6R0CFONYqUGm7Xe/7/HyTliTFX + X-DD-VERSION: + - '35.2688709' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/api/cassettes/TestDatadog.test_user_crud.frozen b/tests/integration/api/cassettes/TestDatadog.test_user_crud.frozen new file mode 100644 index 000000000..cc00e02ca --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_user_crud.frozen @@ -0,0 +1 @@ +2020-02-03T17:20:54.020888+01:00 \ No newline at end of file diff --git a/tests/integration/api/cassettes/TestDatadog.test_user_crud.yaml b/tests/integration/api/cassettes/TestDatadog.test_user_crud.yaml new file mode 100644 index 000000000..a590424e1 --- /dev/null +++ b/tests/integration/api/cassettes/TestDatadog.test_user_crud.yaml @@ -0,0 +1,1887 @@ +interactions: +- request: + body: '{"access_role": "ro", "handle": "user1580746854@test.com", "name": "Test + User"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '79' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/user + response: + body: + string: !!binary | + H4sIAAAAAAAAA32Oyw6CMBBFf8XMwhWRZ6GQEP0IXZuhHaRJAdMpbgz/LkiirtxN7tx7cp4wMTmo + 
nqANY2NJQ9WiZQqgw0FbgurdiIWMiiyXIjt5Yn9QYw8BDNivhfOS7C4rJwBv/DoaJmsDMHxF3Zvh + w3Tj94lKEfN1i8CNy/hBzrTmx4F6NPavglHjgofO+ztXYcikJkeHm8MHenSrZ7idYdmIQipUeRrp + olFxJIqszZu0lbIUUZIcuc7kXteO/CIzzy8ID459GwEAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:55 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:54 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - AZX6w/8zD+VN3BjlP7mTxsWKLW39bs6QmKw7eyNlBdxzsMsZp5eTFn4umzElZK4n + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/user/user1580746854@test.com + response: + body: + string: !!binary | + H4sIAAAAAAAAA51VS2/bOBD+K4QOPVkOKZGiaKDYLbaXAF20KNxTERgUOYoJUKJByu4Ggf97h7IT + p2532+5NGs/je4zGj8U+QSxWj8VWj9ZDsZoDTLRU8qYV/M8J0rQ0YSgWhTYGUtrEMOfFgCHrku48 + 2GLVa59gUbi00XZw4yVgAr4U22napdXNTQKzj7C8j/qgJx1z55vT443qhGyNNk1NrewMo0Lyvunq + vm2VoFX1R3rN21f2dYRpnn2A6Hr3YvaohwxsjYjJp0xrgSA9pGL1+bHY7x2iLBRnsuKclx3QumQM + VKml1KXtWCsVa0XFKNaZCHoCu9ET1lSUqZK2JavXTK0EXTG1ZIxRKjHzPPQtsrHhnnwEbcn70T+Q + j1mmReF1mjZDsGeo/90sQ1SUMyYXxQ7i4FJyYTwRQKl3Xj9szgPfhft0mnY7WviHZADZEEgmut2E + ZVmKLRDdOe+mBzIFgqQs0d6TEEkKAxCPiF0uh7Qkf+mRdEDQmhGpY5xo4t3g8ovRO21yF0R1qiB9 + DAOZcMCMxGFN7LWB3PvNh1vsd9s/9zo4PadmTb7PnX+60CVbnci9D532JJmwg2U2Ek2NziCW5826 + 
9ggNomXN1qxe1Sp7JCvJRXbzLBmyxe1FDTYz5409SXbeDAENFY2oSmtZlTejLRWIuqylwHDXKkmh + OC4e885/Z8Q7dwCy1s6TN/NH8jMnDg6+zLR9LpxyYQ+oc4/qZYNeGLMgcICRuJ4ELIhfXAKSdmBw + oQymPqCrT9L8L6F4u6xUhct/JVRGtsnIMH6WqOmbpqEUvpVIlLKvad91XICsfyTRW522XdDR/pou + 9jn9mtAU93hjro3Hj1Oh92vGs/GCLZUQTIoLn0vD2f4LIato37Q1LW1tcX2er0FXc8pBgAKofkTo + 7zC6KcRfozOck3+HTLNmbSZD5VJyXqkXl+ap3RUVvGms4a0pbduxF1QYbq7SlHc15cXx7ni3KCY3 + 5Qs+7r0/XcinZxiy3f/+F3A8fgX8uwdMMAYAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:55 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:55 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - f5hY0MW4w2fhZz0SAfv1+LF9me92dJz6mowUerU7gZ8k/CpuQLqOWzykixb5WZaX + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"access_role": "st", "name": "Test User Alt"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '46' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: PUT + uri: https://api.datadoghq.com/api/v1/user/user1580746854@test.com + response: + body: + string: !!binary | + H4sIAAAAAAAAA32OQQ6CMBBFr2Jm4YoICoVKYtQ76NoM7SBNCpjOwMZ4d0ETdeVu8uf9n3eHgSlA + eQfrGCtPFsoaPVMEDXbWE5QvYq10UmS5VtlBiGVl+hYi6LCdgdOULM4TtTh6mWJxMhe7wfsIHF/Q + tq777Ib++0RjiPnyjoDn8kjB1e7Hg1p0/q+GM/00D43Ijcs4ZjJDoNU14IiCYXaN32e8rVShDZo8 + TWxRmXWiiqzOq7TWequSzWbPu0wv7S6QhB4ejycwkZ81HwEAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + 
Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:55 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:55 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - 69kiClanS8NcBSsdd51HHifvhQSGoRbJJjhU9l40yqxQHVNrndFN9zVtFJW1OcSf + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '0' + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/user/user1580746854@test.com + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWyk0tLk5MT1WyUgotTi1SKAUShqYWBuYmZhamJg4lqcUlesn5uQopmcWJSTmp + KUq1AESKcF4zAAAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:56 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:55 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - og1WGdy+2nV+rkkclmd3Cf2I26XhV3/6yjBeQCP8aHbH2k2cKwC+X9WmhIghcJ94 + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: 
+ - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/user/user1580746854@test.com + response: + body: + string: !!binary | + H4sIAAAAAAAAA6VYXW/bNhT9K4Qe9mQppESJooFiC9ZuKNAAQethD0NhUCRlE5AlQ5SSekX++y71 + YctKXCfrU2KH5P045957br57rdW1t/zubUWpCu0tuy9InGJGkzSmvzXaNoGsdt7CE1Jqa9d11Z2z + DXyljBVZoZW3bOpWLzxj10LtTOktc1FY94Ws4IO3bZq9Xd7cWC3bWgebWjyIRtTu4Zv+1xuexSyV + QiYRViyTBMeM5kkW5WnKYxyGv9p3NP1Fvat1U1dg+kHXJjfO9GCqFDvn1wocRn9BEOi2cC46d623 + /Oe717YGTnucEhZSGvmZxpFPiOa+YEz4EYuiKMwTzgiBe7LWotFqLRq4E2LCfZz6JFoRvozxkvAA + szhMKZwcDL+HiFS1QV8aSKWoFfrsErXwCmGb9a5Sg7c/fst5yDElJFl4e13vjLWmKnv/Idn7QhzW + g72jndsOF4eGtrI2+wZuuERsjUWnN9DGPGiLDlWLmq1GIjOFaQ6oqdCD0Y8IfEZamQYBJPuq1GVj + kSnd8RqNkVX1RpTmX+EMwCOiQapCZdWgrXjQSH/bF0aapjggpXNTajWxbgPU+WNKWbTgJ7q9v1ug + Dw/OzqIzXoFXNbxW+hBP1ZYNuhOl2OgdHEF5W0pnVTifA4cqoFwbCQAd8Z8DBmhhn/AViZdRvAyj + gMUJi5ITYHYACr4ZqZFSFeYY+yrKqKNG6osQPmYZZQRzzXnKvKfFd8f7KRSfqo1Fdw7iA/pYKv0N + GPccjrOkA7tUF3fHjAMSRQHJ6a4e8w4F5xIxT90MPigmyJD7snPjs3u4c6LDrbPR/eHv2jQaffgG + ADhOoT9M0UDGJwwBLvQodDWzQHlVI1vtNIKfE/8GfyYXodYr1Plh0aaoMlE4k4MXnUODF8/N916Z + 3cidt4AbkRWJlhFfhiwgNE1IfAK3gJD7qjush7yecE5CKXGUSF8pEvY4cx1TX2Wc8zihEebhRZw/ + QR2hlTAFulh4z8vLwVO4i427mGsoDpddl1XwdER+gTRUBDJ5j8OjsRrZvZbQOiQcPaAT7f9Xnmga + hDxMGZ/lyXm2dp5NUpQnSYKxPk9R7LM8wjnUQ6xZdDFFPaj3Zq8L6ANXa0GoniAShtAG8gQJ2Q93 + oR2Vudm0ddd0gJN9AzHlZt7FZmUwuFBXrjdW9VknvMbz0fjYtK4z/X60M1TZz/CZwnRhAedJ9Ayn + R1fC66N7J7RSQoSkUpyjxfyEJYLlONM65xfQGjw+RvBWtKYNokPumIpz6AL0uyhRpvs+AfyH6SKg + JnYQkkJS7AXMjoNrRyfwAaiR/qjrSTCrxKnVmRIaWC5k16Bu7z8G6GN+fH486aawa6nnR7tHJrhu + xbFzWVnt9dvLC2CDGcNBPoTzNjTAduTiBDcqRCQ0dJ5pI4KpA2JExIRKqfAl3P7UpYay0OgOFJGR + V3Hr5yOSrW2qHdr1l1BewwfXLF8ZMcgg5ofxioTQdaH3BgkFIZTOGspm8G09mDlFzBlXOQuxL7Ri + vfriXCYgwXSWQtQkicVLEb8XdptVIKuuxtnJGXU8Po+rF6pzsQBhcdALK0LdPIlJwOOYsAmQpwfX + bnSfAlIc50kaOc2gQHIc5WQWUUx1rLnWL86S1wc0AOdG6NAhL0c3aOHr4SUhpxPxOgmvY+uz+Mg8 + 
PpqrEKS6ymRyBTD0ZStq9+Jcn56NSesOTXADPQklC4qveK3ge45hwjB/Och9m4FYXXdGn8UazmPN + 8lSECWZ5mtCXyHlXlaaBIXMtxo6au+HwW4iZrEjqiIlZwCgNOTvV2/jcjJaUUpLQFCROmpEJLUmW + ci4wzSL8U6H0TFygXdvoXsArXWjoRiC2zINRLWjAS5H+kKRjqCQOYljF8ESrHEOdUZSmOmdEw/Q7 + j5VHOcUpZVi83EVH2IZNA72vHsvG7K4LFqvd6jMc7oRctydN96MA3SK3SqNH02xh2pzvYhJGoW33 + e9BzFkSgrkE4d51YlIcxbXDdyRxxtHRShzC8YMma7F7nIr7b45ptBepxTNlkO5vz7lVowGiLMJRT + +ALxxkyc6oiqlKk4T+eA6IzLNIqxwjT3nr4+fV14DSyMsLaXbVH0u/r4u945PXr5/xFPT/8BLR5h + eL0QAAA= + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:56 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:56 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - PmDXJXCpOnq24qtagNCLPTUoILSRgi3DGaXUca70kUEAM8DZBLYkwSVilYSYEHCG + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/user + response: + body: + string: !!binary | + H4sIAAAAAAAAA8y9W7Omx3Gl91cQuPCVSNX5wIgJy5YPE56YufBIN7YjJupIwgIBGQ1qRp7wf/dT + m5LwvZ+AZnVVsWyJokSqmb3zXbtOmSvX+q9f/+FT++HT17/53//r1/WbTyl/2+rXv/nxhz+0v/j6 + d+m7+m37+jdfS+u1dcqLKJ367vv8ff3Hv6rpx1S//+3v/q9fl+9///VffP1d+v34o//h4//Lv/7x + mx/Hf/a7P3z77V98/c2n/5Tq77/57uvf9PTtJ0L/8P1P/89USvv06T/98d/6+tOP/If/of3wTf9m + /CT/9Ofb79M3307+JN+U7/mLvv7djz/+/aff/OVffmrlDz+0X//2h/QP/Mg/jB/3L//4f/5lC70p + 
r2IRSSb+qccWnXDN21CFbOK//fRvTPhv6r/5of34w/df/z9/8ac/UfA6WvMnPlH7z199fK0/80f6 + 7M8y/5Fc8En3KJUvvZUgkjVO8V86K2+bNV/8kbQyRtrw/4uP9PmfZf4jiaxVtFkFmXVoQjVrtVQq + CJdaSt1/yUf6sX368ZvvfjvW5S+tMvk3f/wzX/0tf+jP93v0uZ9k/ttYpXrsUVejVNIxKQH62dUk + apG5yC/4NupzP9E/7UDqyrf57E8y/22KFS16PomT3nXZk8i+6VCdS6pU9yW/N3ri2+gr3+azP8n8 + t3Giu6alqELUnnwx1abse7Ku+OSU+9zvzT8dG/9ygnGS/dD+869/X/Iffviu/eLp9d99/LGv/n35 + 7z/+2C+vrD8ekL98inHaPY6xP/75fz7F/tRPM/+NYte6Kul7d93lEj2/PiKHHIMK1pr6uW/0dsj3 + 77//q/Zf0u///tv2PNL/p++//+UPsXme/+u/dD73bEJt3RnLcZ0jG66vxihTgqkyxd4/l/v770f/ + 4fvf/sJm+8cbzC9cZ/Z+D372b53/ACok6WpvmhuLzk4UWa0NXpUsupRefe4DvIH/2+9/NbaPX41D + 51dc97gFBS30z/9C/MIf3vgl4Xb1mUvfL/x9bz/c/HcLOgTvXZOuBRmL53en9Rh6z67IIr7kQPr5 + H83+/HcbX/irP/w9N+f21fjQGx/s87fkuZ9q/oOJ5kp00oWma4opjjtzkKaVFJLq5bMrbeoXzf38 + B/v5PNzGd1v5RXv/4ea/W+pdZJOFUpLfNeNljTn16nopMary2avz+w712x/ab7//4R9//X//4dOn + 9Atb1df/8x//0Ff/2/hDv/yZ9natz/8k81/HeeOEiF7qJEIuztnE8WVsLJzyWnz27Hr/Or/75u// + /vtv//HH9uvfte9++MXz/d/+8x/76t+OP/bn+kJ/6qeZ/0ZdGMMnEl1U2YtNhpMu+1ASK7CFlj63 + xb9/o//zmx+++fXf/eG7kn7xXfG/fPN//IHbeeSfRatf/bs/fDf+tajpM3vV3i/T536o+c/kXGu+ + K26JViabrQla6pCTDKpJXeOXfKbvvim/+/7b9OnXv/9D+1R+90tL7T/80x/76t9//LE/16/Sn/pp + 5r+RtMXknERyzmer+aWKysbWleV2He2XFDv+P366/8JfP/8pWunSpaS9Gi8Lflts4d1VRWQj0rF/ + 0b78Q/r9N79u3+X06Xff/NLvyv/Kn/nqf/zjn/nlX5QvvUo/3xSf/TnmP03oTuWgTKxSyGJ8KcEX + 2VpwMkcjP7uS3o76T+X7H3/8df3mv3z/3S99mP84/shX/8P4I7/8XfY2mM/9FPOfRXiX+BLVR8nN + ulLBijlLGZ0xhQfXZ29A7/vwp2/TP7S/+/XfpfxD/cWT/D9+/KGv/t3HH/qzfZvP/iTzX6fX1oSR + XuvSNffomqWtsvDJZE4+5c9tv2+/NOOOXH5oXJFfnyKWQ8kLFbwQ4ueviqOw8dcf/7lREvvnJ8zr + f+6XP+KfXHifvzX+6b96+aVSREi6UYvmeVKFKioL31yVrEv+DX/gwwbpfVBSfemHffnP/dk+7C/8 + Lvzyjzz/G6u4lZusquQt2HjZVB2sSUXVLoIpn68tTfzGeq/H8aqit1Q7539jn/+5qx/2sz/y/Id1 + VWlLU6XnlvlHNCZEXonOOM21zH3J2/pn4JfG8fDUwfGgij781Tff/dho7Pz4zffffdQvPv2KZtJ3 + P/6qf//Drz7Vv/tV+eZZyvqZpfoW8uY3X85mHo6sTMs8sNgsWlA2dApl2o3Kqqk5ar23M0srnNEu + OOuED/4IHM+QV+FYzWYeDms6e7cNsarKL3HhJm44M5XpjR29f/Y596e3nT/CEbQOrDqlz8HxLyHv + 
w7GQzTwchSpDTa5YycVCWW28qlHoJiutBjDZXR1KC6/p7Qhv4pnNyj5C3oVjMZt5OJoPKVHFdlJb + WzmV09haPBcgQ4O+2F04Ig8+ZRW/Ul7KM6vjEfIuHIvZzMPRaUP60Dk8VGuWl3IuwWZBS66WQiVq + G47IsRFMNFEZdQiO15CX4VjLZh6O2qoTrAwqylKYYFQ1tEFt7lXZptOXtIp/7mYFFJZOCLsVPRFx + BI5nyKtwrGYzD0fstavRdxMezkcL4/MlF7WMsrRod88O+AAyWIp0inLdmZvVM+RdOBazmYdD2u5D + DaYLKwzNKus9j3+pjPO6lrj77uDbQeixzgQf9aHV8Qh5G46lbObhMLDGbBj9L05yyalePaSNKgzI + BCM+Wy+buOg6xXWNU8NCuopnzo5nyLtwLGYzD4eCq5dMsVblXKFcmU47qccaa4jRq88WvKfg4OoM + sN5xgpy56Dr1GvIyHGvZzMMRaaZHbahBaZlrkCJ267Sm2MfTXPvdZyDfLngOcWUpWx+D4yXkbTiW + spmHI3cdMyVQF40qQVYYNFJqIUIxSZm6fbOCOE11EdYsTRRz5mb1CHkZjrVs5uFIShej6UOpWAfF + RCaIXSnBaiutSbtZ9JZOK2etpDUs1KES4jPkXTgWs5mHwygq2RmeD+vC0zF1KTjdG3xVpUTSn6Vp + zJwd3KkcfCynnRf2zOp4hLwLx2I283BUFTVTClbyAKEha5yRtIXgUxceiNSyNl/ljgI7lQXvqVmZ + U3C8hrwMx1o283CEUGBgqlQ5yfsYAYjsXkyTuE7rPPXdmpXzXA501EHQoDr0DHyEvAvHYjbzcHRq + hzHKbJKlk1dgdBTdfCy6tF7y/tnhGV2gBwWhD+ramc3qEfI2HEvZzMNhXEvs7KEpGYMog1eSSzeh + VcV4wxcNWP1szQrGgXUSMOhin6noUi54CXkZjrVs5uGACScKNGdjupE5RypLNDqU1Eq7wnNw9+zw + JgQbHBVEZU6tjteQl+FYy2YeDg6LLlIILBKpJS2ilCjles3OpVPJnx1wmblZ8RxXEQIp3UB96N3x + CHkZjrVs5uGAuhQkkyMMGY1RLCtihpzcEjwmblZpt9/BYlMUcynhO1bIobPjNeRlONaymYcj8fyu + Y7aJZgfrRMEQT1pQyHLROd12a1b8+MSN41wi5BE4niGvwrGazTwcmdlvCQcNOklX2Uevi/amyWY7 + hOuy2+/wjM7SJYeRHHleHoLjNeRlONaymYdDZmpKNmcIAXy0xpWXv1FzaNTcqZ5sw0E10gvNy4Pp + g0Or4xHyLhyL2czDUThje4TmJprJvEAqYz7Kt8q5zvxE3IeDfq+JgipiMGfODg9L6KeQl+FYy2Ye + DhgL1NOtoEpiAn1UWagnRsHYQ7Iw/LfPDsOdGbYVQ1xsV2c2q0fIu3AsZvMFcFiGcIysvA4Yr3A9 + MLmUK607k1ghdbffwRwZv8qGgr3nWX4IjteQl+FYy2YeDlWYFIbwRnGkae4/osBLbIop2KIk7OfN + Z6CH4eiNpr/llDnzKn+GvAzHWjbzcHCfhSqtXK3VVC0MPVqR6ArWSsFKl913x+AeMs7toUzDezu0 + Ol5DXoZjLZt5OGDyRF+bQb6FNnnoCSWFYlUKuo1rxG6BPUTuuTBTWCB0PY7A8Qx5FY7VbObhMM1Z + aoWj+cQMdUP3g23K9cT0Ru9d7W5WITKEzIitthCz4yE4XkNehmMtmy+AoycnRef+o3NrsSlhi4bP + noym2Lv77qCjGKiT8KiBjS2PPAPfQt6EYzmbeThcYWvKgnoVhaoaGQ2j91RjMTAYMpS4vaMcla+A + INrghEaFGsiBcZu3kHfhWMxmHg5tTbaiwJTXjIl2sAiMQcGZZmbfNLvJQoQQAQ4wexQPc3fkVf4W + 
8i4ci9nMw0GRymWmUAuCJZXDoycOEgNVTTVUc8xmc5amCeMdyA/Bt/LuyM3qLeRVOFazmYfDIK1I + f0OlxDHOIuH49kHb1BVNQqs2SaF8O6iMZKEsHJ8zm9Uz5GU41rL5AjgYgvUJogcEA36RU+6MecTB + xRms3bh7djCloAzvQB7lh7qBvCZfQ16FYzWbeTgcDVif4Y4Y5el3dObnkQOEus6DoaIctHmUG4ke + KKRaThCnz2xWz5CX4VjLZh6OgjZYEt1UBAyQMTDCVkvVJER2rMaTfRcO+h08PFA1iFBKjtyszCPk + XTgWs5mHAzCQr9Efo7MxoXPnJDT2QjExe/pPm9QFqodG0lWUqAk6eQiOR8i7cCxmMw9HYjbTF2qI + rhpGNQ1aVC4xecP+Ql9wl0miDK8/QRGRG8Oh2cC3kHfhWMxmHo7IxYrdSurCtKywkSYg8/gMFMDx + kXm3SEKbCF0t6iPQI6BHndmsHiHvwrGYzTwcojDoRBM2u46iyGAhBm66aGUHlO1K2j7KY2RoFv46 + 4iWoi5x4lZtHyMtwrGUzDwfb09BfZF62DfHyXBM6TmqMacJe+BMyjH+aZ8UQGqNuUEkU2u+Hzo5n + yKtwrGYzD0dEohgREjCRSKXTn4UER+scBqISzKV9iRDdz5BCKRzSQhkzVVahHXBkdTxD3oVjMZt5 + ONAvg+8PEIx4JHi0bFEyRjq2vSECJ3drVh9nH0X7j9nAIwMFozT8EvIyHGvZzMNRA8pUhom0WOiR + I5uvauEZQtejjtH8TerCEDmRxgphA8vjzNnxDHkZjrVs5uFIAkMPzUR5QBuSrUoqrrqcuwCClFLY + FFBihwINrlcMAfhDr/JnyLtwLGYzDwcvDDe29cw7PFZTUD4XWJIgqCyRUvK7NytGNGXgRe5pmpsj + lGn1DHkVjtVs5uGoiFSgbs1Hawm1CuubhSkNyRmlUJrlm81ZBREGeiMUrnPdwGfIu3AsZjMPBwzB + yPsCcTcIDAVlBEufNmnJ7Cwqv2G3ZuWQZKIwLKEgooN65Gb1DHkXjsVs5uEIFKc0rw3mOWhvIDic + smWcGbbPoJKo3ZsVgunjDGfgjYG0I9QFxhNeQ16GYy2beTiEpjPeSqQjOwY0Tc8K5amek4Mpauom + sUcxqmlgICK7YOj+nlkdj5B34VjMZh4Oo1JmEuaDYqBVq4UpD0QYfLcpckPdPjs8XijW8eZg2z3C + swLh15CX4VjLZh4OFJYLM8wOfTGVLDPMzZZKz8O7zEzfrviecgi90Q7kjjCUMM6sjkfIu3AsZjMP + R+IFWBB+VjB0mS2XLReLAQREHKS2m9ntlX/IM5lBRcR+40yR5BnyMhxr2czDUXiNN/TdAlZ2sDdd + koz5w7QSFf3WbDbPDoBgThO3QCFPFUneQt6EYzmbeTg6fqSIwUOapiySu+RaRUuQ2U3uVmPuf685 + i3CcBgjUu0nlzDPwLeRNOJazmYeDCxXKRvAWKKejhCiyLDjQ2YIeRuVA2SySjAScxJWGiTQZjlx0 + 30LehmMpm3k4UkYqNGR2KcYKuGahSakc3pJ8Pgn5Yx8O3C+Qx0KAHUhOHOXA8RryMhxr2czDgf66 + pPlUbcYK1guMrrAAY4rWwn8rUEl2NytoxuxV3HRxHj60Oh4h78KxmM08HEw6oYKRCgJ8lA4xioL8 + geQHhFCsabvZhoMCOyy6oXOJqdKZ1fEIeReOxWzm4ZA01zSv8orCEZokmVsWUwVJIE5ibN59lVvG + bx2nOGMKgrfmETieIa/CsZrNPBzokDA3wIwspsQNBzUmwA2NKCF6EC2ZzVc51SpKkij24BiBZMEZ + OB4h78KxmM08HEO/qmJ1wrg/PbvOXDOUK0gMqCgxDJ52j3ImAjn9eAIKQcvjDByPkHfhWMxmHo6G + 
rrikVS7oklMs4Ro01Nel6tpTvtqdfoK3zj0IOjZLBAbcGTgeIe/CsZjNPBz40tEPpIfQah/eNrJp + TNM1D5HhBr3bK8cVhGkCmrz8F6pWZ+B4hLwLx2I283AogfohpNBmw4etZEzMpo1/YmAJAdHNgQLg + QCM0DA05xT8fguM15GU41rKZh8NixUXJtTY8+GjcoaLLHG0OrdUhmCA3KdPAwfMvoOTBEXJsdbyG + vAzHWjbzcGBLwLTd8AJiPrMwTxkx40qd7+e48drt1UETnnNc00+BEnFmdTxC3oVjMZt5OEzqOL1i + /56GMonTgjFBzZS5MbzRtdjslVua5IMxTeUe5u+ZIskz5GU41rKZh8MmRgI7L76WGCmAuU63g2EC + 1zs1d1+3N6tgMDjE2YaGxxk9q4HwS8jbcCxlMw8Hw+Oeqi5MeaZbqZMw9KQNRSyYg8OXa5OFyLfj + cQ8nxY1p5iOk0LeQl+FYy2YeDg7wjgw+DufQb1AJDbVYKM4MeQSUYtTu6uC9gQa7H814+FxHzo5n + yKtwrGYzD4fDyKZXBv2RlkZxQUBAbJ33hxikxGZ3zw4SYL6DvglVqzOU6VF0eQl5G46lbObh6FTU + C+yFbgTO3BlxylhkcCajVUJVd1N8j2+HjgDDHXzCoM48A58hL8Oxls0XwNGoueKOXipWjmmMBsqK + POWHjK5i0mOz30FFGlKt5G5Ak/YIz2oQ2F9CXoVjNZsvgAPem6duyCvDOOxtGirECsZ0CBlzx7Lb + 79AG8QvWiEeO9NAz8BnyMhxr2czDQc2KjhPXXKzprIGzEDD+HapHWEbQTt0tsHNnHjpvdASHotWR + o/wZ8jIca9nMw4GvbPcZMSuNh2NGgKEhiJ+YuMHn1KAWurtZ0QZEJxQvQjA5U9HVj5B34VjMZh4O + urDQc9naYR/CQEQkVEDwQR6mfDiM7x7lZth04vLMdIKPZ1bHM+RVOFazmYeDQxbRYbqx7Fqc4lQO + 0Z1i5tiVofwddo9yBjt4TX74E/AXHdmsniEvw7GWzTwcDNcg2COLR/oC5beOa/nwEEQkpvISLLvv + DsZHDIUwrFPR8juzWT1D3oVjMZt5OAptjtjQjuxjth+Kc8IVG8o0T0FKsHm3SIJTOReCyKhhCId4 + Vs+QV+FYzWYeDoq3jPijfwF5AWpV4nKFVxomErLzW+12VwdTx5IJXEBh2Z0pkjxD3oVjMZt5OEYD + cAi4UDkuiQIJr47ccYrQrg4B8N13h9WBrcqiFUof6sxm9Qx5GY61bObhQB69ROEKszXd4ABFZxCJ + KwTzNFYFqJttXnSZ6qA6MkR0B2n6yFH+DHkXjsVs5uFAszVB+u/oWjGtGTObPFNj2GsijIEC7u5m + RR8LqX3GcNHfs2duVs+QV+FYzWYeDrwBJcWpjCQMBSuMIii+ZvqCwybdbgu3omTFax8o6BkhJHdk + dTxD3oVjMZt5ONAIjVByo4J1iAA4S6KjqFSSiwwzwy/Z3Kx4jtNUpPrC6OwZlWnOodeQd+FYzGYe + DoxUxlkxxEiadql7mdiz0lDkk9jPbjoUWMyk3CiP4MVFnf3M6niEvAvHYjbzcNAzrUMZVPFco5g4 + WCScHLir4F5X0NLdXB08NiiQ0OrV3N3OMEmeIa/CsZrNPBzcaynmUry1KLI1YypKRyVZBtGQOUaq + ZBcOClbM5bNRQSk5BMcj5F04FrOZhyNSnKqUq2CN+JKxKGe2vCommHUNFpH8XTgAgllNOaBVZ0qI + 4RHyLhyL2czDgQ0XRoHUcvn0MOCGhRnNwcqEAUIlSF3twsETBk4275koDxXYEcB+CXkXjsVs5uEY + bLSAxjS+fmPqxlXVYkF9j2ougsfuABw8+sc9V0l1aLNCU+OnkLfhWMpmHg6sZrnTQrDSnYczU5vU + 
cwfVLpZWKGJtSsQwgjbEYQhv4ZKceQY+Q96FYzGbeTi6xu6pOw7x7AsGjpzrLJak8NXkC7rdcRsm + ArklIBTz4SB/5KL7DHkZjrVs5uFouNtAWe8I7mETgd8QQ/6RJyFGwEMXcXd1ROahuaxjdYAa7Bli + zzPkVThWs5mHI3jfObqlGhwSiIgWVRLDPAEVEt9QYd88ylFOGlJArJFBRzyyOp4hL8Oxls08HFSS + hokcwrkN2dysKlrTHmGSXKAi9l1NEovPHhcC1oZ3p/odz5B34VjMZh4OKDcNBSu8CXB1kOxQ/OuP + YVr+TWhwu/0OwuFswxgzzcBD7adnyLtwLGYzDwecAmhvmhMdK2DGnwbPIDbas1hGYLHyWZ5VT99+ + an/x9e/Sd/Xb9vVvvv4ZmWl8yul3wEKEbHoOj9eQl/FYy2YeD2SsEs4EtDZyM8k2XDV5m1dRqsRJ + wu/WEAccmFFS0VXAcubweIS8DsdKNvNweIG1Kewqpo1FYpo54nk61I5s6wxZ7hKtODG4UXk6s+rU + 4fEW8iYcy9nMw2EY6uct2CkFoeHakNzLjVEM7r8UrkzaXB0kgE8dihrR4OV4pP/0FvIyHGvZzMNh + K5awCUqVb8NdczDZoSPKqnkWNgYH9266H9+OtyyDVQdXx2vI63CsZPMFcAgeexIXx0IxQyP1ZmQ3 + AeW9BDkKjattOLgP4MaFcMChoxxYX0NehmMtm3k4MlJ7IVkU8JljZghK4W6a2Ow9snxDLHQbDuvR + 3qMwxrThEaIVcLyGvAzHWjbzcIiUIh0oCriQpS2zBJDeREARcfw/itxslvPt4Lyhux8RqdRHurNv + IS/DsZbNPByW2ghz+MN4SxQYJIz2AwRSb3jPdqTgtlcHF0Mo2GPk5thR/hryNhxL2czDgSQMs5QR + D6jI92fbMsxq4gXcIPjYsito5QR+MMgrGpiIqF2eeHe8hbwLx2I283Bk00EAg1Ma5oiz6YAHMGc4 + /IUUcZXfVG7F/BdVY7xBEHwz7tDZ8Qh5F47FbObh0IkDXDXcmDF1QLIV11nRcNakca6ZgNpslnOl + Qq0HyhtHCEfwmdXxCHkXjsVs5uHojPhTnOJp0CEeSiS/OUIKVyxebxGPld2zA2cClClZdgjjHzrK + HyHvwrGYzTwcCEqH3uBU4Rqf6DzppizniEO+1TJhvn2UMwHKDkjTFybikYYHxjuvIS/DsZbNPBxF + IfQtEkx5brdO9W41/9LwNmSEs+96APPtBvODO+6QQzx0djxCXoZjLZt5OKgUFoAYFuUIWKUxM2az + RjOU0Rut7KZFgRt0Oo9vJ0P+x+B4hLwLx2I283B0g8x0g2UFw6rJjjMz7/QC1wobQRiEm8we4GDc + hsYsGtPn4HgNeRmOtWzm4aDVJEN1bUzKRpZKKIojA58hmXmt2+2LLka2mCDg2GQ40M/crB4hL8Ox + ls08HHz2EineKqoiiraTLshSMj8GOWrchzbnbVgd+Nrg/TSGn86o772FvAzHWjbzcOC5BZUEy1/k + vpmapnGOObOJaOQLNMDjdoEdYDn+sHIc4/6HVsdryNtwLGUzD4epiEzTF0cBsceo6cpamCVoJNRK + OdFsV3RHxY+2ImO4+DKfgeMR8i4ci9nMwyH1KBUy+YSgMTbW9Ik8L0GNLokc6vibKhhoGlPOlehp + 8DA/ddF9hLwLx2I283BQmBqKuXDcKIlADY/YDWHcwbCgz1yytlcHDqcKGhdipMhWn1kdj5CX4VjL + Zh4Ouk+qOh59smscuQJyCxCu4Esn1Au3DcuHOQFuYkNEd3jFHoHjGfIqHKvZzMOBkc0QzbUJhxum + /fEMRKIE+R5UYqrqZbeEyJNDsPx4nCN3cwiOR8i7cCxmMw8HlALL1+Lp53n4WZtkQzGGRyGvQVTg + 
dkuINJ2oMVGkZGDz0NnxDHkXjsVs5uFA8xAoUOuRouXKkBKWvWVMemAon2refXdgUyfpkqO+dxCO + 15CX4VjLZh4OhpbhyeAHlPF+8pBBqZMozxNwdG2Z/98ssA+JbhTYsak7Rl14hrwLx2I283CE3n0B + DlOESXSLUONLmM/S6ug1t7JL7FEam1MmNTk8QP3IUf4MeRWO1Wzm4UBzXWALO0yz8Moetg4K+/jI + yM3Q3+u7RznlKuhb6FJikX1GlISK82vIq3CsZjMPRywYBYpOeYSDGzvNUW5HJ4ZhAsTZdd5tP40e + IF1eFBwssmVHVscz5FU4VrOZh0Pj+jSEmwWTaIgpmVKRCDW5tkRpsadNBSWkNbkdjO2KS7Q9Mlj+ + FvIuHIvZzMORMZfF9wleLmzghBAJFmlM23CrwiaNx8jmUY6YsUeol7473cBDq+MR8jIca9nMw2Fb + yxq9HsQQM03yBiy0PDKmgdje8FzfhMMp6iPMuyFezhDUkc3qGfIqHKvZfAEcBalWyY2WfjllPpsV + bkNcqjz9KAwkdruBIwE2Kmw8aJ6ceZU/Q16HYyWbeTgokCiGZbsa5FBKI0U4z/gNGnCQGJzY7QY6 + xYQ0ozz8jztjb4MXyGvIy3CsZfMFcGBoo2nIdk5cNnjmzxR0kmLGvTeji7G5WQ1tNvigfsz6n/FT + wQz3NeRVOFazmYejYEvgukH4omGOppjahMhQHXwrxMAZZd6FYwik8QL8kFc8s1l9yL39S8i7cCxm + Mw/HUFU1hQMdrltHGl/wGoeRSE8Qlq6tuzUrqutU8IcPFxesM9SFZ8jLcKxlMw8HtfXR3dAC1w50 + lBR92mZxuoFFonotuzUrT6fDD51pTiZ/pkjyDHkXjsVs5uHAOSWOfYrnBp4RPQ2rtAaXnVsv8/l1 + tznrOTd4lKMGBO3tDCn0GfIyHGvZzMMxrIUyVV3Fg9z3Qv2QaWYk8hmmbZTFd1mInoUBGYbG7GiW + H3l3PENehmMtm3k4WhmOWxIOT20MovEa9xmtSotaJV7NdVNBiesURZJhjn0QjkfIu3AsZvMFcGQP + cYTuhvZDCd9VhgmYLM+mBDawtL064AihZsWA1bFBZv8IeRmOtWzm4ai9agYzmboJRSjMLxnt4IYV + KShaXAq2L7qRuVkGoOgBx0PDaEhjvYS8C8diNvNwyMCaiKlkHuKK6ghSPa3Spx1SlXhr7hbYeYzT + ZYQ2yFtBnSH2PENehmMtm3k4cJotFTVECNO1OpTFUGGAvi41hBwr8qauscNkCB4JIx604OUZUugz + 5FU4VrOZh6Mgiq8cIkqM2ySqiBmxBbg9qgxiKOM3m69yFGClxpIQZ2GO9CM3q2fIy3CsZTMPB+OA + SYvKHInq0jIaiNBbDhEzKOw7oE7vwuGx+kKEfTgy6zObFY7CLyHvwrGYzTwcwysCLgmTy0NODBEw + zSQasj2FoSWntkc1Q0A2ABY7gCBieGZ1PELehWMxm3k4mOmvzNUgo4QKBgqSQXaE31CJ8Zg/MXSz + uzpCoEEArwdA7JmaVXiEvAzHWjbzcFDao2wFI5f5b/zJaY9DIklsLNJXXfpmCRHCFuo/mnvB0EI8 + 8ip/C3kTjuVs5uHgnKCoBPGQmVle6DRkg6FSUmjXCgqxm6RQEuABSCeFB/85OF5DXoZjLZt5OLzV + zYeKslvDARPZVsqvBSoq1XYYcH7zKPfGckPjsm54mYsjm9VbyKtwrGYzD4cNsEZoxQ5jGyY2OTUq + zXKbKr4RDc+6vbPD249tDw0a5pntkYvuW8ircKxmMw9H4uxGc50mVISfGzPKoZjJY3CDb50vdvMo + Z2GgBgsQY+bmTDfwLeRVOFazmYeDSyg19VohuomKt4pg5lgy/kQFFvUKvTmqOQj4yCdJPXxBzhRJ + 
3kJehmMtm3k4inUazVyOKJT2KPPBKeFKCje44eoRy2YJkW/HpYoxdZwc9RkP4LeQl+FYy2Yejsas + Tc3DKaKiM9YZL2eJKHg+gv45Km2bZwfDCUpyfLBV0YM68Qz0z5B34VjMZh4OlwS19AB1RDH4BBKJ + MySxtQxLsyw2a1ZoUvLS1/DqcHQ89O54hrwLx2I283AENA9dKHiQ+obwsIi10BOkyj74DDTPN1cH + vQ44zoiK85Y+4xiBiulryKtwrGYzDwcq09iVSw5yg18EWxeDGL1604dcN/TaTTiGSiijI1TY0e05 + Yv7E+/415FU4VrOZh6PQ5qBS2HRuNqGFoSX3XOxVMGiG4OM2p5/4dgElK6cs/+uMFuJbyMtwrGUz + D0dDDh+dQkQoBZIXjM+milwoTER2L+RDd98djLnBetNjzIP/OXKUP0NehWM1m3k4jG6U+NjdOTs6 + 68R0bAkQYIenVpDt2X2Vx2H9C+3UaefEkW4gTOLXkJfhWMtmHg6NNs+oYhguVXgCsVjY6Ed3gqkP + 0+X+6nCWBzmDmsdKiMDxEvI2HEvZzMOBllg0fH8KfZG5cgbMoyyw3qhXoQKuNhnsww4Eyjmj5Yhj + HbroPkNehmMtm3k4eHRA5qEsAg0U8htMdmTAhsSVF93EtHmU0+WgTIx3DrxslMVPnB1vIW/CsZzN + F8AhbbaxIdnKUYGxSvWO2VnBtL9JDHhsblbcDLHIHqPM6E3HI6OabyHvwrGYzTwc9MMVVot0nPDZ + oNgOB467UEYqQSDJt1uz4tvxePmo6CIWcwqO15CX4VjLZh6OxsvAMUAOITdxs820zPtQs6KAhUPQ + 7kWXa/roytJwPCaC8RbyKhyr2czDURNzzDHjnMKJbpivCTCsEuQSWlC4emxWdGkFSqdx4oL9hv7e + kbPjGfIqHKvZzMPBFMzYnFpFxQplHZzq8JJ3iSJWr/h4bM530JT1lgs0w2PS6yPN2beQV+FYzWYe + DqRyQ8goLxTUejzXoN5Fz/gNdekz3IW9mhU+5REkcLbF/ckdeZW/hbwLx2I283DIoQ2qekMMY3hH + wDJwmalACZFaFOe2Vwd9QGSIJQV7+rNHNitKLi8h78KxmM08HNFjUB5yaEVyhhukESL+BCbRpqVf + uyuJD8mKl6AxnrkIfNcOwfEa8jIca9nMw6EdA/4uSmRB0VznDYL6ReLYyBJBV0pYu5uVw8KRAUcc + 0LGjPwPHI+RdOBazmYeD+yzjYjShcmGoP5aCCDikWoiJHOz8YxuOqBgEl3DfKLMfguM15GU41rKZ + h8Mr3npYBGbODTaWMuaYURYzyI7l5sImkyQYRBDDEL6FuiCPMNjfQt6FYzGbeTgqLzRFxQo9mNya + wyIbpTdfG1ZN3Yi2DQeTPEFBN3CoIR6hLjDZ9hryLhyL2czDgeawqmMwkKMWD+umHSY3MuUQkhJd + 7JYQPxTx8ahH9pIBwSOb1TPkVThWs5mHw5WMTKtMJrBdUc2whXZgZ8MavcC+O/0ULDQcXgoIV4wr + 9Bk4HiHvwrGYzTwcRiPyna3jmMBtCOk9FHwYHwsc4x9+KptHOYXcYS0MQffUQAG4voa8CsdqNvNw + ZMgKfHr0FYY1HXNPUEgspA8GbuBzyr0xfwy4UMIfN2k4ptT+DqyO95AX4Xj/q+ezmYfDJAFlPVQu + P9Vok2gVYbRha6gBASW3J005EuB2QHmEyxsFmPkEvku/b1//5uu/aZ9+/OsfWvqx/e2n9sOvpOWn + e4a8DMdaNvNwIAdqM5OtlG89ozf4qQQNiwGDIFVEV3s3K89s23DrRGyGi9WRQeb3kFfhWM1mHg4J + t4pBzRa5VyHSg3GyF0gjJgacRUSUb+vswBkE9SRaNkhT0p09cdF9D3kTjuVs5uEYSossBxgMaH1Q + 
aue3TyUUElAbQ/FB7tmpjG+HG8iwBh2HyIle+XvIy3CsZfMFcFBWh6mOiEsUTJQze0OtPSpXasDY + 2m3C4VlsjiOJtyXzzCeasxwej5A34VjOZh4OCntKMEReDHqtoMHHa8mhMBYBBhmMvc2K4X76HTgM + U4ARR8ZtgOMR8i4ci9l8ARwNkQUKuIb+OL/CxWYupb1hGzjYoZv9DoTZ8JuNyJFgbUPV6sTN6i3k + VThWs5mHgxWBCZqkKQ+/oDgeB1QOi0V2ITRGPPbmyoHDoHgSWIIUro4Y1b2HvAvHYjbzcPAUh4/G + I61kiiJMxjB2k2CUQEFsKuS9fsf4dp6x8tHtUPhrnlkdj5CX4VjLZh4OeOqMnyGkiy5Jt6iFQnxj + iVRLWbyFzbnyAQfq0lrx38yWn4LjNeRlONaymYeDV4bGI7Bh45FZDoVf44TdBgIltVP/2z3KOTaY + xKXdwRF85hmINNZryLtwLGbzBXBURUdWDdlmhmchkGjmMZDBEBVRJfTxN29WjEWPdiPPfuQdTvTK + WXCPkHfhWMzmC+CgNtIwiHCdjUoLJtMM9Qw8BOEtUP3bI/YMyVuUCqCvG5rARxyZ30PehWMxm3k4 + sP5t7E6+JirgNqQO3UomCCzMauKosicRM74d7tgU17lfwX87c5Q/Ql6GYy2beTjggTI6wFHOaaGk + KonOhOFfIYLR8ZLYhoNeqjPDGITppyNFEk9N8iXkXTgWs5mHA/IOwnud8i3TsgzaqCY7htkVDXDO + dr3XfmJ1BMaO2XHHwaTPFEmeIS/DsZbNPBxVIiKWCrWRpEOo+PtBEkUrxnLtRXNwr1cOHJEJUNgw + 8A2oSJ7ZrB4h78KxmM08HC00SFC8yaEKjkIV04EU/WgGwpdGHX+3hIjkjIYRihDUqF0dgeMZ8ioc + q9nMw2FQhGGutWrFdSpDRhzzmvQEIQ1iILhp40jtdRgTMBONVCgvjyNwPENehWM1m3k4mBnwdE7h + 0LJhUTFBbw+FfEyBh8x02BRuRbCC5wYiStR0weVMCfEZ8iocq9nMw2EYt2FOLGlv6P8x4moEMIy9 + 3iA1vUmZHnAw/eQxTQ2H3G3eQ16GYy2beTiYJoAISnuWqlJtmXO3Y3UKHlyIUt2+WUEXYtdTKL5h + q3lodTxC3oVjMZt5OBxrgwl8mjpjLAYFhhSGA4oyVTlGy/co00POhZFPGo104bkjnjg73kLehGM5 + my+AAxuC7vuHemRsHv8ItMUkvMSK4FjUe+M2wAER3vD2GBM36khF9y3kXTgWs5mHgyZgH5aaMtOX + Hc/ACvWQOSjcImD3lM2aFd9uqAZAUXFoVxwpIb6FvAzHWjbzcMQ+LIVQW8jMllsmNZF9a6lThmUC + itmbvYruYEszSYB+EsLGR4zqWHCPkHfhWMxmHg7hExx2laPsiQIi3YTosTetKlZEj/zmq5xvh6em + GZKXiPAdOjseIS/DsZbNPBzsTBm+NBMeQVZf6MxCneYZYrmecqzsnh0SAyPODQurER2HI0f5M+RV + OFazmYfDBxeRPcTpiZ45lpq8QRDVxbo8pipRcd3arOCPAAZUU27Nw2X8ABzvIS/C8f5Xz2czD4dM + zlJB7ILL6HBDgxFKr7ZwgIxZ8LDXnB2SJEiO4kdDKpQT5xP4Rcr0e8ircKxmMw9H5jcXLi6FDMyf + Iv8ng4FcH4bwEQwfsQ0H008RsQKUkiHFnIHjEfIuHIvZzMPBlBMFPhqClt09IWfFkA2VssykJq/z + tHd2QOgZTOyhD8OedcSv/D3kVThWs5mHw2LbgbQ0zFBcHHEORNU6Z/zLi+CFbjZdNce308jC4kdI + M9CeKJK8h7wMx1o283BwwFIfYQotCUTYh0JoxPYUmgGDGRYV182jHFZPHOPpTFThnnNks3qGvAvH + 
YjbzcBicVJBfFyXkLumPa2uRQ6wmNY9Wt9m9WXEdgNkI2QA1RH/m7HiGvArHajbzcLjkREW9SiKk + xGkBC7ExAYWFPDj11ve6gaO+5OgFjvDo7514Br6HvAzHWjbzcDRWBAo9iCEGZHocZ4ZmVrBAtsIx + AnmSzc0Ke0je+X+khJoTz0DgeIS8C8diNvNwiIKdg7alIQ1aK9QRYVLnmlUZ8oiq75UQP74dGn60 + FTVE5zNH+TPkbTiWspmHg6YTT3AuuR4vLmdRNY5cTKlqYCmP/sLeQMGAg5XBgCD1e1rAR47yZ8jL + cKxlMw8HaiTN5nGVyqj14KGSmPXgMKlB8Bipe5OzH3CM/ixPG1TBjsHxEvI2HEvZzMNRMGscBF1H + 7ZALVoB0lW0tlHghDta6N98x4IC6MIhqyJgd26xeQ16GYy2beTgQrpKjyI4qiUoqhyxQ/uaW9eHm + mDe7gcAx5PYDQogfhlxnNqtHyLtwLGYzDwdWprz64M9W9ikFN5STnBsVytMIVtLz2L1ZjQHmMV7J + ++PI5OxA+DXkZTjWspmHgyFKX2COwL2B3oP4Yc3MLw9Rs/EZ0x51gdiGtUapBAd5qQ69Ox4h78Kx + mM08HFoWSXMDmVBVnMRsCMpuTsw1c74zqLYPB4XcoVYweNOn4HgNeRmOtWzm4QiiS8FMgUaAT/NA + 4Gnu0IQefLvMv7XHYGd10ElFnA21Nw6QQ8/AR8i7cCxmMw+HoDFepGOigMpu5tmhuGohLmbgsDOF + szeMhr47MYbEDqIkjOceOcqfIa/CsZrNPBxotlLBNUWi5RKpVWE2a3ujcjUkJUPf07P6MOLiDBqO + Kmy7Z7qBz5B34VjMZh6OoXuBrHnRVlZkW0VCjxKfoaiYDWQGardmhe3T0PtmLmJIvZ9ZHY+Ql+FY + y2YeDt9aR/Sb+Rq8G+nLdhVgLfCQHjoxCIxtXnQDBBXek2jQDEXgQ3C8hrwMx1o283AojRkaQq3I + rRvHLlWgk6iGjzz22FTaN1fHaJ4NIYdxb5bixLhNeAt5E47lbObhYKCjuWa7q64HeubWVEaPLZZQ + OQZcVfZWBwkM+0E07ONo0J5YHW8hL8Oxls08HEhR2oZqLm6OvlrkWxFlo3oFfwFtjNjqJhxyeN4N + S2akW88c5cztvIa8CsdqNvNwmOwMlyk8sVEWQ3I6WejTFPy45cpKF28bDpyYh38HDuhHNEkYe+Cp + /1PIy3CsZTMPB5rflEWsHXaB1NOTcAwqOQbT6AWOkvs2HHQ6eNyjQHtGBGPA8RryMhxr2czDwatA + adN4ePTi0bCiB5WZ7h9zrhQW9SbtjUbjMACm1Qt94YjJ6RBdfg15F47FbObhYBCtoC2tC00oGMEo + 41fKr8Jzy/o4QHZXByK6VNjx7VT0fY8c5XQqX0LehWMxm3k46ArRZGqRmy4OG7LSmBgaPi0wao6E + 4WavXEsL0ZRWOXpySp6YfmJ1PEJehmMtm3k4UtS5UzWElSvhH6IMYzWWmgx4iM408+7ZMZi5SH6j + L4Zn+Zl3xzPkVThWs5mHY0CA4VOpUA4RcjH0JniDKPjTxdddtbeAq7Ol2y+gIkIAObNZPUNeh2Ml + m3k4KIfIUKGk5Y5cElQeSuyxaiagMiQQvfvu+JCmHNJMdGiPKPb8EeGfQl6HYyWbeThocTD2lNBu + TUnElGzKyOdGqyIjH/gNbR7lwOGZtEFADgWBE/bxH3C8hLwNx1I283AgR8LYnmKjaow9BZMTc2M+ + OgTgsqZdvg8Hao5DdAY98SOU6QHHS8jbcCxlMw+HbLWk4qHqDmdYnuZVMd/BGhnTx/TytuHACA/f + J8ZnsWU+ctGl1/sS8jIca9nMwzHEWjMDHh+THXhrBqTYK1fc2DBLo2G+CQckK3RJGNNEIVmd2aye + 
Ia/CsZrNPByWGUDsAnGfQaqHJgdUEjcqiHjdDPrbZnOWxhPTVY79b+yHR/odbyGvwrGazRfAQR8Q + s3gtq5OKXiqVqsZoAS1B7LjQi9lcHcP3bgzaCGKrM5vVM+RVOFazmYeDr49TXclo3WJRgOw3/J6G + 2yz1Q6pYaXPchgnNoYMYEBg7JBGDosYj5F04FrOZh4OxGiOHh0e1BSX8j3+mEYXGGMJWcvvs4Cji + aQmVBG2/Q/2OZ8ircKxmMw+HYFCsU6hKqFopzg2nsyjM4HCaVM0hvLlZUY10KL3hGYFhyxGOrn6G + vArHajbzcKC3ACKZUiXeKZzdjGEwGpgcLRA9BHV34ZBBQ6PDigsa9pmzAwekl5B34VjMZh6O0nBi + xngrUgTXTPZ3fMvxdqC6mxlSU5uUadRah3E87UDMA49I4qPg8Ah5F47FbObhkDAOUTauDTljdBF5 + ljNdkCugGJOwod1dHYg6UKunO4v45ZkC+1A4+SnkXTgWs5mHo9PxQ35dDePAEDPGDi1C4aTmThuK + TWEXDsN8OiZ19DzYDY+8ytHUeAl5F47FbObh4GbFkCw+Q6P0AHV6qMUgSglvN/Ie7JslRFxzRq3F + aYy3hT5ylL+FvAnHcjbzcGjuUbCamSdnjoUiVYQJygBOwHEoMe6xeXaMBBj+ZJ9S+N4dEVB6C3kb + jqVs5uHwpfHqwzLCWXRhGAkMmvEbwbpgBCqVzV453y7gn4M9AXJvR2wcw1vIy3CsZTMPB2Rpanyi + oFoPHbQwE0N3linmiFuEYsRj7+wwQ8rxg/GGDcKZbuBbyKtwrGYzD4cY47LI4iukYbxInfFA7DZ8 + hvLWbfGbvXIEmaSklYVhDt3AI83Zt5B34VjMZh4O3H+ZJC8pIfpdeWdY7JNLw8ERQXwkrbbhoG3C + gcTcIYMkR9pPDOC+hrwLx2I283Dg3EiHHC1o2hsUS7j0MoGGbujQG2PaZpMyzbfDGHtcrxDEPyJr + zCzKI+RlONaymYcDf6eMIUGlM55qtAViiUQbH6kS7lqUe3fPDp6AMKzGE4ZR5hPvjgHHS8jbcCxl + Mw+HwpIg8fCQ1K7wL9OO/+2q6WK81CH77MLBwwkd4uERwtzmGTgeIe/CsZjNPByURRRjaLj9ctyi + lx5HExWNEngLgof09mZFk9wNR1vNWPSREiJThq8hL8Oxls08HHQiMP+lGYFY4RAak4z2S+SsrPOl + F7Wp2MO3456GnybVdYq6h1bHa8jLcKxlMw8Hx4NBCbF7vOMtn40pf4lcLNPmGTb7rggGcOCUyrrA + exuLm0NwvIa8DMdaNvNwYI6tmJZN6KpSXELkGAdgiFHDoIv5QLfZ7xhwUEDk7Bhez4feHY+Qt+FY + ymYejsZ4vxWdmZtsW8yoLyg4og2f3VxS7/tHuY2RjY+XB+WXU6vjJeRtOJaymYcDA+ZcekQJwyWq + h9BwOrPTWBQMOYyuNjm6rA4oiPyXCiyOU5vVa8jLcKxlMw8HwzYqwRnp+DdCJ1GofsN5Y9gj4ZgN + JW77oovtBP6/uHu5cAqO15CX4VjLZh4OSQuW10aqTJTnVoLutJ+AIvJs68gw7MJBlxxfZxoe6Psd + aT8N7vVLyLtwLGYzD4fjZgvDLcOX5mWO0w3mQ3no3sI1SDJsNmfR2R9aJMi8W+5XR+bK30LehWMx + m3k4cIug+g1hU2tGBFtDHZRlAmGaQ6WjxLe5Orji8pzhtosA6RFnNBgWj5BX4VjNZh4OpHqQeoOO + y8ZkefjhxZxR9rRQGniHyN3VwapAWkzibEOZ5Ew38BnyLhyL2XwBHPBIcoOUS9MD4oJjHJC+LJdc + SD7MGuw2Z8dNBC6O5/lE/eXIRfcZ8i4ci9nMw8FYJpwbrMsazz/JlDmVcCYJmKXlADbbFV2PghIq + 
CZ6JOqowZ+B4hLwLx2I283DQwW4ZEu0QGEM8smWrpG681RFPylhb754d8NchOFIC4+g4BMcj5F04 + FrOZh0MnyeQZYiS4aEIzZ76V6RvuVgxu4lCgdoskSLN9zHcwm08R8cjqeIa8CsdqNvNwUKhiKhCl + ULxsFD2nho1HKbieMluObcGm+J7BaBYmBKIkkiLJmQL7M+RdOBazmYejUMHAeUZ6eoJ4/6ItVrVE + +606Hgx+l4Vo6KHAGKIfiFCoO9N+eoa8C8diNvNwNEYCEXyjSop7Y0D4QiWIagiTDOtZhgE2z44A + rwduD46zQ2rnzGb1CHkXjsVs5uGg+M239zFbJKyo7iLC3qiW+FFpxzF7++xg7okpNEYTWCNneuVj + lOqnkJfhWMtmHg5kPLER6FgLhYqgQJEBYgA+2Q0FcJ+3u4Fcl/XYCuXY+c4U2J8hr8Kxms08HIlZ + uzHWrxkwV8xswlngnlsU+xXM3bK7OijlUiRhApRhG3OGZ/UMeReOxWzm4UCNW+rCw+9jZLYFDl/j + WBUM/0NtT5tz5ZTDEJ2h8AYhWx4i9jxD3oVjMZt5OCilB4OYFUKeaKVT29X0PRrUHuQquf3uUqZH + 4RDuemRuFkXdI0f5M+RtOJaymYeDpmzguYFRoGfen50qQkcU1Smm0ThWdpuz9GQFCw4D+aHOdAaO + R8i7cCxmMw8Hrz6MAlWlzge/gAtVUFTYi8ULCudZu8lCBATIdJrREWl5aJ6A4y3kTTiWs5mHQ5Qy + 2NK58+kLros+Y3LDXhWRvjUMeey9O6iP4FjOUU4BkQrMETieIa/CsZrNPBwKy0ZExaSDX8XVqmI1 + xDHOlCs3Xob+N5+BUOkQKqBEEgM6GEe6gW8hb8KxnM08HKHwwkBIrHB09Nrx5OIl6GFP+2HnGDdH + NdE5oVoFS4h1R0f+xOp4C3kVjtVs5uGADWpgrTPhn+pwaGqQdauwQz1piO3swqGYUddjNgFpLHkG + jmfIq3CsZjMPR3MZmi7l74xPhKGgyBxtCTRAMps9/gJ7ZwcCGGji0wwMDPKIQ3A8Qt6FYzGbL4BD + U1Z3HUmrAPVQNOpWQ72HyiKcWqc2eVZDdA8JDNodgzd9pGb1FvIqHKvZzMMB5aaZyoys70zPjqaT + DIW/diiSVGR1N1cH4WnOfgwq4F105Ox4hrwKx2o283Dgo4nIWx9PjYSDIJ1zWyXTs0jx47DSd1cH + 5CpBEwtplXFpPwTHa8jLcKxlMw9H6J1blWZsVmfml4cKBpOV8KaHbAVaxNurY1TBYC3A0HVHWIhQ + 6F5DXoZjLZt5OJAIrYiutFYiPqeYnPZGQVzn6vk/MLzdhIOr+sccDz5rHEZHVscz5FU4VrOZhyND + O8TjiZo6ZB4aH2NyNuEKjNA0Ax/bNyvHk3yIvWnHpMeZzeoZ8i4ci9nMw2GgLLD6mctkFA3ziEHz + QURpOJ7yGePuZkVZATuVwQhFe+hIzYpr+WvIy3CsZTMPR6URyB7FEYVnPI1y6Ju0A/GWQ7QHB49t + OKDSOaBQSG2cmQ1k83sNeReOxWzm4ehIDjPMQbNOytBzg3fleAb6kuFfIcW3e3b4QAcF7dshFXNo + s3qEvAvHYjbzcFDNLajgDMEpyrj4cI3aUsslINoKM3FzvsNRWRdqaCEO/8Az745nyKtwrGYzD0c3 + rTfTNB0PKrjG0/+g6FcVSwNXzbo5UDA6v/DgGVDHYUodmX56C3kVjtVs5uGoLA4Ng93hv4XtUMIk + gtIVV12eIQFLlc3NCpNTGNksjOE7e+ai+wx5GY61bObhwMUxuW4EljaoIcbgQqE6whAzXMRxRd2H + I6CzMWR2DlEXWB3hJeRtOJaymYcDmgIVqlQhg6rOYD/lDGq7NTKWRm03bc530OrFmoVn4LiSuEOb + 
1SPkXTgWs5mHw/K2YE6zonlREb0YBsBJm44BcK+VScHd1UECaL2xzCQVyiOv8oHwTyGvw7GSzTwc + fHrKrRj/OtNdahIEOsMxSOnSVrV1k8E+iBCIwkLQRcohHumVv4W8DMdaNvNwyJpR0wkBFXwqI2PG + huIIs2lIWiHF53YvusDBEBVQ4FZ3htgz4HgJeRuOpWzm4aARSAGRSy4HODX1xn2q0TuXQ4l9CE3v + blZIszmNnCR8En1odTxC3oVjMZt5OJD+4lKrhmJrDRQPTROYePREaQibur7bK0ctS4xWB41yZqDO + nB2PkHfhWMxmHo4qK1SSBJ8nFbRVcQ9sbZhBVQxvQlbbZwcy7rRnWRmDNX0IjteQl+FYy2YejkLP + ic4TPAUTOUZ6aA49RFyZnWLmv2+/OyLzn7SxMOvkhDoDxyPkXTgWs5mHo2fXE7crDm4+P8420Nl5 + gRjPYdLx6Ns7OyJ+hNhjj5ECrlhHuoFvIW/CsZzNPBzwc0tBAGNccXXxWGtCjEoo1UrR8HLc7JUz + +EtxHbI0PnVIK55YHW8h78KxmM08HIjs4QKcfOqem1Sntu4azHVRKi6nQm4+A4crGiOaiOIzqHLG + TuUt5GU41rKZh4NrfO7DbKzKAvEfpxuUYWxwSTLUrOrmRTdKJvxZG9AgKJQcKSG+hbwLx2I283Ag + 6xa5z8JMo8YumJ5FkqT6CIu98L/sZkWX9wtT0owFQi/FQ/XMZvUIeReOxWzm4eC4GL5Cnn6HoJ6b + eHXQi4L5hnQMV97Niy5emvDeaDhCJfHmzFH+DHkVjtVs5uFwNrXRyWayVRbU8DtT+YErBC0pjOvy + 5kWXMXXOIWokKA+hE3NkdTxD3oVjMZt5ONBBhHqjqFJp7Eg7F90sJbUq3ArQ0q2bs4GI7Y9evMU4 + h5m0I6/yt5B34VjMZh6O7gNEq45BAaowUNZLi1GPUU2TDYzdzX4HlUPm1EH7Q/v2zFH+DHkXjsVs + 5uFIBlMhQYPNFKTweY4zutnoEDLriudG2xxkHnPxnqspabAjHmGSvIW8C8diNvNwaB5+glFZkwLF + peppOXFw+OJYJrm0TZ4V344X5SiQoAl0RnzvLeRlONaymYeDUUqMuBizoXQFTZojPA8BXcRieKoj + H7NZJEHl7cOsPH7MQB05yp8hr8Kxms08HIwQoN0yOuMOX00GNyMW8rilGYuXMjKJu3CgWeZpn+AS + Qp39DByPkHfhWMxmHo7cMr+9Y6YfEQyuV9KgN40bc5Jo+DA3uAsHtXUOchiITKSdOcqRgngJeReO + xWzm4egBazokkxCbDjiXd9tpzfKYRojKUrjaXh2jtYUkEAgj1XtmdTxCXoZjLZt5OCye8cxkwt1E + qyerMVMQCgJjqH8XKia7RRJMbdDXJxzjgWfkxTjTXkNehmMtm3k4jBvOENQLjYspex6DtrJMwMRD + Jdm+WWkKLqMdP9QpzyiFAsdryMtwrGXzBXD4imejjciC8hjEaYMrr6jcrZBTQmput9+hcecZvGmu + VupMN5BRrdeQd+FYzOYL4Ego5/L7yzAgk0++p1o6E1GJOXBA2mWS8O04wRkY4cpMy/zM2fEIeRmO + tWzm4UA0MlFSii2maLulmAHFD4UruoQYnfpNBjujBChgIIjPNPMhntVbyLtwLGYzD0dCybO67tDS + LRHFCihXzBLQBEk1e6u3NytLURI1K0bRUEQ8szoeIS/DsZbNPBx4oUVMsjEowIMmO3i6AaNNXbH/ + VbWJ7YvucEWjQ0sBBqmNQ3C8hrwMx1o283Dw1EAQH8tZzguNiyaGdR6+LnJj6OHzb+0+A6nk0ixH + 1x1HkGNwvIS8DcdSNvNwMEwOiYSiUsGi3HUGyhm7GSYFPDtUbNuvcnrxPGvwkxq87DOr4xHyLhyL + 
2czDgc2TRaqVSSeo0xnJHryx0Uu3qIwxzrfNs8IaLTBtOBQ1ThF7niGvwrGazTwcEKE4JoKC8Ua/ + A+VWNhcEYoYsNPNQbZPBjh6sgXHuqA/TKz9z0X2GvA7HSjbzcChG+qFs2oJnueJ6C9fKjfZsw0cQ + YbV9OAKio5FZZuX9keknEH4NeRmOtWy+AA7E3Vxn+iL5Av3QYOhYfTIwryBg2b7Ls+LbURdGRElR + FTtz0X2GvA3HUjbzcCTUQC1666245mG6IdaDHuLQ7tGjQ7j77hguB4TkbUlP5Qwp9BnyKhyr2czD + wWHhMdJEDx/CNCKInLoaPqLmjgWL0+2+O4iH9jrx6QmeUXujr/ga8jIca9nMwwFRAS18lFplRb8K + PRLsgKkxDSklm/Kuuw3fDv4hfncWsTd1RHzvLeRlONaymYcDjVC0jCPeQki3IvJdk6GOCznUC3Qx + pd18Bo4imB5yJAwchiPzHTxfXkPehWMxm3k4FHbledDdmh4PBIqIiH8jUIlUKHWNXU0SvLEpFyOs + AjXUnpkNfAt5F47FbObhqLg3ChpQuAY2XhpMTaOchA8tAwamJ7V9doACY4FDhIrptiOv8gHsTyEv + w7GWzTwcHOUcEa0jz4OjZmWUjwoWei6xCB4hblNH94MZTzwEzIbTwhk4HiHvwrGYzTwcWMcPbm4U + HOeasTStGg5QiLIb+Ilxe9zGcisIyOVpVNjPSFPGZ8i7cCxmMw/H8AFi1Ab7FJjTDJMHCcUHRHgd + tiTLLmUaLOBxMZcPIOEMz+oZ8jIca9nMw8EjECIPLw8sZntH3VgwiiEgkQhlURnbZZLYCD0X3gIj + Bcc2q0fIu3AsZjMPh5QQDplvxbeR2y7MBdrbFk5iZ6yZ03yXoztcciytDjw14W4dOTueIa/CsZrN + PByNppNQGXv3wlSaQuUtiA//DiFoQflN1QXsbPE5oBSJ6bs8xGB/hrwMx1o283Co1LtnBq2Wj0G0 + CAsOS8fIyduGrfVu+8lDvWBcgcIVnvRnLrrPkFfhWM1mHg68Z9DytLDraGaj04P3L54qqMWMbiCK + b5uvcljxNLGYtLHcoM9sVs+Qd+FYzGYeDjTd/Jg7o5xbYiqof4vI1CauAtxNGXjdhIPJKhqByLYy + ySzPNGefIa/CsZrNPBysBzYlZjkYK0gSMEqGW8KrjdlZ0+PufAe3KkicHxMFiFkfOcqfIS/DsZbN + PBwdWRiOCJQvcocKxQHioOmiG6owUcaZa3N10Accuq2GTjkN3yNwPENehWM1m3k4+PaBw7vRyR7O + dMz35wJrQbBOcOTaZpIwT0XFWIwKCdLpZ+B4hLwLx2I283Aw5ZQawnje0/fQqIzRox0VxYY9ELPH + uxddeoDRWWhWQ5XkTHP2GfIyHGvZfAEcEN0QOaqsEVortowLkGk43XTHPhN2qQuRmFjODs4bNZgz + q+MR8i4ci9nMw4EnnVZcaWuUmYZs6FXgq+l7bDbTwds+OyxHECY5PPz1GR3dCMIvIW/DsZTNPBy0 + NpplPlOOTsfQXa/QrVpiOBAXRyg5u0f5cNTkrkuD4pAGe/ww6fyXkHfhWMxmHg7mnFgOw6mjuZ5F + SzjJY3YT8BqiaKV2NUlGlQRKKLcEzvMz/Y5nyMtwrGUzDwfKh3yuVIZVFg9y6LQMgjNWUCWCCUHu + znewtpnt4Ok/KLpnaG/PkHfhWMxmHg4KuFTUS+y0ZKErMKfEyD9m2RFrx1L2N6uIXI8Ygj3MPx2C + 4xHyLhyL2czDIWCsJ45yLrzYZMs8hD27QAIDyxuVN3vlEcVLqlVGY9jCvfnEZvUe8iIc73/1/D1x + Hg7mixUCGBwUkBEtFRLmAzNPQIbRFE4FezerkQCKJBzkqO/xiplP4Lv0+/b1b77+m/bpx7/+oaUf + 
299+aj/8SjIq9xbyMhxr2czDwXPZCVxNmVJSGjErbJlxkGe0XDa0StJe+ykKyVuc/vsw92L9HYHj + GfIqHKvZzMMhaoOiENidmPKgtotbBCYFlPuy9hkzx62LLnBwHOHfCBNnqM6cgeMR8i4ci9nMwxER + wKgK+w7IN9lCXmd0LFXFOzBTPCnbqwM9Yx5u1CUHefAMHI+Qd+FYzGYeDsXkeqxVdLgLjNowSSAs + LSiJyyazlW737OB1CRkUli67/pHmLAvuEfIyHGvZzMPhhls8Mq2weTz6JLLnjFKetXjd8BbMe7S3 + 8e04/GiVc7Ni4ubQ6ngNeRmOtWzm4fjwxWZygI8vEGAIpXebQ0jsLkj5bEpTAgdaobw7MOKC2Xii + 3/Ee8i4ci9nMw4EpXQnYEmQagnZI41t8Zy1Pwd61KXp7ddBOGSWSgEjMkYrugOM15GU41rKZhyPp + mjG8pOmIWA8StdicMvLRTTQMLTHjvHmzogjJuA0dD0ow8sy74xnyKhyr2czDMezPlKiptNBiw2NI + JtzqmGvmsuVi3+uVw/yV1GCGVCg3hXhms3qGvAvHYjbzcAgEMDAnrxWCFRIxTClpfOtMpdKOR1Da + K7ADx+gECnzRhpD1mXfHM+RdOBaz+QI4GmqtaFkXiKEd6RCBvBWFXYwdMRRUZXd1sA3ylMW+A3qE + OUF7YxjoEfIqHKvZzMMRTa11eNrAMMChgDpJL1h6pAjLGYx2X+Xo7mkKxQjjM+N2gsEOHI+Qd+FY + zGYeDiYyYUgX7lI4ccUxv4wpGkp8qMy1QlVx8yhn9/ugqmgU8Y9oISIn+wh5FY7VbObhME15CWE9 + IZ5LLRGFbiZtODVYKTZSxNqGg2EC9Bbth4X8kWcg9iwvIS/DsZbNPBwW9xSKI7hD4JONCJ/Mw1il + WRVUGU5H23BACaWOiBklL/9DcLyGvAzHWjbzcNDrQIIENWsGoPA9SaVr7DQbYNis8JLYhmMMqdP+ + RYNdnTnKoQi9hLwMx1o283DA41LcpxRaSZgOaZ/GcjG42+BW4PGQ2IXDDUo2JRKDn8qh1fEIeReO + xWzm4Si+mPHrhrMmZaqcFePLuJsOe4IuIV1twsFQIPLrEeqpZHToyGb1DHkVjtVs5uEYIrp8L8PD + 3OA7pAwu2QrtBehMDNCWvYECOD3ccenDM26I4NuZo/wZ8iocq9nMw1FwS8YeMDXk3Qr+gQ0lRIxP + amR8M4Xts4MR0GEm5QMv83jm7HiGvAvHYjZfAAd6Cz13bLfCePAiwtARCC2wF2RjQH93s+IMH+8/ + 5N7YsE4IKA0S3WvIy3CsZTMPB2pvhYe4QLGVU5w9xUj8HWRyqTBxbnfhYIAKT01emjhoHZGIGQyw + 15BX4VjNZh6OpHFRsUmHXpjqCL3Dv5G0BWmYY3Sk91iI49vRkw2B16WEwXfkKH+GvAzHWjbzcEA1 + HIQ3xjnQ1eHp0bnnFm1DdwjjI4y4ebOK7PQKlhXMX3rcZ+B4hLwLx2I283BYeAupjasUXvEFKDCN + SBldynGo57i9WTGbTnl9aLfKeOaiGx8hL8Oxls08HKhTjDVBu45xAopLRVcHyYAiO/1ytBi2Vwea + vLi1sEQA5dDqeA15GY61bObhCOgeVsxNEZmmo11F4pcZIiLiYkz6+Lo3/cTZgfsJ5eGh5YAw7Bk4 + HiHvwrGYzTwcsBWCRQ2xNqzQfGJYtqOKT5GX7SrnXZ4VbxpQ4IqAzylO2SfgeAt5E47lbObhKAoX + FSY1mexXOBVonoXQ3mKOTKil7jY3KxLgvQF5gScM5eFDcLyGvAzHWjbzcHQFSaHgp4lGLAjUKjnT + O2vDCkihqe2dHdQmIZBQHuF/M1VwBo5HyLtwLGYzD4dH0hjBHoZlGU+qVNq5lmbWCERnpFzzZr8D + 
OCDII2CG49CxzeoR8jIca9nMw6HHDAHmshBpRbGBK642SPjQjcLwBiva3dXBcAeEiFG/N0dUF5DT + eIS8C8diNvNwILPeOMc5u3PqCLFTbA8SMSXGMlznN3objhCR2OEwDxDrzmxWFCN/CnkZjrVs5uFQ + sEgsBSXeadGjqYstAQJKHi1XZESYNNqFAzUrfp/xfYL1dqTAPuxrX0LehWMxm3k40AQaVrPeFTTA + sOISCLNhjVaGcMz4N/bh0FEPzwsUsw7drNAd/SnkbTiWspmHA3YuJaWEHkxgQajIsU7jDh6JoXHe + /T4c8D4YjYaniz7Wmc0KgsVPIS/DsZbNPBxQyyuEqu4bjw5UJHknSJNdN+guwGLffJWzs/AAhPOr + qNufmQ18C3kZjrVs5uHoIiteBpQxUofbgzV2xQS4YWTmIo2QzYou3w6e2jAw+qCTHFodryEvw7GW + zTwclA4LFyqGbPAsV3hiK2ZjHPYExuXeyvZFlz2PItig6Ap5DI6XkLfhWMpmHo6Sh5RnhWqO6RMu + Q6mg5WIgkzCPRitqT5OERwKSlBQkLdZFnB5nVscj5F04FrOZhwMREqw7OMo7EkrIS/dENTd1S6uW + wf/dyVn8nhyeaLwDLUX2QyXER8i7cCxmMw9HhQbKLTSJgksdc5qjyj5mlTKdKGP65vQTcAwBKNSs + jOOfz6yOR8jLcKxl8wVwCLzj4bopRsqRPmQoEB1ETGf5ex31xO0SIsVDDg08jDRvtzNwPELehWMx + m3k43CAeWluZEVSudZoeFr/ynNHi440uN+fKMZMa1arhbDMM0g7B8RryMhxr2czDgeBe5rDtPTsE + b+lJcMGtkWcWr0OMSfcsgDnKMajDkplnh6BYfAiO15CX4VjLZh6O0gqTgewkPMQbh7mD4WPxZ5Zd + 6x7qdoEdAYwAQ4i67iHVBXSYXkPehWMxm3k4EvowhdnZgGxrsy12CAy9QBGlU06xd/tVjowkloEM + pFGzOlRCfIS8DMdaNvNwMCQmaDghvsD8coT/xhh9YPa/YQnF/Wr3GcjVaoiIwCKBwXAGjmfIq3Cs + ZjMPR20aK1MWBiye0nOFa4CYleJiSicPkbbNii4JDHdnpm2oWx3h6HJBew15G46lbObhwEwFdTHT + UL/IbliYYaf58XALGBagHrMNB/KBo+EBwPHMq5zD4yXkZTjWspmHo2hM6lqOpTICyag/wmymw2YP + EWsVdF124YCADemUpqZnUzxys5KPkHfhWMxmHg451PZg3fghMo0oDNqUnZ2rYOhRLQ/zXTjQH0ZG + nOL6kAU6A8cj5F04FrOZhwMVK6pKEHvY3GHy8BSETzLmx8ywHmq77w5JZZ32IvoBkfL9ITheQ16G + Yy2beTiQ0B38NpQoB1uXigmVxNENjLwWUDDc7XdQgKHhPvi50OpOwfEa8jIca9nMwxFzhtOMnI4U + qWYqlmxVyLdWpGOGO/NuzQqLUy4LgYEbGFen4HgNeRmOtWzm4RhTzKEzl4nQG+RNX5BS4jEYDM42 + WMxtapJw9A1byLE2mEk7U9F9hrwLx2I283BITf/JqhoTxREOD8S/Xfa1dov/U+y7HF3uZmiIY+M4 + fOoOwfEIeReOxWzm4QALmoAMPXFuIIgPPS1mDt7R28bkVG/qWTFeqIbXkMbdRp5aHY+Qt+FYymYe + DoUptmLkid5GhybBeAcq04FeIGTREHaVQnHaCNTD4A2idnKo3/EMeReOxWzm4SjeF0HRcDzUhMG+ + 0QtcNQXDtBRP4qZhBJV1xgyR61FUdOEhHrnoPkNehWM1m3k4LM0niNIMw0DaxJ8cE3k8A2WnV4tz + gcibz0CnJKQez+FBTezM2fEMeRWO1Wzm4Ui+VGRbkW5lgACTOurtkgPd00PHoDnsvsqHuY0Z4kx0 + 
PPQpOF5DXoZjLZt5OGjUoc9q8dAcl6uhYe8pJhboVjlWHbZXB6QtasNIfkM4OFMk4aL2EvIuHIvZ + zMNB81Rw6xkiPQ5hUKR1Gpt8VLoPceO8+ypnzQkGMpicpTp5CI5HyLtwLGYzD4eAwD7GnRjYhJBY + M4wSj+9TloVCCZy43bODnQoCO+5FeM4ecSjAmuU15G04lrKZh6Nqi6iYZ3C2Y5mMUHpMGGxSaYdH + zYD5LpMECy6uCkzPonvpTsHxGvIyHGvZzMMROcMxxA4pFx4adXhF2LE2oDOwbOouKRRKz2CRUGPn + UD/TnH2GvAvHYjbzcDhasy6mjPyISMyS0BJENdRx42VekCvq7mbFm2MoxFAaO+RuM3xyXkJehmMt + m3k4LMxDNPbs8HNInRYR+iGYVjLKVwDHb29WXKsQpaSlwvDTmWfguFb9FPIuHIvZzMPhC/IjaEBD + rUJwGlKPs4amh+Lxxtxs2j7KMSdA4tJSApNntBCxS30NeRmOtWzm4ajMyhbk71n/nS5trI4KL9Vd + pbn1irRLXWBSZHSf4KkoVMWPFEmeIe/CsZjNPBwZkgIIRNocWpaAMinVDJFQAed/KPbunh2eqxUc + YIZymRA8BMdryMtwrGUzD0fnsCgYOyT6TeNByCMwe2RbW8O1ACWRXTgwJOQgD0Ow+lSR5BHyLhyL + 2czDgWJSS0MmBmWSVmVRMham/A1qxzXnvlskGccGYIzVgaLVkdXxDHkVjtVs5uFAgZIJJbRhbMdS + hcFNXB2rVijFMFqJh8Tm6mB6hzsVQqEqcgafgeMR8i4ci9nMw8HWRH2diQLGOzJLJFgN78PA0G2Q + GtRuRRfhhqHbioISc/5nzo5nyKtwrGYzDwcCiEplQ5MDAgPNWdR1qFRh/tQVNcS2e5RDw8b4SUSc + ht2h1fEMeReOxWzm4ZApWgZbNd1xmQRS00O7h1d6VY1hBrUNBzZDkGGgq6Dbc4bYg/HdS8i7cCxm + Mw8HUqRZoD+Ch4BpMgjII3Q+as+qoGu8K4mPgADMdcmFGSKXPvMqf4a8DcdSNvNwdIXUBb1ySiVU + XxPsUI3gtCzdcufF02zzKB9aQAMI5JgggBw5yp8hr8Kxms08HAUxmCQZPuNGRZWqtITsNwqGkKgF + 1959OCKye7z5PU7PZ1YHl7SXkJfhWMtmHo7IjD+ioJzb0EfQOUIspprGeDlvEXiJuwV2xtAwpqe6 + LqiLn9FCfIa8C8diNvNw0BgqAtUF5ozhkFDNwE4loEaCVxNj/3H33UFRmAclxTB6W+6M6sIz5GU4 + 1rKZh4Mr1ajkDk9xzJmwPomc4yrB2826O7X7KqcTP2ZtIPUgE3Po7HiEvAvHYjbzcOCJrXl0IMCe + EK3PHBvIJAzpaUdtF77V7lGO5SxQKMxy3CE9KxB+CXkbjqVs5uFAjDsNJV0asbyvUkF9vYXaEPBB + er633VFNgjg/ZCnpw+sz745nyLtwLGYzD0e0HRsbZmdh/TuTPI1BxpgNKleGFqHd7AZSkIReNaxB + DdGP3KzeQt6EYzmbeThcYBzQmYKeWEf8u+uKfXywJQzLNOQL9zYrCCRsfnBJYNNRSjzx7ngLeRWO + 1Wzm4bASqYvW/KBO6wwpl6O3MwOVMy/qpja7gXw7BmfhAQ8hIHtks3oLeRmOtWzm4Riybo0hJacr + zhE8EnAHok0u8cjm7ts3h9EUMrCQenh08Kw5I773FvIuHIvZzMMBT8ZgEog7AQpjCWNspKXhhxY/ + xKej3tQkgQvKG3oM3cDusUcuum8h78KxmM08HGOSHLt47lQ9ULtqOJYnw8w/S4XhwLR5s+KChuIC + zdlhdXpGQOkt5FU4VrOZh6Mw/NQK7mWNPiD2jaEnyLQMNjMObuSuMxrfDv4Dl2b65fhkHznKnyEv + 
w7GWzRfAwfOPyUDsTINnwIZJZrodAcMbT1kXv7/NmxViZbxkAYRy2BnVhaF/9hLyLhyL2czDEdid + CqNOEStHydJQDbtFCG8KudBYwiaTRPGYCQhg8OzgynBE7e0t5FU4VrOZh8Mif5hEazzEPfTpXHyj + XKU6hHa8I9zuUU7lEEotroFQU870yhkzfA15F47FbObh6AFWFRs7TppcSTFjhqGL0JjEpiAB1O67 + gwFQXoHD7MAy4HHk7HiGvAzHWjbzcHB+Y/ULzQp3IY1R3fh2WJWHyjganuW7ZwdyPXgxD0kw9vsz + r/JnyLtwLGYzD4fjDK9jrFVFkMHxFK8CkbVjiTSBNfPmUc63g2gwZOMYAj7SflLPkLfhWMpmHg4O + cpvQTLKMl8MegWXA8cE8QUW/lQLWZoGdb8e1ipsQAwuMqpzZrB4hL8Oxls08HEIm6IeVxTA4zega + 4wEVew5w3gTKLpvdQODAG5uDiJEFlschOF5DXoZjLZt5OHhaUG1ls0o80zALrIzbwLdC7huNsbo7 + qsmcjQFnQmGMdqZX/hbyLhyL2czD4VDfYwQQNFyCX+UZ8sDfAXF8pm1i2rUAxoTQQmlEsWzMox2h + TL+FvAvHYjbzcMSWpGGWw9MUROXN2NqlbKpw5UXTym06FKjBHxnjSgiRHpI1fgt5F47FbObhwDml + 8mzGyJFek2N2eZSqYEUx8ZF4Dm4Oo8GBH+uC3hNPfX+m3/EMeRWO1Wzm4cBoCJcIyTQMRSpUFkxj + wKeqWBgxGIrHmxddVIA+CroWrUt7poT4DHkZjrVs5uFAuwp6FcwFjLJj6dRzEYcpAXds3LnQxt+F + g6lyqhqwIuAPnTk7eBi/hLwLx2I283BwSHAbrcMl22H7NIT4svK4biCmhCnNbgmRxzjDNlSIMQA+ + tToeIe/CsZjNPBwysB4aF6qILbOG6gaFHae6ipsgZn+7Guw0OTDKofMU8Tk9M9/xFvIyHGvZzMPh + y0c7kPODcQLGOngeYLg4/hUmsXpXk0QhY4UYCesDN64zzmhvIe/CsZjNPByhe3pNjJ9ZQVGd1g5d + QQiICO9BdGZQdPfsgAkhIgNp+DGLQzerR8jLcKxlMw9HzZ33XvcIGVNTh0ICAyd0HFaEh7qwa5DN + KcQoGk8PKCpRHBnzfwt5F47FbObhaNT2EGITqgxDriFLkpgcw8Ej8xRJZvuiizolBi20baBMH1od + j5B34VjMZh4O7rLcdEWuGSYtRuA2toaAEv2OgEq+2H2VM4PGOwYRJQ6QeOjd8Qh5G46lbObhaBEZ + T5ODhjNdufWiHIJEAnPMBn0xdHU3zw7EpfmH5XWO0dqZiu4z5FU4VrOZh4MXGmjQEqTrBJNkKN7S + gSoOJvV48+wW2ANE+FG3gqHEENmRAvsz5F04FrOZh4NOdgnNc3CHzJyS5FJVUobvht4Yl6vdZyDD + /TBxHBadqGGcOTueIe/CsZjNPBzUC2vq6F10vJk4yRkLtBwdSIF7hUL+9urARUrDhxla8uGIji5T + n68h78KxmM08HBRHKLayKyU0ezRUt4RMKM7xKrTYkETcPTuY+US4HNlWFPFPwfEa8jIca9nMw1Fh + HcrRJtfjPsXLWaMaahDucS5HrFU24eBWhfo6A4hQSfSZEuIz5FU4VrOZh2P0ABsldbzJm+ocuNpg + RzOqihF5jLxb0Y3GO7qLlEggIp6hLjxDXoZjLZt5ODSEQ4wIaP/ljh1axBWtC5dwrwt1FLO2Vwei + sPjbRKye45nNigvGS8jLcKxlMw+HT7SGUHrDapapmJKdaB8TN72gSmLl9upgoBzvJ56BhrH1Ixdd + JG1eQt6FYzGbeTgq90bGA2uRCZVKZPClwK8cUavh3yF23W1g5zJPhTwWNsDcpc/A8Qh5F47FbObh + 
+JAJRW+PicoAKTfCng6YpKHjChvO5N1XeWSlsepwaRl2nYfgeA15GY61bL4AjpwC1RGutwjh4/7E + WUUDarQ9WCpy16gOnu8wZMZ8jcfgIQb7M+RdOBaz+QI46Dr14DQm2czZdMokejg1sds30zAw3zvK + qaujv46ILhbA1h3ZrN5C3oRjOZt5OCiJVBx/qfGpRM0VAkOGKN2LE4O7EDYp09QOecpwyaXRiyPF + ic3qLeRNOJazmYcDrT30KLn3MB9I8ZDSLrUqLIfwfTJY0G52A0kA0VaYPRYatj7SfnoLeRmOtWzm + 4XDIVzHfz8iygLHge7Y8yjlCGrfJyp1yb7Pi28UP6SRkWynFHFodryEvw7GWzTwchanWPs5vblbo + LXTBiEeBZaVZHEwIbt6smOwYlB6EW/1w4T4DxyPkXTgWs/kCOGzRJSJ7CAMHEfxMXaRTL3HIhHIC + i00d3QEH9UNi8w99pBv4FvI2HEvZzMNBfU+ieMFrPKUS/0kROoiMdV2jMr67OgjOP4K1WA3JM3A8 + Q16FYzWbeThwNMXUD4ouWpQ5jTsul93RIMRoHNG8zSIJhIUxMstLkMPDHmnOvoW8DMdaNvNwVM4M + aoaqMAFFoYSjxMYxxszYDbMGYVMEY9h78wSk2EKtxJy56D5D3oVjMZt5OOhHVEY0q4WBKLD9Zegp + JcTFhskmSkqbTBLgGENosNfxKDjjUPAW8jIca9nMw5GpWfH26wwHCsRaeZs7COwGlSv65rBDNy+6 + OAAbM5yG8GY2Zy66z5CX4VjLZh4ORJKGRwg3XKxU8rArYKsvuJsWpm903V4d8LApuWCPjbjxmWcg + 9ZyXkHfhWMxmHg5k8cYTQ+Mg711FbEwKR6kE4VAElRjp21sdQ40XX7TRevJ8wxPvjreQN+FYzmYe + DjoRlgoVA5AYiqOw2kSyKJQkpBF7wFV+Ew4k9yQSY4O3cEiDfaj4vYS8CsdqNvNwfJBuMjQSJzPv + cii74yRxnREZKA1mc7NicAyeFbv9cHk4w2B/C3kTjuVs5uEQqmIYqCqCewbRwmSaSvjcoJpO4d3W + zWfgSEDj+4QliEGY6cRm9RbyNhxL2czDoZhabp0Bf5qBnB+i4VWO6DSuG/idlrzZ74Dw63kIYtSJ + /N6ZZ+BbyLtwLGYzDwdaC16PQRtgx/wXZeneoZG4rpWtWm6vDnp3ox5GTxAt8TOr4xHyLhyL2XwB + HF3BljYNQ5Xc6N3VwGy5kxEG+xAk3iwhAjEzn1jUwWKHanUEjmfIq3CsZjMPB+aKVKa4TeFT3hAK + 5dRF5bgw+CRxB97erHj9UaTkhTn6jUcuuiD8GvIyHGvZzMOBO13Ubeh4wr6BiKhhsqMcGovWSGH0 + zZrVOMVxCEEsC+LbGWLPW8i7cCxmMw9HwAF4mKDxFs/Mi2Eop1FaZdAGTm1FIXHv3cFIPsY5xKK3 + SKnkyGb1DHkVjtVs5uHI/BXYlHNowKNtlde5rzK35BpiJYjH7MKBoNU4PWiXH9KzggvxGvIuHIvZ + zMPBXSFp9GEadVw15N0wqKtRZdTwYU3n3aN8OD+x21PNHVfnI6vjGfIqHKvZzMOhEwyoD6vZhjpl + HfohqIeATKL/VPvm5Cx+aExG8wCErMIt+gwcj5B34VjMZh4Ohvx1ljx1YZIwzuyLotWBPQHTNpmH + 4e5RzqpA7ESyV9F9P1Jg98+QV+FYzWYejuERj/5IzxzoyLbSCURfLKIZ0ztib2ZzNnB8O/hVyCpi + fBGOdAPfQt6GYymbeThk9UJQ0oVjFSCtMznGOxDHQF4geEfsUhfwNvWIlUlk2PHPOjI5+xbyKhyr + 2czDgRZJoqiOYYcM6MKgnhQkNKvR6zAMGmyaDQ36FhIYg2oFJGdWxzPkVThWs5mHA8KNF6h+F9Gb + 
r7BHXMWNS+FQYBzlxG04eAR6DeeNCR5/5lWOxuVLyLtwLGYzDwePPcMkwegFYvyFplWCYJUFw/8B + 2oHcfXfgnQrdDdb08C46Mt/B8/415F04FrOZh0Nmm6qyQ4YS3jq+v43RGPq16Cpxx0qbPCvPWC6u + T7AWEGM6Y8X1FvIuHIvZzMPBqBhzxqgYMyfL5BhiLmh7KmQw6M3GGDeLJEEwsg6tbrBxrDxylL+F + vAnHcjbzcOjQhpNcxjoFkhvvPuXwQ4udhqChV77ZnKXPMfaW4W9DFezIq/wt5F04FrOZh4NWU+Yh + jnYr5JGI8VBGCFFmFs3w5OqbY/6MBXKQc/IGLtDiyNnxFvIuHIvZzMPBCYGJKyI96MQ4HoEI5XXD + 1H+hxsug/yax52OknD555GVOnf1EkeQt5F04FrOZh6NSFnHo83TEF7j84FCO3LSswINKCfpvexVd + qNLc1fGmR4wJfusZOB4h78KxmM08HE3hYYPqBcSbhJMKykko8KG9Lgy0NwTfNuGAuYWDo+CGoPFa + OALHM+RVOFaz+QI4hjoPM7M1t5BqQakH5YVSE0JKOdS4OYyG75rCa4haGD4U6szqeIa8DcdSNvNw + MDkbM54qjmIG1uIdmxtoDEjYl9GNSptMkmGDx4w/TQ/8jM6Ifr+FvAzHWjbzcIiK7y98qq4b5XWU + vnVERjdDo65QB3fnymk6oVfHRBoXXQQxzmxWj5B34VjM5gvg8I1aYS4UwWsSCfEeiwVwMlxyc6bk + vnt2WGTeuDMjuUB39gwcj5B34VjMZh4OnQOaqpzjqQ+yNIMYDM02GfEbQqhkV0CJ0hcS7PDeoA8d + Em59C3kZjrVs5uFQSPZoqDzUMahaeZ1xdRzaJG1cidRuzYpvh1IoJo6Gd7k/0g18C3kZjrVs5uGo + LbOp0/FQFQdByzAU5poeMoNCXQxx9t3NykEHRRSfcpg8ddF9hLwLx2I283AwxcwvrugI1ovuoCIy + OuuGaAUSSp02yDYczKdrGHXDPPBI+wkN2NeQl+FYy2YeDl8tBJ7OjYp6oW6SKglVK8R0LfoLrWyK + 741OOY1ATVeQSvGhm9Uj5F04FrOZhyOi5AmJneoUozY4DFEsgTXN/YoNrLF/ba4ONWzo0YVGKPSQ + nQqDO68hr8Kxms08HGNcguPbx4JzxJAUa3TKlakexwgPiWETDtx/eZcjoITqiTpT0X2GvArHajbz + cMBw8yhT4opmMpwSxEmgwKE8zZMjW70r+h3GO1CjLCadZ9s98u54hrwMx1o283C45gtzNZgCIV+F + C1So0P/ZsKCUROY1dyu62OYM444QsKmTZ2pWz5B34VjMZh4OHOJDxSMQ/3is4wtmvcFwcPRUBc+F + uskkYd5TAIXCUlN4e8Q+/i3kXTgWs5mHQ1TG5uGOjNEzSxVXNR4eHZ9YxEKx4tpkIYKGk3QCHYVQ + tEKPbFbPkFfhWM1mHo6Keq7BEvuDtGAkPFCph1IlZXYoV263wM54P/ozPkiWnj0ExyPkXTgWs5mH + gxHALqCrYzMbqLSjoDvef6qhTclh4rfggGWA3ayj2sJ0Fe4sB+D4VyHvwfGv/ur5tT4PR8xwDTXW + jTgzd0ahTMtj3IbpzY4J0V77iQQ011uU91gajDIfeJX/q5BX4VjNZh4OlSublRSiYolWqGDlLFB6 + qwize8lld+fdwYACw7fUq2BNo0DjDhzl/yrkTTiWs5mHg3lMWVgOuiMC1pD5zjhB0Z3oCJM43Jl3 + 4aAwrD9aTyy1A73yAccj5F04FrOZh6PD8/jgRgu0rNCRRPC28ibnZjoGNvsWC3F8u2HDBSFU/b/U + 3dvSbcdxHehXYeBaUszzgVcKt9rhi27fNHntmEcLEQBIY4Nt99v3Vz9p7jXXhuj6q0rFMG1BJBRM + 
7Jxj1Slz5BihSVsIjkfIunAkZvMJOPrjPmfWpivli1vPg4MZafx9OFAakEtyVwfzk+CZw4lrKlEk + CQg/QtaFIzGbeDgU2PX+jMr2/U6asplY3fhJMxN0jihlZ8NBfbTvPZ+QQwu0nz7geA1ZGY60bOLh + 2FiZXpPxfg7MrenlhRHUPRsxathITFfuZsXx3A0aR8WW35TZrJ4hq8KRmk08HO3sCe6iq6S+3s0c + XAMD66PxF7W6KetV7qfcsnwyKoKIr5ZYZnU8Q9aFIzGbeDiC2HpLBkMlkakpcmiw5Wq2426DnXyX + DYcYWG99cDktYQEcEH6ErAtHYjbxcKD0tO3Gb1Z/yIJAJOJTfu7csRF79iVroCB8OyOaeIg8HF11 + C1CmvwlZF47EbOLhcGzcQfxru9qT6PduinnX1zbWfC3ehJlwhOL6bM7GjjX4V3xZ4aftx+u73373 + u+vLL//Hz9f2y/X7L9fP/2jba95C1oQjOZt4OBr32Y3aAvGvMAOutuvrXW1/jncQGcvcrPoWDSL4 + Y7OA6kr4dwQ4HiHrwpGYTTwc+hyIbmhV3oOXErs+KgkfVlB336D2ZGmS+HYLQSbq7itdF8XdIqvj + GbIqHKnZxMNhH2nMBd7OCvNPiCT3OuJMn0pY/OXarG5ggGMIJy9yBNfZtQAp9JuQdeFIzCYejplX + R8tiyI9X1RDpbTiIxjADvvQF+7xuoLkxrUCyxn0w3SAFW2J1vIWsCUdyNvFwnAupBZo9LYpu7/5j + w+r0ap0n+Oz9mrk6ZlYqYZqAxphLcwHqwsKA8BGyJhzJ2XwCjr01jcZfdr43lypaJNOyBVNHFgXs + NjPbTxqyXuN6T265XApKrI63kHXhSMwmHo77ojY1nvM+ELUyv0yp5+y2fh+MpTnVMy+6vh2JG54g + zg7K+IXgeA1ZGY60bOLhYPY0hHHyc6YmRs5q7rcWa4FV+Xp11GM+WUI8v/+y7T9cf/rzK2EmKc7T + lkRMGGZGTfnn639sP/7xh+ufjj/86EO+vC7+5c//xb88L775L/67ffXwsvlf/LPf/tDx3/Y2OdP1 + Np8LIX1f93uY2aN0fu6mL3k5Ffi2hv97D23tw09+25f/4r/bt/3Ft/21H8S//YeO/7bY/P3hKev3 + eqynEdrdoXiNy3QgmrNZzP22pmCcsI05GBfc9hO/2+d/se63/Zt/6Phv6ze6DUcQgpgbfFjNaBSB + xUBeMBL9fGno/Seg9rouiDnB88pUZNIe/b5k32JW/ezJ+cQjQvJx3hADdHta5mOeXHoLBIRVJoI5 + w2cPzW8QsUe7UQZLgGAqWgaRZ8y6iKTmE4+IS7N3XbdcN2fwdWsCB5PxbgMmZpbrZ0eNfh0RzD8Q + e4IVROSvMf8OiCTkE48IQllrDo+09nHv9i1GJpdZl6ujS2Do5bO96W8RUakzszHqLrhjFkLkEbMy + Ion5xCMyK5lupik+ZFiMDJx4Ydh/xKOG+zBKknlGm47s5+ApGoraiaJF35wjz5iVEUnMJx6RD26T + ue2t113oTanuPH5Qa2h64fG3nyUMfLtGVo8ux9MHWzmtzfAriLzGrI1IWj7xiATzsabdN2x7Jorn + 0t6esDhomMtG9Jrsc8TCM0EcrAH099LomN8g8oxZF5HUfOIR0Q9t2SErRnAH4BRgyp6z5XIPTLBu + V+PcXWtC4jddH57DzpIy58gzZmVEEvOJR6Q7GYYh1ehSU/Fvd03r0WRYY8EYKz5y39Gtr+fxaHQV + B2IptGs9Y1ZHJCmfeEQ4uR1GkBpXrZkuixGLffrQYFMF7bc+t2qkl/lRUlU951Fdao08YlZGJDGf + eEQcGRM/ssv/7ydGAUiZ4+G+pRg6hFGu7F0LbdkgLPdDfMBS58gjZm1E0vKJR4RSqp6D2eH7XLgl + 
QeEODn6Uyxcank327VdhynNzHEzKLn1apfvbk/0RszoiSfnEI2JjWki1BNe4UOswqxJmJ9Fu2u3G + OM6/a9mzgp636Kl66L+CyGvM2oik5ROPyO5eyip3sk8NMzF0ylIH33UNiGbbqVJk71oa1xjlTDsM + eqcRO75F5BGzMiKJ+cQjgitrTdAObE+N4Xu40PsbnWsTlO3x+dnJb16I5iyM6gVx28lfCt1+HzEr + I5KYTzwi17CRp++p0+837a/hoBviytVc2s4EXPLPkSlIo+PcqD6spdbII2ZtRNLyiUdEt3o6Z8SO + e+uYwm5Dy+A96Awrois85r9Hgj+WCwO9JKaXhdbII2ZlRBLziUdk0QcxlnQYVFGGn92Fz+kw9oh2 + 43f9aSfxb3ctjxENKzwoz/a00Ytvz5FHzOqIJOUTj0jLLUBFcaIaeR84zGqOWiO3duJNsfDOP9mZ + E7CDVRAgjVdq13rErI1IWj7xiLBANIXvUrUM03EHqqBtSyVl56PMa7wAIsSmwgI0sZ04h/8ra+Q1 + Zm1E0vL5BCJM3fGYw0DXsiPSrixP2v0Yxuswi99ld3VpK+iPTGurcZVIa/4VRF5j1kYkLZ94RAzF + YDgo1uxGV64pyA4T/Q/uWYx3nPPZ75FZ21j3UDFzKlZFecSsjUhaPvGIDCq9rGBPkrfNFoxnums4 + p5F6xbTSaMt+sxMINb8XXDYYRxS6az1j1kUkNZ94RM5zmLYOAaVTfGee5VShtNOeLJrQnfM7Vvpf + rmxKAkFdp1A1/hmzNiJp+cQjcmH6e5NsFPPUzI0c7x92vWR3Vvr102enY765/boWmayk2wqSxOGx + b86RZ8zKiCTmE4/IxqmJLvS9MQlCmhu5Bc0sliZE23m+p+xzJMyBK2l5bDKSSZs9/hVEXmPWRiQt + n3hESKtOm/njk2qXYXAE9BYRPZgybNd6DNkdq3noWJijoQTrwEIdq2fMyogk5hOPCN7itKMGnTjn + qFsm+zbCnvOy73joiD25dy0zMtRWMUzZ0iRaC3y7Rh4xayOSlk88IowYCCWgnpjevl2yVJ9oq80b + U19N960AItqSrlqa+eQYytS1LLmXmLURScsnHhEzdzQmZu927xCWpO2qgbUwb+LUh/Of/WafmaFo + HoY6o2pZKUReY9ZGJC2feERmLDlvHqhs94TFfiDOcbrWGyH2Qh0pd9eyK3qIEA0NpeQ0Jdxvdq1n + zLqIpOYTj8g9GebrBz317Vho3J/97XnYmgqhHVagh7g4QoIMoIKmrkaZNfKMWRuRtHw+gUgDisPL + 49I/btfbPDLynDbv1LcHRk/mGumUy4i8tNQy0FvKTCu8xayKSHI+8YgcRCaDC+NyEAzbp+00kIas + 4BHfGknbcztWrLoD49dZxYyjK9PVfYtZGZHEfOIR2VjAX9vZD+q97aKN2G49AsR10yE8dRKz18ga + 9hizD4y1xlKIPGJWRiQxn3hEziCRc3ihGxB3mpuFZeCkLxJkLjSwsteI2hhhvsGTPQiyFjlHyFa+ + xqyLSGo+8Yh0M1eazSPRJ0PSbVV/IcFLkdhI133a7Oy9ruXrrUSRRmwXjJQyJ/tbzNqIpOUTjwiG + iI/F8WFmXbqOxhaogLpOzOu9BI3W3F2LIoPqr9a0knyh+RFjeq8x6yKSmk88IuFtzqZ00hlDdTPC + f3q9D/ayrR9023P7I74eLnGYCkU9msqwTN9i1kYkLZ94RBR+FRRHSjw4h/yWkbeoKQx76Cwqp+RW + GlVn3HuVPYKyXqH3yFvMyogk5hOPyIxB11gk643Ccxq+0Q73SuSAbbEYgsveteh/LkZDWZIb2C1z + smu0vMSsjEhiPvGIUDXkHjT2oyEe2lUMhWiEsMfaiIF6k+T2R7pBQ9IYnUqNEZJCL8RnzMqIJOYT + 
j4jJNjo8ixsvj0AyX8ZH1GhbPmir0+XOnR/pBleTfl1tvqzPy/C13mJWRiQxn3hEpmCBpsxxUpy8 + byw3c7pMfvG3toER82dlxL65/fIfkMM49KsTpUw1HiKvMWsjkpZPPCKmA9GCFJ/IoOgfdoqM0+Qi + PC5uXdSMc8+RESnee868wugfUeYcecasi0hqPvGIHBO+b6vLTu2e/7XJBUWVY0HV6klOZXd1Dbs1 + tv01VOhKTb29xayMSGI+8YhM7RWE1k92Twzp9Kt6IqFGRWcne7ff+WsEFYUgSpigJp9baI08YtZG + JC2feEQoervx9uvROshHukuYPNMxomEPR7D3y961NCZ1rCDClrMM88EaeY1ZG5G0fOIRUe+96QYN + y3l0jT5Vj6U17CMlPlaa+orZiOBpMZ1Q+W2KvdlZTb7ErIxIYj7xiJz7vTMmGFf0k9DEGILLrPL8 + YM3M9M9yESGQ7QWK1KLp0heq/T5j1kUkNZ94RDSpaHge3ohBFV/rkErKyNsUg8sYyZmPiNu1hhUj + YJYghfoj4cb+NWZlRBLziUcEHP2p3KtNoml1ex5e48bDUXXrVGnMfiFOwYyZMmDwLJ/KMB+6Z8zK + iCTmE4/IctLhGIZrmUKrfTiOQb3DkT456k2SZHespqBpTFQ8qGuVqms9Y9ZGJC2feEQu+jSBIMSt + xBCi93qvzbtcx9ms1Di6XL6WCj9l/GYN/mg692Vuv8+YlRFJzCceEUTf2bTu3vS703zYZnPTx0L2 + jH8BybMCiNC200EkLzAMpXYtS/hrzNqIpOUTjwhlrYtd4IyXoJfLyXxbBuWUjYyxiZIj/xxxgODP + UWpEji9U+8X0e4lZGZHEfOIR2Z3o26ymRSJ/uokCjsfOhYgSrCkSMsvZt18VM7I3KKvqjYVeiOaD + XmLWRiQtn3hEDgx4DBQPwvbaNtZQ4Zi/7vnoz1nN67OmwO+138DGY4NHI5+VYKHa71vMqogk5xOP + CDdHqkG3q5D+1GbwEJMuyMWNNGvYEuVWUQxj98EcM1DwTfYUOdnfYlZFJDmfeESOrTHRs/j8mCcK + J5OS48LCTiHe7O6du0ZCBqR+EV6YTxXatd5iVkckKZ94RM5jusOXn45t50lA0mkmRncryZugvsbc + PruvZ2msiwlqQjVl+iNvMWsjkpZPPCKI2ItOSDDosjR6o4g3vdmOfdrIvuvI37X0wJRn0Co0i8tU + 48fmEbMyIon5xCNyYDO65TIJ7pjLcwtuSDRcHg98r/UUc/lao0mIVn1fqQwNrNQ58ohZGZHEfOIR + wWsbubF8FFG812/SpnqJRDn8tNGE1szbL4qLupZwc0dEosw8+1vMuoik5hOPSIfXNo5UGt1719lj + +NwPU7sOkiCbnL9rtbhGNAe9SFb+Y2XuWs+YlRFJzCcekQu99OowExqvj+AY3J83h/mJuRFjmCt3 + MpRNtw3QWKgl4qFdCJFHzMqIJOYTjwjKnEeioZ5h2belObZ+Po9mv05EeWaN2eeIMAPnW2KmLZgL + IfKIWRmRxHziESEEydXuaJziCrQNG9ThJGACGBPUJheyzxGHSLiu0kLXtyqEyCNmZUQS84lHZPAK + oYuybNe9st70gr/PHhb9GSao+1y3i9FAKG6QlqRqzVSmGv8WszYiafnEI4LQiAZvkmcYDxLMnX+P + pqAOrzR/mBbNXyOm3uhKBOLkXOpkp/T5NWZtRNLyiUeE/2Y/dPfaG6lyts9kg3pFLR7n18b1LfuF + 6HVoSt4SWfuxkL7W+IxZGZHEfOIRMSFNJp4ySjvSdFou5V498WazedGMy54f8fUG90X3ao4wXam7 + 1iNmbUTS8olHRLfwQBDCwN5vg+3IQhfzpJXvvJ2/2QusEdQqu9akJlCIGx9QfolZHZGkfOIRsTya + 
fWKooLy4NVx6mrs9t8PnI0F3DNm1X0NvCjK+odHGqdgaeY1ZG5G0fOIRobE076ZtPEC0ruhmdu3p + dX2z2taAn7PXCEki824zHsVgALjM7fcZsy4iqfnEI4KJfZwnx3P8h364Jp/t4jnVLv1J7yy7hwhg + 49NBG8VjpxAX5S1mdUSS8olHZLl3PiPD6n+t7R5MKVlYUV/SdZ8v/N/c269JUxRW4/FmEcsh8hqz + NiJp+cQjsplNIJ05Up47aT9ouZNzGhQbSXAQYc6d1Q3kVSwKf2kR6QqdI8+YdRFJzScekXWgizJ/ + 3IFdrjQRGV/oaaj97ldQl8ldIzT85w9JbC/OucysLpRfY9ZGJC2feERGqlr74PnWNiS19s3upfy7 + 3dtmmL3ZCiDCMZSVdWs8tC1U12Ke8RKzNiJp+cQjsnNcRLiGBqUaB8o5cuiZmp2uv+ndLfuupc5o + Tpf3O4PKpgyncXzGrIxIYj7xiLD+Zkg5Lkim4ZHeIM4ZuFJnHFuDJU12f0RzBOdB356/dSHNB+NH + rzHrIpKaTzwiu8md3kgoeUu0+GADPtAMYrFAlXnvz2wuSujXU1oxyjWXct+DyGvM2oik5ROPyOzA + MJmrsTTt3bFN/bnuE2Hsg1Q2HmL2XcvYiMlGHkwLdkuhu9YzZmVEEvOJR+Taqb4SGeiWS7nx7pb7 + 4ORK63elKpSvr2WwKqiiNP3EP6MrxEV5xqyLSGo+8YgQEkABxTU9Fg121lXLuM19KDIGJmKT3UMk + IxJu1B7tnVJAmSrKM2ZlRBLziUfkouZEO7Nf9hUUHXVylUZ65fqI19VN2T1Ekuck49EZ7f1joY7V + M2ZtRNLy+QQivF/4eXodDkG81KY1NiYQp33djFHn137HwRZoOIU9O75poTXyiFkZkcR84hHp+8MQ + aHCx8sVo1Jxb05x8V82S9EefrS4blDwIorhV937Zhd7sz5h1EUnNJx4RlPXuOFZSdEujqHnyI97R + ta55OtleZ6vLcpdHwlbJNBvqvlVmjTxjVkYkMZ94RNAbjLM3165WROaHusAFkRY1KFTP52y+1jxa + fWFEezHUU4iv9YxZGZHEfOIRsWPNDTVy4le7EatdX6Q57mBy7AKsaptbaTSjgPerWxXMxQrdtZ4x + KyOSmE88IiZot1DMQi09kR3U4925OFRiIW6rMyUXEYbWQRmQ5JlRxDKaDzrPrzHrIpKaTzwiTeiP + NCi+tB6cKaq010CS41oQ6dy5srnxzhCi8Yt3YkeHrsw58oxZGZHEfOIRGQzrkolfe4Stft3m8B9w + TQ2/hd74lKsbP1LaVmIkdKf8W4qL8oxZGZHEfOIRwUQ5g/MhT8/VI2SdtpvyWRixOoiUZevGOz6o + eqj3a+nS4yy0Rh4xKyOSmE88Im64yozhurXzCvU08bgmTu5cP5YWJtnniFo8NsoQPEjGQic7lF9i + VkckKZ9PILJv7UzqFy3+cJoz9gzqTuQA9mXXudqyEVlD8xC0poXWQpVGtcuXmJURScwnHpFLWZH6 + RhhNUMDrVX6dIyYGg9/FsV3554j+ZK8C3AaWUKEXIp+yl5i1EUnLJx4RRJSVkMDgpXiM+NLs3Uxb + kRDC/F0ol+eukZVRgB6PGRILr9Cu9YxZF5HUfOIRMSs9n7NjhHhpM96MqG8kU6Oi3iVBAS0fEeJa + XqBMrVUzy5zsZGpfYtZGJC2feETGc7RjdZ4Mp676TmsrrJjOyrFpER7IRoSJVVDDJoBfrD8SjLG+ + xqyMSGI+8YigYNuTh6Fv73nW1T0aFKGBGqxmb6gNZiOyOqY4wlBiLofII2ZlRBLziUdkGrkrbM1p + UtCjPTSWGqr+03iZ1jVLks0OIvG+DEFNoh0KIvIaszYiafnEIzLoUV3MxDDjWVJObPfOk9cCeYFh + 
utGE8teIi+rIFYbgSiG3ixHKLzGrI5KUTzwi631jCJ5Tv9MtoT/n3xnVZTlogPrCx85EBGsuqEd4 + etLjLNRnf4tZFZHkfOIRaZiys0KkAriO5teveTeddnEao9To0pq7RmTQ47ByHxnUUcqwg95i1kYk + LZ94RJZ+2oOa0zGjMJ47y6Rg3XKv++ad0jS5M1bh6yHUGByci3FR3mJWRyQpn3hEdm+Q2/o4mPVM + x7pN29ZtJ4Jjo+fej7kMOl+Ph5yORqfXXm7Xeo1ZG5G0fOIRYaN7DSRl16s7+qs1hqj9OlA0paQZ + XN/yzxE3uFCT74q9R6D8GrM2Imn5xCNi2u0KZBR8LZr7061GG5TKr6FhLGaAMB+RRRGTo2Qwsy6j + eAaR15i1EUnL5xOIHN11TNdIk5zI1rGM971vqvN6iScNoVx12fD1aPMGE0FEpzI9xLeY1RFJyice + kSVICujm0p+bO5MLlJ1wURzHGoi9Uz97jaiWqZ1TdaL0UaYaPzWPmJURScwnHpGm70b1E8wgSth4 + W/dIpFw/tz0IAJu/ykYEn4XU72qup2lLIfKIWRmRxHziEXGuc7nYjpGR7kq71Fo5h/m4Z3ZNR3vm + KtVYbUyOTOCoZTbFdq1HzMqIJOYTj8iw6IdcbroHZV7z7DTk7343zk43flMPzl4jgwWCr0WFmeJ6 + kWo8I+vXmJURScwnHhGKTfQ2zLdd48lmgXa/MZ5lJS0e5O+O/HNEs8obMRjN94UUzyDyGrM2Imn5 + xCMyG1jCmVs02hW05ntr154ZX0vbf9qVHfPXCGdYuthWXbkXomLy15i1EUnL5xOIsB3ZnLoNR8rT + aU7NdFMXDJShAdk0t4eoMeJp2HmKhGJ8ma7uW8zKiCTmE4/IhN3Al53l9NK3CwYdPQ7nB99KMgNd + gdqvVpUuPtHfpSAirzFrI5KWTzwintM2+JZs/0mVV0NX1RdD16zVNWLQ5U5P+z2r0RDiYNnbFlLh + eItZG5G0fOIR6W9q5C3lhws3aDZ6aJi631kjhr+hKpx9jpB3Ju6BCoZUXoYdBJHXmLURScsnHpEW + HS2M3bTTeXvJ4fB0Nxpd31yDA/7M71iRemjtgIyTeIsVuv0+YtZGJC2feES6hrzDoYExnKiHlJgn + B7Hbas+r0lh1LhdlYoLorakUT/ihKVVpfMSsjEhiPvGI3NNwtaeHelB/7c9pWPd1PkkCBHlAZZXs + XctEPmF60YsxH9hnvsasjEhiPvGIXIOrqTf7fozmaTdjJAMeBJlGzRL9q/zbLw1T+Fon+PalurqP + mLURScsnHpHlYP2CCHqMm2rKtc+HuVDjzmEEpzny71rE56l84MW3obtb5hx5xqyLSGo+8Yggf54m + uQxCTfMxXsi+13ySP7tN7qrYZu9ahDN5vdHipNRYiIlNwuU1ZmVEEvOJRwTnwfjh6ThHo1tNkYSr + Fpv2aTddoOqYe44Y5DE1qKasYVXqzf6MWRmRxHziEVk5jrBMugmh2MAWHq6Wh664Kgrlh2xdFBsg + 0rS771qwz/6MWRuRtHziEZkWp0cgZnU4W2fH60JHnFpNO26cNrLVZb3+VVC4j5hJKbdGHjErI5KY + Tzwih2kqEzczXBqW0yinKxFTgMwHDt2d/R4hLaBDyVNXbbkv9EJ8xqyLSGo+8Yj46IzFGhTffZnt + X0ZD56BSPtLjuLEWcs8RN6xVpXHWVVBKKXPXesasi0hqPvGIzL3HB8cRBIiw35txHob1ZNQeRBu7 + M7s/olxJNMObZ2zMQJRB5BmzLiKp+XwCkeDpqa07ro1ZKz46CydwFY8w3aVEn6v5gM5I0Nkj2+um + 7wv1R54xKyOSmE88Ig03sf44F+129zoa5asXu7/zITrAjDp310JnpNylWFvwzf6MWRuRtHziEVnN + 
uW3h6y8cL87OqYIcz8DwQyEl31cXF2+ivoEPRoGuFDf+GbMuIqn5xCNyEN1exouHlQLeFYryZH52 + Wr/zch7Lnl3XChmEC2OvdTUV4qI8Y9ZHJCWfTyBydFToDtdeBXlsraCD7Uet086h0m87d9eaOvMQ + 5n4XwJc62Z8xayOSlk88IsNNONPY20EqwxuEsuwVdC5D210L686uogS9OHwXnqGdKlmZu9YzZl1E + UvOJR6TZFX278TIF1bZ7hyFP+5cq885gzBGfffsNenH2ww41XmWrECKPmJURScwnHpFuuM7VvIIj + eEQTmjQUD3yhQ6n23miV5+5aZke4jwQvMAbUhd7sz5i1EUnLJx4Rc7njcg3Kix6K+7HT2/IOud1W + zYjq8mYjQnNw5aRMg466c6E18ohZGZHEfOIRYb1HEWXo7/EwM0IIkF3PdutAnOinDMfyEeGj7BRB + +nWalELkNWZtRNLyiUek24ZudvTiwTtDmna86XHQqpnwtW4C4/mIrC5ZQX+pHMs0VC6/xqyNSFo+ + 8YhcOlTH4PmxX11H35eIUDMQR7l2zAcs4GxEQtWBVYCZt2LV+A+rrb/GrIxIYj7xiEx4KLSCxonC + wAyEgf2FaRI+4DcrsD5/jVBoJC5Iy5RQVKm71iNmbUTS8olHpFMxYQgTyCe8oU/FjtO/9ajrGX3O + fa4GHRknuxXunDZYW+qF+IxZGZHEfOIRmXYkRuqM97Xyu/CX6+KJ2DIOJUW3FniPBGYQzi+KKVAK + neyPmLURScsnHpETKcvb7XKU2K1aQ2/3uPtlayAyR+xz1Zw8/dVP1namjdKU6iE+Y9ZFJDWfeERs + UBq5ZtIOZV8GnzTpOLTz42OtG5Snc0/2habztAbyUXBXKLNGnjFrI5KWTzwijWcCrUxtw2449k5R + y2i7HqJOST/PW3ZXd5knaoBU4+YB4oUQecSsjEhiPvGI2KiwThBNUSDODyVAPgjtMXhpK7FmO8KY + SWE/Mk3mGv2/MjqNbzErI5KYTzwitxN8PsxTbaysWMDYuwj7by3Zs2nXS8zetejPKdQEN+ty58gj + Zm1E0vKJR8S7jdgSyYcej3E2/WYi9MQ83c591LTKPdk1whbTHhir2JmF3iNvMasikpxPPCK0Nw7V + rMDQ2rRzXYB796L+sovNNq8CiOhWTVq6jqtyiLzGrI1IWj7xiIxklrVbKS4FuxFi8YYSjSSOHhCb + BlburkWfZqEZj/hrqLGQ19tbzLqIpOYTj4g2u1NjGO77XM75xsam6nRuSo+7e3G25gM6t3k3lwR9 + SbKpRe5abzHrIpKaTzwiXKyufmn33pRCw4PkZDq17PcdfKi95HPfI4FxFCopHa1GJLAyiDxj1kUk + NZ94RLozuL0Z1lX4HXZdRAau93iz4NsPGou57xGIDCt6JIlnxIcy/iNvMWsjkpZPPCLKiue0mQ8l + LwCElft0YND1PC8MvvW5ak6+nhIjdryRdsuk1Bp5jVkbkbR84hGZWmobznFubO19hy3GX9pr8yU5 + J2X7ITKn5NGjqjwT4ih1jjxjVkYkMZ94RMgEOUkafjybWxYT9cPzve9onfWUg4ZcLor3ehhT8G7H + EloLnezPmJURScwnHhEPN2xSdm90avQPD9rV12rDWhnyjc2ey8QmTT6ow7MKGDtX4DK71jNmXURS + 84lH5Ob4vS47Q7EFSZ5ko/aVeajZ92O9unaZVRT6NEbeBruWSvJSCJFnzLqIpOYTj0jQzl0HPfbQ + wVBF2TkhUqEbOVaO7mHZJztBQEL+Rnr8E8YyHSsov8asjUhaPvGITCZ1w5zVxCf0IoStCD/RQ7FV + 0/Lb21x2EG8eLg3BoEdTo9R75BmzLiKp+cQjQkLxYN/QnteIutUZeTsJneEF8aIO1oi5u5aLQ2v0 + 
l+yjtn2hXesZszYiafnEI0IHuznO7oJGf2+4oM0+nwfHN08T0+25LFNrJFQZmSHO5SqNz5jVEUnK + Jx6RIZwaJ/Wrk9kIAf9zZLTr1R78ncdry1U8gwjSERlOx1Wx2u8zZm1E0vKJR4TS8tY7zwdu4Isp + kq3xUJyUuYjT0UrJPdk1RVgrDO6MwaOnDBP7LWZVRJLziUfkvK422IWeLMY4VQY2I03TbTfWE+Rl + cutawZQ9vA6JG6LHl0LkEbMyIon5xCMy8hlRcXKhvJbhuHEfzHu4bIXGu4Jt7nsEIkr9sF3WIMdR + 5IX4FrM2Imn5xCNi6pDbG8l42xWrHstjwWr0TDxbj8Urt4qytMDuZqYAZqcL9RDfYtZFJDWfeERu + Wpm7YTdtdXYjJhb2GQWChVJzHyq2uSocGLoqjN6gWE5joam3t5h1EUnNJx6RjlGoHZ6gKcocGnZL + q4Z+qqP+Ut2asxFhYMXnmPu7YvJaphofTLFeYtZFJDWfeET6g/7cboKdV+1M7ndnA2MXWynJbwin + uTNW2lT660EaRUGz0PT0W8zKiCTmE48IusmOEsRH99hHTNCmNRTKXeEYCdjsd+57xLRbuPv2RAHx + Gsv0EN9iVkYkMZ94RHR0tcERf4/FqNviLnzoLXHhY0U9jNnzI76ek71T/zUcOhS6/T5j1kYkLZ94 + RPoNH2i8Q3+9+RA7O+1cR7ON1DlGVZXMupbuiC4oitPHkFWh2+8zZmVEEvOJR2S3x/cETGfjCnxH + +hPFsT+wGbdwA1vyzxH3gyC1HUiNhebZofwaszYiafnEI0IQ5bja0fLQ1R12cltzOyCcumfNi+nQ + 7DUS7Ky1EWedq6XM1Jsr4WvMyogk5hOPyIWbYGbeiIfV4cF+WSNmYXc8YFShI3cy1NC0U53+XKim + lKprPWNWRiQxn3hEmI2YZ0cTVIRnmdQAAuEUW2vFFuKAnLtGPAvpeiuicGPqyvRHsFVfY9ZFJDWf + eETcTJkgmjs8h12Dig0JYpXpXRM+4S+5PUTEeO11jXtvRL2rMnWtZ8zKiCTmE4/I1N3hOb3hnWjw + LkuLdDoRzDgVPqgK5XasqNTo4C8GesopML/FrItIaj7xiPSdJw/9awStRlsdb2QZUOOP7aJcvtyZ + 7KDZwzAoAjLfc5SUqca/x6yJyPs/O37NxyMSqrxs9+6V0NZ4jHfHb/rcRk2szYDBmDk/EjKYgoWV + WpTzvQgT+z1mbUTS8olH5J491o+WO7v2Bfm5a+t2Ur8rE75B8yrzHLFhBQ8rioCe7V1fxO3iPWZd + RFLziUdEFZ708mRMV12LdyunYxSULjwU/c+VqcJhQJeCcG9ivrX7F0LkLWZVRJLziUcExVdHxJiV + 2ol+FUmZc+sJPJ276Z6ryZyxCoggYRPPNEEy90Xm2d9j1kYkLZ94RDhgYoBe03RM9zQTMfWmMwJ3 + QskzZcjs6ppl5Az74QnjTFqLvNnfY1ZFJDmfeERchDgr6FpdJAFVFhGCgvrvruKI0HNkdqx8PQ7H + IyPr0HtrirxH3mNWRiQxn3hEOL+gG567k92aOM6z30eOiKO+yThh9OS92fm+k8Ji4WqEZKLpEX9b + /Gn78frut9/97vryy798/2Xbf7h+/+X6+R95PrzHrItIaj7xiGABDeha5LR4J+2kFNeFlj/dX9Va + h3v2rgUGXTeFlM5gT6E18oxZGZHEfOIRuU83K0/0+/Bwx5szSDKa62nQ6Dzt7tyTndgNulbQ/A2C + akX8EK2RR8zaiKTlE4+IYfP7Ivfr4tu2zTVfVDkOd+FNNV6bPZNBF76e9johyFahsYy3wnvM2oik + 5ROPSKu8uC+BGXQrwo+Gewgwe7tfB/0PBKHsc2TULNaxGv0j1lK71iNmZUQS84lHZOeAyJl9VUYZ + 
3bZOjN/Wm326GVudx5y/RugG0fvlsLhgZRc62R8xKyOSmE88IjT82RyzsRpxS5uxv5qzIfvbjEGx + fOkye4h2GK2XkfWPTWscCr1HnjErI5KYTzwiVAUuHmLHwPbiVH7qVsy5rVkWXlZbv2VW4wMiqC6Y + rE1oUxbpWL3HrI1IWj7xiKDEex0GHUBTIyhCQeZ9U0gZmru71j2zq+vrqQGY5UGk449VhNP4HrMy + Ion5xCNCs08dPujObW7B/Y03cu3rdFFvnC9uY9kn+4d6NW8xisllVMoDIq8xayOSlk88Ik6L+VKY + 3VdlE49Fd2CXYVShs2+pzuZW4008fMidtQgpbnSFTvZHzMqIJOYTjwiBUcYKJ2Oxk4PrReyh+RBA + Cx7Ux9RkMh88RDxD+mAHvrhzFTrZnzHrIpKaTzwiZ3s6eInLnmdzUve5vOhanMZ9Gi4DipnzIwq/ + 9Jepe2AUT/5DmTXyjFkXkdR84hEhRs5vhNnFeR6bkvnFm0eRlly51lU7Z+9aC68L+uRNr0NSZqIH + s+8Rsy4iqfnEI6Kl3q9ns2+7kVB24AyTzv7S37Xzd5pDuSc7RWxsFAQhBcxSa+QZszYiafnEIzJd + Ixdwk1abiqrhtBupcVWHv+du7w3tZiNCvmBqCD4suN6Fdi2q5y8xKyOSmE88Ive0tZfWYXAY6wwh + 6rhTYe51SzoCgUtu7dcUJ8cyak42LnyUIufIW8yqiCTnE4/IZEbhQHgwHNrvF2vPXiHFf9iDJvOa + fbJruiC2IGvR6zIyXwiRR8zKiCTmE4/Ifqr8mnwbme2FTntLt/FwGCM/8KlsM2esHCKq/Gjx3BXw + 7cv0EN9i1kYkLZ94ROa7c7uyXen0cejR2OW2MIc78Dge/Zn7Zl9YIQ6cFSikTuQBC62RR8zKiCTm + E48I5v+14Zqb0g3vdOOHi16uPctGY2A09/YLEQbH5t2INDraSyHyGrM2Imn5xCNixs30IX1k0x19 + N+5N39EO8iLRvCI0kL1GWjMKGBS608ZIyvRHaEq+xqyLSGo+8YiEQZGdBsAenI1HZLogWm72bZtW + vhdtJhP7Y4pyboDbDipbRWq/7zFrIvL+z45f8/GIhOH1cd8unlXH6LU4nizBSTd6ntxEmTO5KEHC + BKsRXYuIwVCmz/4esy4iqfnEI+L8nvkrBA1xJiF9byLU23AbeB2HdmzmOeLrOaO0dHtCzNyt439T + /zZf6z1mZUQS84lHRMUkTLTPyorTegx6V81Mh0NHg9/YMWSeI+yrsL6Cu8Lcdq7YZRB5xqyLSGo+ + 8YgworRjTXScWL1htfJ7u+5r6hlPzYf/lFdFCYjQ6dKR1EGcylQa32PWRiQtn3hEgnrTtHsuoDy0 + HC70EkkAK9SONISWXDdwX29qg7NYa4zEplhojTxiVkYkMZ94RIZmM4NIhmMGynIdx+EvvcWhHs/u + IpP3G4oo5t44IjKap8hRBpFnzLqIpObzCUR4HNNjbE/bvDsq/yrFxo7cgycJJf/MHmJAZBqhrQRg + 3RWpa73HrI1IWj7xiKg8XfPJ4rgFBPXSHSvbeJoHysYBLveF6Oshai0O95Yq0Vpk6u09ZmVEEvOJ + R6Qjj7HBg2g8lySa8UboryH4uGj5GUbMPdkhQpSIqy6691Rq13rErI5IUj7xiARBwKXf96EPZNPh + 8m+21q3L6KA78Jy/a5lSQPudwEE4rtA58ohZG5G0fD6BCFYWvzeFxaAVRJnhPo9uoStuv+nmLbOr + G3aYzl0Ly3RFFCqi5vQeszoiSfnEI0IkaNlRco9tx8N2ETZ8M+2mEb3nNu7H+buWMXbu7FTVpjKV + xoDIa8zaiKTlE48It73xZr239sYVdMNHiv5opsGl51wNUmcj0veeNcirphHL+FhB5BGzMiKJ+cQj + 
QpQRvwEP2/X0umcqs1enl7jbaOat77IrjVA2e4rOo2w5l9q1HjFrI5KWTzwix6XA0WH+bvMWDF2Z + e150Gtl8cqzUH89eIybexlaXknhGGRUOa+QRszIiifnEI3K2XtO9X/B1G05YL4qy28Kex6tdPfjK + 7LOHrzcQM6X460lSxg38PWZtRNLy+QQitz3l8DOeT/q+lolXIuGg7tztZXebqVTj66mhgMQKCT7t + hW6/j5iVEUnMJx4RK8Ihcl2Gpe+N0aAGOzLEbYxkC+5vmVNvznQCwsFbsWVBMhQ6R54x6yKSmk88 + ImHyP2gCtuu9M4e53LZu4qNbEMRa+fDlniPKJwqN2PE8k0pVUZ4xKyOSmE88IiZFum3j0RPMjlsT + cLxCrBXzN92CiJ1d+w0lh46HlVeiSmaZXesZszYiafnEI9IaZJ+7+2RmpXwytvrIN7V6hC2Wks2V + fftdgsLWELz9hmksdI48Y9ZGJC2feEQ8PtiOmOTBQTnI968GC6bba30NHJLsjlVPO43rtDoAt+NC + zIe3mFURSc4nHpFmRMv1YLCzNONFFdtteDyma+j3czuuXOaDDBwj5hRUzMBSZNd6i1kbkbR84hFB + OjxOhkm04zGl6Wfe0+kvp9ciW+ojc1qBrhZ+C1PYUenSvG4ZRJ4x6yKSmk88IqqLDCmXQ2n2aLlY + +XbeI3wQQGSx5L5HgoJp0GikwmQhFplDDCi/xqyNSFo+8YiwPjywDY+LP899bYqOk2GFW+njZmeV + zQ7y9Yxm27esEz+vUmvkNWZtRNLyiUdEBcXc4d5vmHMGp3mFKgBjx3tBoL5lv0d61Sy3BRNvjS5i + KUQeMSsjkphPPCI9Q0Ray81lil1jHQUx3L4OlXnvk2PN5f3ariwO3rBBu20udY48YlZGJDGfeERC + IX6+XFCVOs75bFUGe54kZIAJlbG+yHyzQ2Sdgxsi4x9S5YV2rUfM2oik5ROPCCWtMziGUsnENp0a + FK2tD0XgQ2mlP7NP9qDgB+1+xCLuy9S16Ey9xqyLSGo+8YisGwlshrprc6pwuWwxhRm6DQeG4+5+ + 5L7Z//z1FuJdZGyLIvLXmH8HRBLyiUdk+lCga4PcvhmrBZNxOya1YA3djRd1LjvoAxFqZ1QlQq2s + zK71jFkfkZR84hFBIzh12LdmCq+Pjdv0tC2GRzrVLXeu3NpvQMT8VhuKAvwuiyHyErM6Ikn5xCNy + nNTJ3bY2Q2m71YiNvxNG4XtBYPyccnuIARHiAnSDDJA0ZfhabzGrI5KUTzwijnLzbiyOWxXHHi1l + RNMd9mlbtWKvvcCuRXkZeYQIM3pQqTXyGrM2Imn5xCOyop2s+8fdCi9e8ZcMHQEhE2pop3eXjUjP + ZUhrlyTgXMgz1OH2iFkXkdR84hEJA27KvAyoGxI13XINrGA6fGyNRLy37Novpqo2pVWC+DAVOtmf + MesikppPPCIuV+H9Nk6eIbc56obLxU46aKIgZJQk+/YbZH5NojjZ3bkKVVGeMesikppPPCKN6ZE7 + aDVeu3LK0rF8a4K77nYxEViW7F0L/yucSOjLwTCrzDnyjFkZkcR84hE5dUW24NmK0thuQVIcLXtT + l3f37dps3m9PxJ9IFDTUM0vVtZ4x6yKSmk88Ivrq3KvaHvn6REoxtntwF9uCp0Tbn7n6WnS16GuP + 2odov8Vqv8+YdRFJzecTiKgo0lEMzVza1Z2nCF2UlV/IytnKJENupZFfGVRCYLLYhTpWz5iVEUnM + Jx4RJd+2PYJKkBbfrYfR8T026eH2ZbWsmcqZ0FBnpMXJek9Tt9Bd6xmzMiKJ+cQjsvN1U45XRAkG + Ry3NGlzGNTxOuFnpLmavEfobWPEYjUH+oczJzhjwJWZlRBLziUdEt9WPt/dcZ7w3cOoxhzi0tyaT + 
LtO1ZHesBKPAwcTK7PRSqBr/jFkZkcR84hEJOmTDYm3gYZvPdQcer21jgktjwChOLiLUGUdiRF6J + jQZJmUrjW8yqiCTnE4/ITBuePXuz7st+rIMnYtsjNDYzhZQWTTtz1woZIOUpLDtNCqk5vcWsjkhS + PvGIMAAnyHBp5y6d0aUgYM0J5nS2H+vV3rm1X18PBXvCjednXWjq7S1mbUTS8olHhMmIB9ztEEFx + o/CLPKeSp09CWVzXJHuNtERlyRBpw7RNodvv8IxZF5HUfOIRQWfkzNQd4Z417BdrhWE/R6MLHokH + L6XcXUtAM28kPexcxRB5xKyMSGI+8Yics6svxQ3KKMxW4fMhsHVrL2nq+vfZiPQaLlh0M0GiQgw6 + t53XmJURScwnHhGe38epIs9ObODbzfnNa4TgAH+YTuM9l/fr6y1hTlcvPww7FnmPvMWsjUhaPvGI + 3ANAmsMY+3hc00iTYyWkORoiceBTyc5fIwrIZlGQJDG2iiHyErM6Ikn5xCPCEcaX2pkhug0txzHg + o9wjuxDWuiav8k923pT9yhimR3wotUYeMSsjkpjPJxBpecLQxDYIynLaZMHezhYMiQZ09jOb04iB + TcpZi8QF2Fux0Bp5xKyNSFo+8Yi4WtHEHhFKXXmPoTFI3U3oKcFGibxWbscKIgQM6Dkr0LFFLIXI + a8zaiKTlE49I74J177tOuyF2wqYYp4R+14+Zkm3vCuxazExWeo36uk2pc4RTyteYtRFJyyceEc/z + jf1Id41waQecB7Z76EH0f+kFLtlvdlJqM1/xUZ2fXE2pNfIaszoiSfnEI9LeI0lAhQm6WvOJ7KBX + gowSilzqzkP+XUvNjBwgL3CbV7Fz5DVmbUTS8olHRDcXzZfG1jm0DJOa42bSPoynZq9F0+W/Ryhw + IDTO1AbROEqtkdeYtRFJyycekcEgImq8kfbGHOh1orvdzN8upq7t5KWS/R4ZzA1SVNOcZEdaCpHX + mLURScsnHpHJ82MKHmInGig1wLEn1tjjx0/XqdyVq4k96N9jU2ACulAXUil/i1kZkcR84hHZVeJJ + WgbxatqljEdG3ojzsjM69qorUGnkOB2eI8r7lDgKrZFHzMqIJOYTj8ixbidr42Dleiy8p6/tGq5N + X/Ha78klOHfXmnW/jI4sbtnUM8sg8oxZF5HUfOIRUXq/Psb/mwnjlwLEcWgndhT4PdzXO/s9gs8Y + Lr942MM4luGiBAG1l5iVEUnMJx4RVS1CpuzYzIJ2ZhWudjfq0SKa0mambJq9RkKLnZipZn4hp2Oa + Ho+YlRFJzCcekW0IN6uD3zTWd7hneRpueEEM2+fz7nPZQdaGfpj3IZNP+iuFdq1HzMqIJOYTj8hF + u18jZFPfmnYvkZYJOMkzQ4goiE1+n53RhTl2VC0mJIUUBsR7jVkZkcR84hHx2+UAcxykMjsk+XFY + z46Fa7Obg2uWLZfTiHhEQGLQtPIZC00rvMWsi0hqPvGI8GAblZyoIwOfFUkYiOLyppvYTyapcxWY + B9IFiNj4c2E8tIgfolCPmJURScwnHhHmVYxGgurveU3kAPfZpkXUlN5W69KVq/f7MRJK9IHOhyOx + zETPW8zKiCTmE4+IX+6GmzUh+jI2JmFiGnEaz+lUfsTQzt+1vERMcLEGCJ3EMie7PvFLzMqIJOYT + jwi6lh1rPwY6psc2XK5bqsBrYKSwdt1z3S6GoBePeeS9gyJZ6K71jFkbkbR84hHxVrh2snOYpvO1 + 9Nuiztjt63A2HbHZNbs/whrCJYu6AJHZQpoPzGpeY9ZFJDWfeERCv6q/Zno1WI07Q5hxd3KuWEKE + hJot+xxBJbbagiSRvxZiYj9jVkYkMZ94RJo77FUkZjdTumg85M4oPRw+YdDEmrP7I+7TqL+G4/11 + 
KVRFecasjEhiPp9AZDe17usbcV55VF4TtQdOu2ELQ0VZSiDCoIfdhUvrWqg/ApGXmNURSconHpHW + 6DQUFLHUUsKGFRiDiwcJeSeN8ey6luFfNUyl/mXVtCpz13rGrIxIYj7xiARfHgc44UyCmS5Xl+kC + GZMRYuxK7DKz0ojz27QQsQFigJVB5C1mVUSS8/kEIsd0mt9BFFmnu+nGnR34MQcv16E/zyuXr4VK + E9ztLUPdkbFMx+otZl1EUvOJRwT/+iRCN6LlsubZ+gFF6L70xDkrcOrJfSEaBe5mHhpqD7Qzy3Ss + 3mJWRSQ5n3hEVAD7ESiEHpzk67259n5QTV2AWYnmrhFvdfMjwXzELESh+ZG3mHURSc0nHhHsOQUU + Qvv7RGQLPB5z995g04WCSvbJ7gAJtCAPUI+cQj3Et5h1EUnNJx4Rs2gaI8agBptVE6zB8TeZK5z7 + eDjsc1+IDBX4spOQ0Ikt5fX2FrMyIon5xCNyd9vl/YH8MHhTc7EyvGsIB8t0m6k/ZO9amhkfkNCW + 1aMscvudnjHrIpKaTzwihqQ3kv3uWVQeSPzO5zhipahEMbhnfJF5+w0IO9stO8IohRTP3mLWRSQ1 + n3hE2vZW3+gOBC3/sNN9e2coNmxcL+ifZXd1fT0N9mAIo/pQ6mR/xqyNSFo+8Yi4zt1a6mtztDe1 + Gt3EDdU4kHlYUJ/ZfogQ0bNincGQSQ24zK71jFkbkbR84hExEXpSkkGUdvL2ih3GYDdWY3tz4QZN + ubrxXoZTmNO1bYU6ShlEnjHrIpKaTzwibrtcYOaGSDnls2XCZxxPE8LcFnYc4FwF5ol6F+89ev6c + xQrporzFrIxIYj7xiGwO8YuYKQbPjSaEiMJEKVDU7F9EIHKZD74esbOgHURiaym0az1j1kYkLZ94 + RPYOB3RuUB+Waw9MLe+StVXQovyr9Jjb1Q2KKArxygCB1VhmxuotZmVEEvOJR6Tvd+X4wM+iAtvy + WeDRc5rhXOdDTWXK5TQ6RDw60Yi7YD9Vpof4FrMyIon5xCNy9N1JD3DuuYG7/+qK8LrYOhLA6o12 + mtz3iNVhAKLXY++Ul8uc7M+YdRFJzSceEdaq7WEqYXW3aqByDqHH20yaiR7ySzYiwQMXndwsostc + IUSeMesikppPPCINYQHMHbpnBPdHjxFF3yP0xbeJEXk282HiuGqcx9RbkNgq0x95i1kbkbR84hE5 + eYAr3e1U/FscbA12oxkNq0pVlJYbX+6uBRGk0HDRMtVTbI28xqyOSFI+8YgEYlBnrEoVhWUW8V9E + +WvhmEQFeO6PfESCuIreJHFsmJQ5R8w+vMSsjEhiPvGI8P7Gy1o204jnShxlvlsUuvaaD9KNRJ2y + 10hnvpFklMeISZxSiLzGrI1IWj7xiCyN1pS+q1Odu3G3OuWbdiEz2+jwYmeXQITKnToNKkqhKsoC + 5a8x6yOSkk88IoZ2mL841S8zb90dTBAvrpV6V8dyL012n93XM7Wla+9cKqQdND1jVkckKZ94RC5T + iIym+92GNYOHNC8ZTVfViVPMma1U4zjH+mJAEiYW2lK71iNmZUQS84lHZDu46RKW5W1xN+c20gIw + yL5e/tVh0+UysY3ydF6InPcm9KNSu9YjZmVEEvOJR+TaiF95e8DF/I4Rzg0HhTXlgjN/zHv+CzEc + Tmryq95LqTc7wfOXmLURScsnHhGOuhwUCMmqapnSJa+l0T7z7FGa38xFZZ/sKysNzFU+Vhhhhe5a + j5iVEUnMJx4RO5Q3HMeWe6CMstN6QMcOBj1OeIyU3F3LzQ0PmzQgkE0CF0HkLWZVRJLziUeEssPZ + a+Ye7JKG/Wiupj9NwakBYwTze8tcIxpVLtEAJxQF+UKIPGJWRiQxn08g4p6F7dt3zEMbfq76JDsB + 
jsBOcSR3uUxsiKAFeVQtyilDmSrKW8zaiKTlE4+IjWk9j/28lm13IWITSnoptNnVuzwl8netoImq + 8oBCx+6i0Bp5xKyMSGI+8YgMF5OejurZpTHiMOFiZffqWVhxD23a3BeieTduZSQBxzW0Egsh8ohZ + GZHEfOIRCV6eo7buPqE50JNd2LRzOF4OH/AcxtxKozEevLzwl9AjKTPP/hazLiKp+cQjguXrpsUj + 6dRC5M3jlO+ue2SywazHeGLuyR7em1z9Bk0NogVl1sgzZmVEEvOJR4RcZj8hx9P4DcKAehgnSfH7 + ogo78A3NXyNOdpqoXuwq8oV2LcItLzErI5KYTzwiZGTpyQZNsom3J8bDxWah8zgMMvK2tOw1gqzK + L0k1fkBiLrRGHjErI5KYzycQuTcNIKICi9N8XUj6k8JGchyv7pz3JpeLEgbyaUCG8VaAFHqPPGNW + RiQxn3hEsEmvZQ+o9OO6aTys69Y3C86bf83ZypkQCe4tQiGkdIXe7M+YtRFJy+cTiFAIuu9l4EDN + capbb//TNiZ2aTTu15SrnBnaIphgM9rR4H+X2bWeMesikppPPCLnfRzXbkJ3O4ZLFa2biTDT4+gV + hc2qZe9agTHpUA/CnB4lhRB5xKyMSGI+8YhQkFXk0OMjcsYLvCMKSHaESOB6oztmawfxNEE9Cux4 + 1KO20K71jFkZkcR84hHZVU0M8lzXtt4k/RvToSQaT1YIA3WtObuuxWTGbKirllHEQiocwbnmJWZt + RNLyiUfEONrWLrs9xbguSiMxQITssd8M2Jqozq6icF/ihbg4SPgiFrr9PmPWRiQtn3hE8Hsn19z7 + IOR/mmU3OdKfH0om1slZ4GTvp2Vy2zKLqLxV6Bx5xKyNSFo+8Yhs9z1phNiizM6zvLh1Ru6r78y+ + zSr02f2R0B3R0VUxZU1a6hx5xKyMSGI+8YjszPesBu5uOrlBzJ+fgm0MDduIzzXm71qBTYzuLT4N + /EJr5BGzNiJp+cQjQsOAD7sqBy8dDREzb1N7HrfSVs8z6cqdQwwSH871fmp7d4VSu9YjZmVEEvOJ + R6Sn8jsem9qThwnpvubY9ZaQgE/euorBuXUtswpM3swgqpIVUnOC8mvM2oik5ROPyHG3yG2bbWrr + jtljhFEPpcuW18VwkQfMR0SLBy2PtKy5xkK7lgvI15i1EUnLJx4Rtsb7fpkRbJFLVbV0lobpUqPX + HLm9HUsgMnlBGIJwmJRD5GvM+oik5BOPyAQO2ijEHnTC7VfMja9g0ePCtd9OlGxE8EyDxhY7cAOJ + hRB5xKyMSGI+8YgY3eE2EpYHZfItePGtih7BvjtIpfTZ/RFEVW2WoE0fmi9lEHnGrItIaj6fQqTv + p1Eft1VJCcLxPZVy7Dm9xW7I1mlEZyTzbvLUq4QodjlEvsasj0hKPvGI7ON6ttvGmvKeSTjRqdmH + mQ0YkYE2zLXn7lo6kea2QineSE8xRF5j1kYkLZ94RHRDtiDxuzXn7VGozNgfm5OEMzFZuiH7zQ6R + 2dbbUmtUXi61Rl5jVkckKZ94RJRQ9htBiyzNRuLhRqjjOm3r4tPDdiH7rjWyBECKNw+s0FhG8Wx9 + xqyLSGo+8YhQ2FeNZxwaXCqZWu1EfpXlD5Woifx+dsfK69A0DzBUB0v1EJ8xayOSlk88ImZ4dlNu + NxlmtKDpUGQ0Ta0QD5Fza7OrKAoo7nKrXhhYCjHonjFrI5KWTzwiw0FGduJpvGwTJZR5sV0dGz0s + 0gOr0cTck33kBm5ukCcWbl6hutYzZmVEEvOJR6SfEX+bs5uIYFHKbAhkH14iuyvx2u9X/q41GRxB + HjFDYiMsc7KTvnmJWRmRxHziETluj3QOx9doFlH56baN3TezNytE0yr7hRjqcv3kxTO7TJfatR4x + 
ayOSlk88IgOqVtMt3XXdu4kRdpW64ss2z8fUbcQas3etuXVBwKnQip0LVVGg+xKzMiKJ+cQjouB7 + DPNJAaUlnukwH3fT++Z0T7sYtm42IiTKSTVScgrzCoV2rUfMyogk5hOPyG4ilwcJLVliS2E+YTA2 + rd8+nae7157/HlmMYyv4UxkwCV4KkdeYtRFJyycekaExonBM07UQ8+cnRvOXCrOhNx6Vxzjl71rB + vkUpngwO6m8hRB4xKyOSmE88IuMV2usEg+aL1ug9HN1yreTjt5vmAD/c3F0rKAIiuDD+Ue8vdLI/ + Y9ZFJDWfeESICixWBSeK7eg2/Sr2ezc5Ux1ENqJrAUS8cUyfKpnxRSyzRqZgCvDXmLURScsnHpGd + EsoxkARcdq1coorUADozJJx22WBM2bXf2XyVxw53SgyIQu+RZ8y6iKTmE4+IsarVIjkuXVfy5Gop + LltTe4zOknW6st/sMzkPdaggUWZIqMwaecasjEhiPvGIeCgcw0ZbgOfIcIYNjK6Mey+TGH2S/Df7 + Yn00dka1RvtMGUSeMesikppPPCLt4a570mmkLKAnciK6hU4ypimlgXPO3rX0d3DjqRAZIOkKVeOf + MWsjkpZPPCI71mT/wYoPzfVFY3dbTVJzEPVSOcds5sNqllFnd3AD1sAvs0aeMesikppPPCJdsDem + 2Ue4FIuO7oYSl+GRmfkFasqc/WZfOV3wqAwG13NfaI08Y1ZGJDGfeERIL5/qG7dSIOFlfV0TPtvV + 7ss9rsatCiDCApOShE67IatCa4QJ09eYtRFJyyceEZXFzTDtdqxbexlDvDlU9mh7BJ1Gju3ZHatQ + zhobxCDKmYXcLtZnzMqIJOYTj8iOpXXddkdjVvd5eLZT+r1Hf9tli6J87ps96AZ54YzBt6zYOfKI + WRmRxHziETG644FIKJ60Ra8caKKH7sOGp7BraBwFELHcgje77kghf3Yt4teYtRFJyycekW0bN5IC + ZEs4kdBiPvtDS3GgB8uUb8qfZ9dgJ0jVrYZPwVLoHHnErI1IWj7xiBzrHvb5ixRgGGFXAvb5HPA0 + mE1fHbn+7CviHOIDWssQ3u2FEHnErIxIYj7xiKDk9hyrdpVZrt1AcUt1sNhhZuyRI5tBZ4vxIAmW + oYophZgPz5iVEUnMJx6R0ZvQDjUha+2EyRdUUzLMg64V7cb5yGQ+rDwQh5Ezu5FgDZIiU2/vMWsi + 8v7Pjl/z8YicVEbNuXEdQRjZA41u78j3DWoqnQJh5hxiyGAGhmEFRmVlOlbvMWsjkpZPPCIjvWUu + F5xgjLKrAJNHMc+zdpzAz3PZM7koq3qvyUZsAeV4cv7xv6mfth+v73773e+uL7/8y/dftv2H6/df + rp//kejje8y6iKTmE49Iv3OnvFY3XubfTeiQBGb81Z500BoUobz3iK+nXGZx9F3Yt4r02d9jVkYk + MZ94RC76paMHAyo2LdPzni6zJIinfbtROO0yJ0N9PcZP3Bsag1aUOQutkUfMyogk5hOPiA9PH/Pi + PsKMYr0cK4HQqKc4DkHbKbOrGxBRhGcFjopiELEUIq8xayOSlk88IvpJ27kRoBs4VHoo8um5eVsN + NLL3fVgyq/EBkRnptw/asl0ZvtZ7zNqIpOUTj8i5kt04biNVnj6qvSR+9vPuBkXGYd7X/JNdZaah + B7gi/5ZxA4fII2ZlRBLziUfEHPvCZyooa2kqNefRe62vmrxYp3S3sm+/fBAXr3VijZTKi1RRAiKv + MWsjkpZPPCJXj1u6sR5xFbK5cJzmOs0s1N8nlZ3bsVKDb1aaKGZ13R+mIj3E95h1EUnN5xOIKP32 + GxId6uG4sXTVq1L19WzftXvX7De7wq+GsfIM0ZWm0F3rGbMyIon5xCOiF9feiNjOjYsuo1NlbUzg + 
HMtyMBbLVSn3ew4FLc9cjR5xy9y1njErI5KYTzwiTg2Uk+VswwF/+Gy8QnBTxvlgCOMWnPtC7Bky + Yf06SdSUi7CDFPYfMesikppPPCJr2JyMzHdah+Ep0l56WGxcgkhDi5KdjYhOFZ8LFj2Nf0iZNdI/ + YlZGJDGfeERO1DkUUyocZH+ZJhkZ7PqrXReMiN4MSS4ioZxFEAVdUuxC58gzZl1EUvOJR8Rcbmjv + zccy3WQFLIv1UGik6WTQx6U1HxFhDNB55zRlOI1uIONrzNqIpOUTj8hK5WFf5tByvQZbmDq8Yz7I + DKBiG2DIRwQUyC7UamjglNm1CJ69xKyNSFo+8Yj49Qb/yCvMj3g6nGdrRpf2w0frfb4KIIJJMSC2 + 4OaVmbEKa+Q1Zm1E0vKJR4T3i3H2aWhwtoZtV9Y6iAG2U3tzWWAtmb1GCN4A19CQq0Kpc+QRszIi + ifnEI8JZbLn54rX0tLbWrK7dqyU4e7cb9sOe2Wc38Ibf5DFCI3Uey6gCvsesi0hqPvGIaBkSXD6o + N23e6yvy4X2O90HOSZnr6LJ3LWMK6MMz571QcSxzjjxj1kUkNZ94RMaOcWt4fUw3mTOOPQQ5OgJ+ + jNpJMOV3rJbO+UHzLHjvrYX6I8+YlRFJzCcekWPxGtEX24ej2c0eUm28DflRjN+8sXOdxUJFRo3M + 9ZrEtkVXaI08YtZGJC2feERmTN/uQKUiLWscsZ/UU2jLIAx5Mvq/5Z7sLgckHywQ/ftSL8RnzLqI + pOYTjwhvNzzsebrC/M614WMjzhHmMFbCGrzL7uoa/8bDDoUaA3yF6lrPmLURScsnHhHOe8Fl9ZhU + 5BFMvRRv04dkncI4iZGP7DUC1sHm9dFpL1TX0o98iVkZkcR84hGho9i0QfPBZWs/PdqNtl/NvI/E + z4ILXz4iFmDQwoFvGUeYlfjXa8zaiKTlE4/IdlsROxcYKo0seZCDiDAHLWa1o+aeCyCi8KvEH0ry + faG71rq8xqyNSFo+8Yic1z7QRh4PLJThnK6JukAYJmlvbnxzro+V3zOFcs9cdm/e7EXmEN9jVkYk + MZ94RO7QUV+2QBTkDT3arqhsnQvRuE45eM+t/baWhz0/UPSoYpWpNL7FrIpIcj7xiFyqWcM23z3X + UM4KwXbPlHMw6GEQToU08xyRAVnskdsloulQ5q71FrM2Imn5xCNy20zuYaQaT9xn93InpRnuwe1m + CMdulo0I6WUE09EyMaBS5IWo+vYaszIiifnEI4K8SDSIKOB5jC2yQ8eHxFQoM0QHPZvdfESwUKgM + IBI7qEoh8hqzNiJp+cQjsg5bd3f72Wxecc11BBkTPXfXo7W59y33PRKUlzulIEUBtqGlzpFHzMqI + JOYTj0hnn1qWq9+nW1PkZM/TU6xxkCyuXH02pxEi5JfZkU6kyst4T7P9fcSsjUhaPp9AZNfD3VtM + IAqwSo53rxqvfG7b5y825TLo6HXhPLhIG9jSbS+0az1iVkYkMZ94RGBgYER77+SrMBy70QUMlJN2 + /BZUsvNPdhL+wVdcbQvlqBgiLzGrI5KUTzwiG+4i20MnetOTKm/XY+kIPijOX2c7nAXWCKJkoGIz + +Sv0ZrfuXmPWRiQtn3hEDJkPExad+REiHIBpkRzxIFB1DVHnKtXY8wMhT5/Fq11VoNQaeY1ZG5G0 + fOIRMTlic3JBHcdjdvldtBGJ1IwcewyLlti1Qteqb1BcmB2XQuQ1Zm1E0vKJR0S/akY2RMM+seOt + Cub2aArj4ZEY+HvZ75FhRZ4LcgWub2Wq8dbda8zqiCTl8wlEDP4P+PF7S3rZQPveMK9aALP1J2Wn + /PcI2Ue0SbAvJF4KrZFHzMqIJOYTjwjXaar43oM0sSn3O0qG3m14v3b+VvTWstfIHKyt1w71ASiF + 
EHnErIxIYj7xiLTMKblWzdTO2oPxXhjlxNMazVeNy7ZkKtU42Z0eXuvBENwJXwqR15i1EUnLJx4R + KjWuWg6PnQ/4dimn6LnzySMHcJEVyp1n17PHZuzR6OhDFeKivMWsjEhiPvGIOL2b03SCfavdOnM8 + 49FMLBaanY4QofLsXYv8fKAfeSiiYhdaI4+YtRFJyyceEaSTcJQHWrzJ80FzxNN6OcaDDjBRuvz+ + yEIxnhUYFWYSBqUQeY1ZG5G0fOIR6ZdrJL8RJMkd8pimeFoEG1sIOfK73D57a0RBDQV7DqNxKnXX + esSsjEhiPvGITDeG9Onuuw/bee7U6PqeQcx9eh7uuu7Zu5a3urHBMArMXaHQGnnErI1IWj7xiCDG + X6NuYRg/vDdSSfdxk34gI0Sv5sh133PFwi4d6J4Hq+NSL8RnzLqIpOYTj4gPb0l0ZADpxGH8ctzb + lnm9nPdBgym70igDLhf0HhT4yygwf6D8ErM6Ikn5xCOyEwUM0+zDcRCf446oQLEOlOQDy3HOr2s5 + SDxGdEJ5JpXxeguIvMasjUhaPvGIGN/BzTqp1LAkNi19n0EwWR1q8xICTO45ElgoNFGU+UdaYWXO + kWfMyogk5hOPyEEqiJfCaFDwunpE04ANN6t73cgwX7lKNS3anGKMexzps7FQNf4ZszIiifnEI7Ic + nh18R66gG8ScndcFCVOK5d3RBy529hqh0MglYAla/kuhrm5QffwaszYiafnEI7JeW68sezS8FfR1 + idDxQaTbqLy1H10+O8gWSD9oIXgZHtqFdq1HzNqIpOUTj8h4HvPG07hTSTnHbV2pPzRH+Jv7FfwQ + 8tcIYpBN0fhDtxSqNNL1eIlZG5G0fOIR2U+6KCztz9kwLbc3hUbz55qHQFrRTrMRwd0gHtR5tJdS + PHO2vsasjEhiPvGItMOtQeWGtdD9vRfrRYPXAtnWdt90x7MRIfDslTO6PPhXoV3rEbMyIon5xCNC + xn/liLiYEKVDp6AyrYaop0CCIFVe4IVo7XUmET1AGd0XQ+QlZnVEkvKJR4TFHp7DjuTbe7jPZql5 + LOCjsLrwYOxyNR/cpY2JedQpyJNcKYTII2ZlRBLziUeEhysdSiTG+UZ76K+J0sBAKNvzwfTVks3E + tgeGGVNvkkHVtAwiz5h1EUnNJx6RgyPMcCi+Typa3bjyDm038gLUAte9n7NfiBNRwJG2TBfGTAv1 + R54x6yKSmk88Is71nWVoG+qKBGrcuTgheCOS5uBE0mT3R9jpcDbBahypDBRD5DVmbUTS8vkEIsy6 + nR1Nd507YrwqI4OF9TJp1ZgnabLZQe5YIA4ePQgQhfojz5iVEUnMJx6RfsXyRWBnPWMC1rxCTxnF + lILNa9LszV8julUojZ1nztiVWiOPmJURScwnHpGBUysJ2LYNvgoHpXKOIWGudkbYuhVWct8j+ocm + Qyn0kpIuxcR+xqyOSFI+n0CkQwvq72W55yEwr9sgR7eRjdct6dgi5iMSmAIE6CjcTYVuvxO2/deY + tRFJyycekXZauntsZhBweEN7aOgueS5S3AqSP/lrBCvT3do/Icxzlrn9mqF7iVkZkcR84hExmMtE + DA8eg5HMVrAf6Ya924whjtq9+WvEc8Q4cDBeLeUsprTwGrM2Imn5xCOiUBK88W6zfe21bzhB+kub + fkZDl2O8st/spi1MtHuK6I8UcrvgCPAaszIiifnEI3Iex3bQ+L13Ck4X11DK8d1NovHU5m23/DVC + xJT7XuA1FuP9UqF/iVkbkbR84hHhAG5yZ8H5DWMR/Xod1AZu2orkG/XZs1U4qOM2YeiUxKAduNA5 + 8ohZGZHEfOIRMTq9kBfoScls490e+7zSDjoashzHBZLsu9bMbUZZi4mV6m8pRF5j1kYkLZ94RFpl + 
DvVZh8cUzKdXb0VOecG/m+eYinA2IlQz+Y+oZpKPL3X7fcSsjEhiPvGI9FsoZaluhXFdHj0KjIQB + MSA4jh3uNLmI6OiG1+G6AL2QAjOprteYdRFJzScekV1142YVappnmC6imeO6s6E5N5gwHMtHBOWe + Tewyc98rpMDslfQaszIiifnEI3If43kPG8mgwM66N3vXdmCdao1fm6Mkd41gz/WrugyrmX4oo3hG + x/A1Zl1EUvOJR2RBgT9N59rqZ4LYG7e/YQ8SjWGuZM3X19I69H4wP03LZCnEaXzGrIxIYj7xiPRn + w5zdZWhYGMAoZV2nteKyaiJgtHdlrxEP9vAWCW+SUnet5RGzMiKJ+cQjMpztjHStyrGcDvKNwa7J + UId9z/NN66QAIkMwbvDULfYegchLzOqIJOUTjwjNUkPTvQfJ0txBes6c1dGSzGz1Fqcje42s3cAI + 0fi0h89c6Bx5xqyLSGo+8YiQpsHyvWz1c3OMNAiHTg+WpdWsrEXbP3eNcHij8k4LPZiCF2I+PGPW + RiQtn3hEDLphaM1bmKZdpiucJryszoW8k9moPvuuRYyVSi3xTP8zFDrZnzErI5KYTzwiWumhrbsc + RqtUt0zWrrsZzvtap+AWla35EBxNgm0Sx4u1lHbQM2ZtRNLyiUckyPW3Ch1Da/NS41ruoSFjSitl + bg/2ldm7FqLW1KMH0TMtNa2g1PMSszIiifnEI0KVfLxVOII9+xTI2H17nHNLR9OjUau9ACKc5NyA + eU+X0qCDyEvM6ogk5ROPCIbDfeycdMO0SMvjbezd7oLXLgkOJ0A2IsZ0yWJzhOnRf8vUfoOS/9eY + lRFJzCceEUO5eHLEyTtuunrrDaXZifycYY/mXsfc94iLm6d6p/QbfO7L3H7fYlZFJDmfeESCslYT + +iLGRUgIKZxQje/6vT/D9Midu0Z4YlGOxw5CLcY8KrJG3mLWRSQ1n3hEutnvt1NhnKZtIxc/buPE + zarlz44amt1n9/WIb7CDbZTNxjKcxreYtRFJyyceEZdS+lrB4JhFzwCaoAy4XXyJl+He99z3CF4F + YVnyNxokphYKrZFHzMqIJOYTj4hqU3BLgoN649XZ7WcCAyx7FFZQXHO5KJywPNmVofxPKQ26t5iV + EUnMJx4R1SsdqtUI9RQkfv2HDZGn22e2iNu05qpwdLTUMLBBTsx0KsN8eItZF5HUfOIRudyub4Ja + +7KTGhhWphf3uZ1M+HrjtXvum93X40s8rDje1FbK9NnfYtZGJC2feEQWlUUXXpvLvR/jhhVkNhn/ + 92gWNu1N7kRPh7M6OEhcuOhil1ojj5iVEUnMJx4RUIztHoRqBpbTK358KMmHWXYUoSF7MrSbGE+r + DIaOBpJ8mZP9GbMuIqn5xCNCPmZtwLGZP2QEfm/rZX5kClWVCekt+z0yMWlolMvCRHshxTNEiteY + lRFJzCceEVvJfk+2rkPdZDkavlZsdc99XsagnZmrHeTroaIoyHt5DqXeI8+YtRFJyyceEVNVs64I + V3Zt9X2iSz4SB6RQrqW77tkqHIZQaEAG9eWG9WUZXZS3mJURScwnHpGF0cjlHsQK8WJHfHsecnHl + PbKdmCn575Fgreh2Ys8KSuWFzpFHzOqIJOUTj8ge5kHXEcuB4/HABFwFeDRidRi1Gtcz+66F/4Ub + 1DQuEKVqv9bIa8zaiKTlE4/Iqgzf8/3uKPdjo7Qqv94mXvChjWWIN7MaH0bnQu+eh6u2brFd6zVm + bUTS8olHhL1U6E+NhExX1yzPRUoDKHU7rYFmzeaiONUVyLjqztysCk1Pv8WsjEhiPvGIDCjXyzH3 + l6YhC8lmpy2wHpPXCQKXX3X2GvFmVx/ryGuhCBU6Rx4xKyOSmE88IgQezqbfeOi2yx1QINFo2uM+ + 
jg+/9lw1p+CEOHC6WJRkXIILIfKIWRmRxHziEdlwUWiDXt18n+tGm7EjwcyRj3wQdZk7+xyZ2WcE + ZUD8EbTGMog8Y9ZFJDWfeETY8piS5l81HGSNuVT2xLaUoExUj2SAs3et8DDUsSLAXO72+4xZG5G0 + fOIR4SkWmB288bRHTrM8h667/mFHGsX4bq4jjBl2xTESK/gU7KcLrZFHzMqIJOYTj4iHwsmuarg7 + ZuDmpuntk2C+XL0GpMZsvV+vQrYNrr8ruYKmFCKPmJURScwnHhGDI/SWcbT4jrRHsJ3eeYiuXHbZ + i5ktyb1rBc2g0KwiZ1dKhSO4lL3ErI1IWj7xiNBumnaeRsF+D7t0RofXGzlvTkdMK9vsPrvzPGiP + EsZmQ1LqZH/ErIxIYj7xiHTK8KEGf06KWN7rwcN1N+2BLEQ6c8i/a01h9nfRIwkjKYXOkUfM2oik + 5ROPyKrAQZemUeadmSW1GoekhDyyR97TbkjZuxaT2J5h0odWTak18ohZGZHEfOIR2WbGe6G01YzN + QVdraK9wqHAExzjFRsxGhD4UowvnIRpFqZP9EbMyIon5xCPStFezdUGRd53vifEeDh2agp77Qcuk + zV8ja2vGirKMK/BQqNI4P2JWRyQpn3hEDKWty844CWGO7lnn29Hzv4me7W2QGshdIzasYJ9M8DdM + tZc5R54x6yKSms8nENl7wv2h4mv+DXPEKOo5zV4nzcD4Ir/Pjhm0oJPbD9exKYXII2ZlRBLziUdk + vIOFVZh98x5h9WZWfOWs0Oz0z1CCs7nxi2qAGgrqw0iMs9AaecSsjEhiPvGIUOHVvDW6ebRnszWb + LZ92JqEtniG6vfmIWB0OdYwXLfxC1XjElpeYlRFJzCceEfWsA+P3xvimjB0aubOu7rYN7BbsYtld + Xb7JH2wt/rrWXKE18ohZG5G0fOIRQQBCDeJ37LWA2jithhGnwY+azNN0ntm3X7RulUYHicmeUog8 + Y9ZFJDWfeETGNtQZBxJCKlnDtff3qYnoaaJHQhi7ACK2RXAoavFSLrNGzMe/xKyNSFo+8YhwrLo5 + TbttKaOgn3zMvJ3DtPGkpm+a65pkVGFxMg1K8tj3hXatZ8zaiKTlE4/Iriqg7EsZsD8OfUM6D5vh + ketu+y3oK+a+R9YRA1E9fmRJOhZ6sz9jVkYkMZ94RPRwGdndByt2kvH6YysexEwR26SCSYNc5Uxe + WB6fThIkPTZZhXatR8zKiCTmE4+IAVBWofrqCDzq71xIugEXQvcw8CHO/F1LndYpQsFgRtgqhchr + zNqIpOUTj4gJqDAzfTfNNp4LxbMTbctBcswX1d8jV++3AwZGI5a9YllX6M3+jFkZkcR84hEJfkmE + nDQMlbP4KSzU567N9Mjgt+DClXmOePhTPVfZ6tVnCtW13mJWRSQ5n3hE7kOJsT3me+xM8VxEOG6T + l8NMrHybtiP39ovsi7LVYjR6jhRyu3iLWRWR5Hw+gYiuxXK0GxCupdHY3YfpCCrvhLIPfIjMNSKD + UFxW+B0mB1aRc+QtZm1E0vKJRyQoze1nsNPl0c7Wcws+Eue1+t8tl8TcaryvF1gPVl3oVRZD5DVm + bUTS8olHpCUx2m/aVSd3N0Qh8yR839izXc4RE+3Za4RkJpFUcPRkJQqtkUfMyogk5hOPCCnTedn7 + LjghjmbYcX1VtlZvh7uhlpJ7+w1GBKY9egLoSr9lar9vMasjkpRPPCJ7s90nUjzzb3cuIv63IThv + 9Wm4SWkOuR49ZGqNKawKKJNphTJM7LeYdRFJzScekRsPiPr1xppnu+lhm4a6eu66bHW13bPf7L4e + eYHAB6OlthRaI8+YtRFJyyceEV3XMAp63rx1l3bZ7sPp4YGt0kVwYsplPpgdaQyofKioObDKnCPP + 
mJURScwnHhE/Yv3c6TKUFqZBlVXH8Z4wTZ0iZFKyb7+9WxZLHaorrlxlqihQfo1ZG5G0fOIRIciI + 8hDUkb0JP54hnov9ei8GqBe87Ny7FpfjIczrTkSdCvmPQOQ1Zm1E0vKJR2TXyT+UTuyOXojTRFLr + WiaXr/A22bIVz2hFGVNA1poQ5NtS58gjZmVEEvOJR8T0+oEORHPjwhJaWYz143gpcxGi27o5txpP + TYL2nMqvdeKBU+QceYtZFZHkfOIROfqTOsbicCf0y4jkIOA/kzVFGjJi1WUjEgyTg/OTLYs0YBlE + njHrIpKaTzwia0cT8MKL949i7nbosdP119Y9ze72Te78iAG14HJsqoe3TaE18hazKiLJ+cQjcq9c + R9agV0NiYBuai6YsP/WpIyKkZZKrVBMyMDXtchQuv2WqKG8xqyOSlE88IieZTPUsk6EEH9QcOSaR + BqS9RLex0YTPvGvNQb0a4khv7g1l3iNvMSsjkphPPCK3sYRlv09DN5tXIQoPUwq+Curn3cVvNxsR + L/YVy75xmS70Zv+QBPsaszIiifnEI2LUkDC54t3B2Zg3YiiooLKv+xDejU3u1BvZZXcsXBcjW7qJ + RU72t5h1EUnNJx6R/diugQ879eWRME2/UwjExA6SW/N6dtnnSKvL7k1CXAtLstCu9YxZGZHEfOIR + uS6dqQ0Ok/4IgwuTPKcpHx6Q5nYNYOXuWugOuHN+WRzfCr3Z52fM2oik5ROPiHY6X1JNK4TGwWIh + LktZFlGom6bjWnPfIwQGtNebQKTwDC20Rp4x6yKSmk88IkGy7zaUu7XU526vRZrxBP0vlnzDaron + d40EgS3NFowKU1yFbr/PmJURScwnHpFwNaXP2KGYbuN66y5RB5y6M/ge9/eVOz8S5Pu17ynQBT/l + Qog8Y9ZFJDWfeETMrzP+dnh0Nwb7vq90ttxUdauIoxhwz10jPdpvO3aqWt4kZXqILBtfY1ZGJDGf + eEQMB8LgJkyzKr5T2TLITiTb5m8Uzq01FxHzvmZDNa0MnXaFEHnGrItIaj7xiFze653q4nWFw4T/ + 98xPd2L4Ni09F+pc5sMsg2DgqpCCHF/oZH/GrI5IUj7xiKyc3ZCCcB16pIf5PDzdDy5Knu8m2ufs + Ksq8qsigYrsBs54p80J8xqyLSGo+8YjQnjvWZr+9S9AeXLLMKSgUmPQjEsh0LHfXQlzmdOzNbnsc + C+1az5h1EUnNJx6RXX2xGXa8OV5T3dj3brwK8VTKOSTS3spGJBzqhrVWrupjmf4IN5PXmJURScwn + HpG2J+/rkyHlEhzdsUw94G/TVvsWOrLZd62VlKkd0ZaFU1GG+WDM/jVmZUQS84lHpJlU4pcb7TeQ + Tbvz0Dck+eD6tU+rAyV7jRh4m9zcQqd9KLVrPWJWRiQxn08gomw3LHtzmp0++mGbTn4wzhavdgYk + Y+5dKxQY1XyRnJSXpzK71lvMqogk5xOPiHITp2MERgf7eK+XI2RcN89rur+2mtxdKxhTjqi/k4Ij + 5Ivctd5iVkYkMZ94RILuFYF4m8pwLP11wqef+FlhpmwrldnMXUvj0PhhqNaSMx3LcFHeYlZGJDGf + eETYTKMlIJUaa9e9unYTcOjygyIwjkqXe/sVjXOcEtk6ERgo43bxFrMyIon5xCPCYPo23EbkYQvj + IuTj9/ZEDPrQsT7PXJVyXWJ1Rq0R0rIqNIV2rUfMyogk5hOPCMVl1ISOBqCz97wmzDlblsc6M3Ci + gNm7FmT7hn9ycJ4qdY48Y9ZFJDWfeERwtayNZjv0D127glnoTDB+o5FCEzvbDTxoxuMHNfSDgp9r + mTXyjFkdkaR84hGZbpJN24SIHSzZ1m0Pox6MFUYigbpMuQw6iBh2C2Mwa+seXAqR15i1EUnLJx4R + 
S8N0M45Qe9InP/qeWs00b3NH3HTdttw5xKXVlSTBQc+jG9oytd+3mJURScwnHhFvc8eGQSuebwZ0 + 95NFj8EF+v79vPuUubdfpxPzka4JIluFqvHLM2ZlRBLziUdkvmwnRnUVFjfSpbwvMB92zwcvk3bs + c9lBvh7eA1E1/gCEAQvtWo+YtRFJyycekX2gx8DB4ZrJNPbbPl7j4rJqKzNrNWR3dSGyLuYQKdxR + ly3TZ3+LWRuRtHziEUHO8lJX51X1xdHd2RJfvKgX6gKdMbVcTuOi1k8QhZOV5u5a6IX4jFkZkcR8 + 4hEhDt841MPs+uGxaL9qp4Om6d7c6h5H/q41jc0qvA1QUbnQrvWIWRuRtHziEWHNbkxhpsZ7EpUx + No3JY4yEFxiao8pj9sm+BN3gZVXcoulUCJFHzMqIJOYTjwh5ZKKANABcTpHjl2bb0FAG2v6O+fnK + XiOUeYfV/xsIYxdi0GG1vMasi0hqPvGIXMMcrN0QE2bKgEZ2Q5P3atyE94G/bq6+VlCMV/PVeNPp + mQud7M+YdRFJzSceETKjR7v3y64EvxnlMaUwO0eCuRURiDF3nh0iw4CuavP1Siy0az1j1kYkLZ94 + REjJrve8ryN/hWFTgz/WXT+RNxDJxuXKrqKQc8amMB/EkKmQi2uQiH6JWRmRxHziEeFKaYjWLDul + TPbQFCD2S9eKvMwxsnvL37WMQxl3Y2AWiL9lTnYPnJeYlRFJzCcekf4OznuHwslFCsBY+7DeHdX4 + QH0I1ie5dy2fTqNl1kKk6FyorvWMWReR1HziEQnyTUHEfz83/2qVzFFG3Ian82gNXGWvkdWtF3NV + g5hXQKGu7jNmZUQS84lHZF+me9y83I8boaofkE2XFq3xJJ2x037IWiMen0ynGe/pWDlLiigMfBOz + IiLf/LPjd+F4RLgh9himO+5cc5Puc1k9t4Zju6ESbcW8arwMDH9bdg51AlFFFM++iVkXkdR84hEZ + jqHfDI9wCPHdCJ8N5qib28ju3DG7yHuzIz1YHeZHuIr1LR2c+N/UT9uP13e//e5315df/uX7L9v+ + w/X7L9fP/2gG+z1mVUSS84lHhIQsQXJWF4EUxAtcW4TsA2NwpUfSjXnvkfD1tHMZ1tFpGMqc7N/E + rIxIYj7xiFx+un1PyBQ9fkVptFrOaTLutg2zF2PuroV61A8YW54jKMyl1sgjZmVEEvOJR2S1z28n + YctgcXFce7OT49CvGo/mJuef10MMv2cSNYGw5ULn1Vlo13rErIxIYj7xiBDawKNS9L/wGT0NuR4P + p1nRs2+MwmX2Rz4QCfQ8zHiF5a4E7/ebmLURScsnHhGGoRdZ8v32Fh3ZT/OiNlbbqwDbycYuj0Hn + 67n4BjEnElG8TUp0rL6JWReR1HziEeE5vZk4DKoCOlcqjmc3Ln2vXxIU+O+8aYXw9egLOAyZxDJh + KvFC/CZmZUQS84lHZJlVGMNOTxoFxZQYlk7JROFJWYtuXN4cYvh6vN4oy4ZbYxl/9m9iVkYkMZ94 + RA7MnfVcLsMjBAFRTp1cR9u4aVHzH+7s94h2i93Pqd6HMe0yJ/szZmVEEvOJR8Qx3s/90TfuuoEu + HdT8g0qZFiJstlxEjDWS8WefzP2yXcuc7G8xqyKSnE88ImQyD2b2ZtqXDfHBdkVvyzTPec23Nnve + tIJVgeVttsrcGz2nIlNv38SsjEhiPvGIYJPS2uBqFEYKlpMaB2+xtfX3TIcqoWdWGlXKCA72jcJv + R6msyK71FrMuIqn5xCPi3ttem4k3XfWBhumG9WsEPRz1NrBMDTq/Z1zJoF5rLtSFq9Cu9YxZGZHE + fD6BCA3Z62bSbnIksA6Jl9zcKZdtvvr5nHPPkWmxQgYGM2YRy2gHAfcZsyoiyfnEI9KGKWm136tr + 
eIXO5GVVU1bVLtqNClJ5CgNk6AnLKgi4OGAgFVEY+CZmVUSS8/kEIkzYjbR7ioDj5rRqvKofT6tk + uNrhzF0jQVSWvAAZesoPRfhaAZFHzMqIJObzCUQUfdmx9d107xRMg2mSS/B03mqD85U5h/jx9VQA + OF1MhhWKqMt+E7M2Imn5xCNCsw/j2ogbQhVb8Hm9151sxkb1gY3S9Nna7z9f/2P78Y8/XP90/OFH + n+ql7/Sb0HHK+HpffvFf/n+vn7+/v7/O7357bz98uf7hu+vH7fsfdLZ+0dl6+2fHfwNjAE1HNp9w + kscAt289u50k2IwudaznZ3md//PP8a/f//GPf3h+iI+/9ZuPv1b5FK9/hPjvoTKABr5RrL/1fT2J + 6d0aIr0Ofti6K5/9TXz/03/9E+z/+dx+2c4//Nd//W/Pb/L7P/o/XOdv/nPoUf7Dd798/8sPepU/ + /emHH/7hu++//Jft/PH7n/6K989/+Pp/3I7j+vLlv/z5b333v/59/Ft/jvgPM3qz71d/UjnTGlGP + otPbnL27V8Of+tNyHD9d//3+/qfth/B1/nX76fzh6v7G+vm/ti+/hG/0l+/1n66fr//w//3f/76f + 7D9f//0/hj9hWLv/6Vf/hPEf78Q2bwjw3ypGrkXdZjSja/3E9u7ejH988lXn433uu/27/rZ8qBLf + iFTx7pK36VbPNCK1R43wbfemUEjhePpsl+HxjfqI31a9b/T+p4n/HbEF4C5m7ZuLuLalu+h9aCFv + ByOtgyD63/od/eXU+PNi+8ux8fhIY8RHapt6n+n9zxP/mSw1MmdhbDoQgoKX2MoZXPdwbWljf7qs + 9fhKf+Mj/WV3+p+bVb0vlXz6D6j9a38HPeQj2K7xosAMOU3zLe6l/We7Fv8criL/FP4i95f7T/eb + 3/2f/8/vMr7Hz+HK8LcvQK//6PhfSvCk2cxx3/znNIRNVEw+SKfr1ZpcvT5LtfzTx3ke9mb8Ij1N + nkFED5TyVp+0+fWfTviD/+bP/8XfhP9mxmf62/eAwID68080bNcRf8D4z+j3Y0iagYlhoA/aRKhq + TYMf0sDm5Pzs7+hXPqO3gHktFOK/+2cMeH3qDxj/GbVybyM3Z9eyThrw4g0CtNpXbgvY2Z+mmb7/ + KXkca4wwtFZqxKD71edKtV/jr33Gv/kHjP+MuzKUnobq4MKElaPLHDRh+9Fsp0r6lvlrbAePJNxf + PY11ShCO+3t+4eQ/e/zHd3Rs3gqtzXPrDnW7pdsUn5pBP1ZfKXNHRQBl8Gf+iYVkimPL3/Xjp/7Z + 4z/+5idOZ2xpcQ5G/SPzZ5eO0mFI/3YpKvHxR6clFhC+/6ebR3/3j5/yZ4//+Cu3u8OTmBc6+5X5 + 5OVFJXG+jdKouLaflbV6270JhyAauJqYRsLz/N/s4yf+2eM/Pl2L4JjJyHninkk0iScRVYpQEV12 + Hbu/9TL65ec/Kac9HkbffPzAXAimK4uG9udHL/++v/zEP3v8x1fX0A/V7SEg5hVP/sVlTbWMkYRB + 2OWzwqDffvx1JEJpMLwngvW/2S+fTk3Knz3+4yNZbDP1EO+M855aUofE6i/90f+funvptW65zsP8 + Vwy2hWDeL245vTSCtOK2MWvWnIgASTZEOsjPzzM2RSk6+yNRu8rOOkXKh+Sx9HGsd9WqyxjvJRs7 + GmI2rnwtULt9KNAM835Oc/royq+tvRz8d5T+S9uYabzR+LVFT7SdZRKPtqzX2LryI2LkZBCDkW5n + 62vl19ZeDv6TwvfrGUcXTfTLG2Xp4ACml0Gs914/7ar+dtuJvJUQG5FJjr3ddmprLwf/vLGURm3t + RQs7z0YkSJYnEplOG+fhn7Zrv4GP6epDHBjIch87W/mVtZeDL5AJ/3vLrjZYSPPtYc+k3lh/kucr + 
jrx1zw/mtwCaHQe8ImDus3t+Ze3l4DPqYt81XY/o91OXZn+0StOFajzKhF9a7/nbpHO97HGPreGD + fRr8qtrLwR9sNolXhCntpVv7SJqJeF6eKEZpnM+bV75EXQS2Ed++t96OJJGq2svBZ9i8Pe+D+ng5 + YnUpfRl4Ay9fWo3uq/m2gxvjHxT2FPG93XYqay8Hnz4qfISMa+Rg8G0IO2WBr9yGhFbzRWtd+XCP + 1J2whjh7A7+y9nLwszkGHt0zrklyz3Kv0iqXiVf2LihxnlsbawZEEVY5EMFp3nV226msvRx8/vCT + 5L2Vhax1b48YmKAMrJ2oqU/nb+vKR4QPa/Lxy8O/M/Aray8Hf9A9Oumi9yTocD7jiulWHral5yuR + 56cWWt/u+TwdwvcvbEsrkqo+e9uprL0c/JFbL3O/67iH53HZ4SwX5FHd94uT6N06ySLMmSOUCpN3 + G3rbdiprLwf/vsKdl13D7aKplXZb9vO9MLZ2DrsDNW87iwGiPum+rntnwxQxOlW1l4NvM75e06z7 + RQO7s4lKQjvkY40AwG7/p/z179uOdX8SbvIuOTprKfMarKq9HHwO4ZMswoyPIIB4ibDhXY+fs9LC + gvfHIrTv4O/euDQ2Hrm9DVOMnatqLwd/n1mQSHtOx5HmiZ3okO7H5f99Hyr/qbW3gxihk8/1ZDMg + /rnZwkcP3Nray8E/mFDifg+ayuursfPkeeIKE7Pc/aYNaNzziTTIcoTSYzONP3en/DD4dbWXg0+R + apI1UcMQymxMc5AT792Sv/Hxr2bqiAsD/xIeJgJWehug19ZeDv44MmW97PrYZvKa5WziTK08LR79 + BdfD1pUfG76wW8eHJkNfjyx2QVW1l4PvKauZv9jfV/64WW+T+iRs7tJttHX81KnttwfuvhhMejQQ + A0oF7gz8ytrLwZ8SXumVJnmmz3tfjDj5RW80QMdDoze0thdsZaLPORJTm3W351fWXg7+Jr/pa3ro + phmNZD21mCHuaGuP4N/2lS+XPHIkZv/obIxIkVhVezn49+JeM5oe2uf52OgDjAYq7oYjsuBxtg7Q + eXBzdznlywUDprdtp672cvC54dDxHC+DevhLBXcy3tfkP+RInWnddoKhKbvBC8sZ3lk/v7b2cvC9 + PF0y2Q/Zd8bj5ZZ58nS+T8l+eDtr6xhRsgxfCqbnEyJcZyu/tvZy8NnLTWyaxbpvZumnRsM0c3i8 + 3frtFHPjJIsQ5dic6Ocmh3nrq6tZXXs5+DnU7jOGvmnKaX676CpsZupy+owVl8Ztxw8JWRD2W5y5 + fb1wq2svBz+8lziPMhFK7MtH1ORMk0ggkZOgnbtx27GJydZAy2IkVBNY+cn2QnXt5eAf67mFhQz1 + NZom9+pw8Ndfy/m91/nHUYi/eWRN+CJh7yHJjyVKX/386trLwafXGd8TeyHl6KtJA9HWudNGLHRs + fBnb2gs+ABNcjTWLX3+6q6tmde3l4L+vbvLEZVzMi5s+6ThV9C67bQnz2B/HIP125UdyBdpCOGHW + +MB/dNuprb0cfFuO+whOONXa1/xK552BmZAK2844t658kSF0EV9WTEdFTNuHwa+rvRz8W2KOETcj + Nw57SK3TKc8+H1K8J2ak+afGZN9WfkSF0keYIcqb6mvbWSprLwc/NplXXP10xfh2igcuKSJG9yOl + ZbgbezuT8DgEcT7uoU/pDfzK2svBn3WQF4+sW+AdJzgKREQSDTbu98vNKbzxwHXTMSJjFgX+ua8B + +lRbezn4d5pCbx4JXhtrMaFdyLJPuEW/27MdjRTxScI6bviqpSlAr7NHVm3tPwB/HVMwk9m9CCrA + aZUNJaaLowd1hIdp68rnTcRlz/h8Oc++ejsWTl3t5eC/KCIG6MkE3S2TLOvgpIbMs457pJs1EmXx + 
H+QRBAlUY3noa4xYXXs5+MucNI/jhnMjCk7DlnTAloe1N3coLtKNK19+uRssXa93Q0VOx0evmrW1 + l4N/JEZcObnYnPxGTgQqhp/IU4I074WdcDP45sGj0xt7pDP2Ai+uutrLwZ/kwuswhCtatusntNnt + 0VtDs5GB1rzni6GKvLg4cGnQ+7rn19ZeDn5s9+csMoBCIhOkjJK07kvwtW/hlCLbuvJ3tjWy/2LT + 7+3Apf2vqr0c/Ojs3JO/sIjyploTjvLAzyzMxxDHW/f8EPnS+HIz41Lf2SOrtvZy8CXAzkPSzlyu + MbIzDBUZxuroT64+d3NvZ1s4mrjr8+zCEe9r26mtvRz8V1i1vhfa1IohvurhX6y1HxbnYuKOHwf8 + /La3I4WOFjEM50wTO9vza2svB19P4ebzH/84j2fZeEbTrs3xvqCKWRqpIzRe5mG2Hv9w3e9s5VfW + Xg7+wThY1oJmgh0+cwF4Br6w6Uzi2o7cylJGwB1d82W6ipjrjDRVXXs5+DfuwoswOK8Pto5/TrwF + t0dE4sZBdW+UBfkAK0Ecorh4yqG3Pb+y9nLwHzm5zkT3/JBkae2nmR5L3IgQ13S+rS3ljd/IvMga + kSmzdNZYq629HHxdr6Trm56EsMZZ816O6SXN5eroZfTjjMNvB+4RvZ15pchCVe5sz6+svRx82S1D + xkwmhMj4HcKEn1tG5ymcdQyXx7ZHlumVrgJ59ThiwvV126muvRz8VzszM3m5xRDrIfsNePKG35Gd + en9bqSPsLrTxkQVjkjj2xdWsrr0c/HWRIfUex+qSnw79ZVRNLX3fRsJhSI2uI/EBkNApgjjLVqRG + frKrWV17OfhTcsMZc9ZM4OeIQ789DEifSYuT392PU4t+s+f7AHwFd384rVdnHmvVtZeDHwmDGy9T + YA9s+T1HzdMJ0k9JnQQ9jXu+4B+eneKiRIT15jpSXXs5+No4XrQjCShm5nLqZ74r25GHPAXJdWvs + atLgYi8gweGDGmV1dduprr0c/KSvQGsu6eR1zKb0YleKW95c+k+OU60rfxz0duz7LlAI+n2BX1v7 + D8CfEaQOvhfbdrzy5NFrdgFYI77as06p8YUrxBZ5QUazw8T1tTPwK2svB/+59S81M9nou4gzYNTS + zF/2ggwG56N12xm5KONLGQqhTvXVXkCWrKu9HHzv2ZsOepk2g/RjzLz0L739l5ksj9/UDP7munMg + KTt4K+KwP3rVRO+tqr0cfPEQBIm8vST7IpHQnaMvXJxmIq4kL41G1ggo7vk8ErFQhs4M7qprLwff + 1FaeycBqge+I4fm2eebSSrxiD9PYmpniA2yMTNAAcHCX3vb8ytrLwSfzfU+bDZby44Jz3tpgGXkz + 5isuO80Hrm92YqkpGFaCRGcHbmXt5eCj7Rgc3nb6BVnT6ApngRfdw8+d5dHe2l4YuaYwNtFbwEXp + a5Ll5ldXezn4kZkyPKLnJKVk7TSy3GjHPI7dl0au0ebLB1hGcjj0BfyR3radytrLwd9fbYSXt+PK + gkEK3omaHKNc7D7fwttIHQnwQ/kijoKouruVX1d7OfhmJsdolGiCe/KSZbEz78M4mmLFDWhoB5/v + yOnKzIOhM7qghVNXezn426qLw94lY2lycU9mKUaumgGDI1gEZtswhc/LJow92tZCazpb+bW1l4Nv + yW9p8rTCz9RUeB9UDycwadzEZrn5nu8DsKtafAGIKJ31dmprLwf/5iNIdbu5VrE2TYek3FksnyBq + vTbu7s0rn7zo4Cyw8fTpbuXX1f4D8Hczk4tMnBqIFPRm7EhBNeDM6mfrBTeCr6Mcz1uGL/DvbIxY + W3s5+GnPzEVocK8zX9Nyo7RyVr6Yfj+mLE/rI4t70k5soV1qjNuXIE4zsK72cvB5Vh8Xn5HtRWua + 
9+skSMHd3K7rJdr/cbj7b8eIsdnwJZeVJcSgs65mbe3l4KdHD/MmfjvyQq7pV3Cx3g3SON7gvTWq + EVe2qIxjRBUTR/T2yKqtvRx8qFwph/B/W25XcmJQllOL3CBWMIx9G/f85cDExUGkC/Iz6qu3U1t7 + OfjcLtgQiTTRzN+uR3xHvmbjFAyqcNxplP8zVtNQNhxj1Ym80xv4dbX/APzHhI9K+UXTv/hoonkI + cxsvjPETfbP1qomsw48fP18LqTML9/BAqKq9HPzrFgFqcLWNXL1eMn2dHqTZxeN2ckVptPNln4Gj + eTB8cW3tLKqpuvZy8CkXNk1Ngz5fQ8bz2BFHBtFwA1kchWLjnh/0t4hGZGdtQNDXtlNbezn4pFJX + Qu5gdRzXeq+hIWdGgHRs7pu50V1Qr0IIdyhTwtO0L6Jsde3l4OvjYMfe7vi8k0f95dOw+z78Z6kG + GA2tK3/hYBADXCa1vfV28Nqrai8Hf+OTvwcfXHTBPmQnrnmuuLh04/MgEDaCz8CBjUPwF/QvOrtq + 1tZeDv4tsCZMq9+LfORMBCm3hJPnWZhz5W1uZazpGekmc7ESHbF3dtWsrb0cfJE4IY2gxb3v2eZ/ + XDhUzCq0HWRijo1qxHXHxgr/NuaC3ll9Hbi1tZeDz3PhjRlrXq+dNEhisDfuKjRle0Mn0crb2XmY + bO6ZK8OXpa/AGuZDdbWXg/8gijhYdmYL5EEUWniamjw76zVEy7t1z5dOxkuWJ/xy6Fr0tfJray8H + 3yvOsyqMRxj6jrwGDRWHcPhyyZeJ20oXDG87sxRWviHL6gz8ytrLwTdjHUV0LCdrhJlFgtYCnx0O + AAsl9NnM1RSxO9tz3PZN0Tt7ZNXWXg7+S2365vc1sZVnpZ9wvxO6jiki2y+/gMar5kH9zCU+7GRR + dzpb+ZW1l4M/L5FeMHvWRhzx4ZB142cAs+RnZ+ne2tsB/rlyynbTtAN1B35V7eXgayzEI8ix+IaF + 9cJFOT9HHvT2swFLM/hoggg74VCutdkZ+JW1l4Nv1xkXMlAvXdOUiyorIbEJbXr47VxP6ySLLoIO + 2mBm4jXV28qvrP0H4F/GJ8szT6sw0Iujpjn6QxpHE5qIN1sH6HKCyBFpvMwJps6umrW1/wB8tALQ + yN5mf8FKkofys82sHS+dHcF9jQfuifgmel6PIfx7+tp2amsvB39fdXZGPqbvjLDzuhQ+/B6YuD8c + 3HX5W8GXgBCv5uMIwn9n4FfWXg7+s19hcjQcj+jdSY+Nw5Ej0gkgmR5lvBX8SAI5g54fVl+dgV9Z + ezn4iZ2gCZ++L7uvm9ELZu69coGZs916ar3nswszTRHese0dgl9Xezn4L4+p/OoevxSJeCTcNCV/ + y2hd38cIvXWSBfw4t+aVDHrpTBZUW3s5+Ng0RuchwWUINa73icRg4qqlvyHvjI1XTWp2YaC8fGNI + 09m2U137T8C/XwTBhCAeRDUP3eBWIrDt48NXv3GA7gOQPw9Ij76Doa9JVnXt5eBjM7lf3i8hnERW + RHrC8+dAI4kgzzw03vPjA+hM846cGdz1deBW114O/r64A67ZKOsRjLgN3BzD25Gh+LNN99tIF/QB + Fo4XrKz1NPsDv672cvBRkd3mVynx5LLvgCluoPKGkM2Li9Ns21UT+CaJGxtl19f+wK+rvRz8xD0z + subjRTXM1z18KVWe5bVN7+QBzeDrkGoqH7O2cnfbTl3t5eAbFzplF6xMdFnml3MO2vKcbPwicltt + vqx8vAVOvnuQQPsSR1TXXg7+POGBL7r41/4yXOBvJ59seRArNTplVLau/DjPz9A2miX2dtuprL0c + /Czteb6x+aRjndcmItFsK5Lor5BqpeZ7PjUQu5Ew+9qmvlrKSBd1tZeDLyGI+pzfwo6Yn99rD1rl + 
tSRxrMYscyN7Iey9nCbRUR7IXrrq7VTXXg7+QRDB4et5Gao9t75+Wo1SnsXJq9fcampKgjuE5Yt9 + f7b7dwZ+Ze3l4C+J0QJbzQldUGD5iisYB6/vIubna2NjDfghRLTdix/pTP5fXXs5+MO4EpsnZnaZ + zct2aWWeFxUPEwyRWamxsfYlPuf5QmjX4SOrsvZy8M+vMMr5Rt+xPSQGFYy+OI9sF8MdTrOttx1v + N/AjZXXYWKusvRz8/FDgDvNNArcmHu477C1/XeaL3eDcfNUUs8aVn7tmh4+sytrLwV+Fc2DpcM/f + E9KsCaKgpld+kNvOcq3NL1w3nS8XcTbBa1/9fGdUXe3l4DPZWR/M8C0/LDbRRyjPjzuyid/zeVJz + Y43siOsCbTvbvN7u+ZW1l4Ovk796Wmkky+NeDibWXNcoVQazP/bKjfx8q8crfUL0n9A1u1v5dbWX + gz8gqPGzS6RBbwxS8rlFXNZ7SzUw2WredtQffdLo78y9rfzK2svBZyS7pIQkYZ8nQbxNsM5QZF1o + ytOQmg9cFPR1t+m75nfX1aysvRz8WQYuP0cM5bjrs1DmHf7q40vzWLIZSOtVk56P74XV7/DqbeVX + 1l4OvligO/rHGUEwSajJmwbzbtPhPXK+T2MMtx61G75hmZ/RvHXW26mtvRx8KdAccWaugqwuBjQp + 7YBrxRm/1yPa+o0rn/YCRxkTUdeiM/m/VMe62svBZ2Z6C6xxs9zvJWc2X7RBE6ZmElfT7LfjfunC + g6BMWt3dnl9bezn4d+Lju2piDq72jxDDM2c9/YTZGglCjZos4J/8Bf20iOJ6O3Bray8HH0VB41Hq + uRmKoCy2F0dOwLqocyUaNoojvN7Q//FkURj6A7+y9nLw5xFVNhECXdvzCK7JbEeS/CD6cz6DU2tj + zWnCUkMM13Ry9emrn19bezn4NoSX+5lX7aq1gx+upyNQz3zr3vd7bN12Zqgz1uBk5arZl/xfomNd + 7eXg6+uI6SAFkpuCK0gGN0tRIUcPhzXxQY23nTVc28LlS+Out3t+be3l4AczKs/jOw7LSQ4huUNw + 8Hqy3Fne5xoblSl2HNwFYyxR6PvZlwi6uvZy8Lkb3cttbMD30sRJX2G4D2ewDrwdqDW8IIwd/bFG + Qr7jzlJBq2svBz98fAXnMWQJVaKEIFrQNa9Ca87pog9t3Hak1eAdHnqkIln70mRttbWXgz/fD3pH + WknW5lkwmagajYWced0lhI/WPT8+gONDoLX2TmfbTm3t5eAboI+WvGiNhwhoIX6eiVXeRSr9OeTm + lvJGDScniFPfwLmwr6tmbe3l4OeXBPo+WQzS76CpGWwN17w/C5L+fedWlnK4NW0c+Q1UeksL2mpr + Lwf/mtbLHMVlJHHeebb1lJ0ioFWYxEk53tpYC7cmN1fh8247nXU1a2svB593+6ifuYN7w9cZkHX2 + wW/hdeUUK9Z61ZSCq0thQCmCa+7skVVbezn4e9qijYAoy01wWvJ4P9zorpxXLhhPM2PN++oIi05G + zfzz+trza2svB/9aH101fmKDGeJ4Uwel8xKQu+v15D23znApqwmyNBhkcQ+9bTuVtZeDj5zDXOPa + BiN0UZQEcfIRE58ZnoCCWpv3fJ1pTp0Ep/11NffK2n8APgWQnf5+cZVXyeeOXcqUg487P0ybUeMj + SzOZzxE3ZbKv3vr5tbWXg+/hmUxT8Hboshjmf8V18B5hBLNRqjQfuGF6IRh0Cew702TtlbWXg++B + +3J2iVzK4I3k+9YIs/+IK9u1ZVp1uLpp/jTJERoWvcmCamsvB/8xMkcWHBm/RJjSSfLP5e4Zd65f + LHhaeTtsdpi380jUGu8sgH6rrb0cfCnEOjrkn7wvhh1RjUBF3sDIQf/e09oqC/LCokMkhxNH3Ftj + 
rbb2cvCn+b4XzMz7mNlceA8xr95472zvzcf6bCVN8dMkyQoOEGuNzh5ZtbWXg6/pyNjIyJAAidG9 + YaJY0LQecsqO+U6tpCkTXLEdTlycwbE38CtrLwef8HYRkYKilhJevtwaTTWcQWnEoq32ZvChjjYS + pOfefDW9eepqLwffBv/sIYUQ4oY9suklk8ed/GUZDdqEGu/5IQkK/osOzzr0lY0I/Lray8FP0+uO + r6EsqHwggnMvWU22vpI2dPYb2wv7QPSPHY4J0B1Lubr2cvBDCkQmS5gyJTaMEUy2vDwAQqqiM9Do + LugDnF8BBu75vT2yqmsvBz/MfMc9HWmNrOa4kzxayvuRPLzy2koX3JfVncFRjrDZW0u5uvZy8Fmq + 6aph78y7xW+yMvnrFIa+84BC27rtMHZwdY2/xCu3q5byXlt7Ofgvy4WB6PZNzBHI//Pgmo+txtf6 + QbRs5Gp6X2nkb4NZ9M7FvS/wa2v/AfgvlXiyw6Q8snVczQ+ZfHF35AQ7Ewi13XaAb3yutbYLP+uM + Llhdezn4y034TPi/EqHj9J3v11OLKtQckU9RI3XEB9AiOQVNs77oTIdbXXs5+Ed2GPLhWod3l0+T + kFrZMPDYfMeDjXhjY43JDpNgPM2TKKszp6nq2svBxwu/DM85LawR/ozXl45bdFC+hwdzsPXAdcPH + GtHQ3Iwm+mKsyWmqq70c/Itv+DRQnofTtNvlqaOWttOZuw5pOBuHKUSg4Q8/s3SIUNa+Dtza2svB + F4yl34svNS4ymm4iaMkpF/7OrBG2pFbw0dRmJjIyQckbO1v5tbWXgx8RxAzoDJy2dx4v69+LFFd2 + fdPOFaD1qikCHT+c2FFA3NYd+HW1l4MvnAOHfja4ukcBZZYodiUrBtbi28XVuvGqeZrLxAR3WxBl + +2opuyTU1V4OPl87Dt+sfOeLazum/nCTqDzcF4hC97uxpRy3HBcpQmin7tTZnl9bezn4HrR5Suym + Jof7eDG1Q1vQ17/26QV+Y0s5wF91SDkddUcdqa69HPyZ8D9icMWkTAhqQQ2nTXxwF/KT0tXoseYD + cJKNcYoTtzPqSHXt5eAn9kaRLrB61A5DCktld357BKfBnFrtfA0FvqQpi/O2t3i+6trLwd9Poc82 + Zs9cTwr3+5SpbwV2SITmv94oC3Jb068zLzg26R19EWWray8Hn8nF6HGr7ftg4+MUzxEexM36oIdz + +LbddnyAkTkqpy9E3M5cR6prLwc/JO7SyLTvk/W5JCo4knEjdbN0qayNkyzM/GOIdhE/n972/Ora + y8GfNPQfhqbmt+kZ05CvUXfhTPjhw9PssYbtOArJGij7DuPcrtoL1bWXg/+4Xe5Tmi/BBTE4D0eo + 7XLUfhm1nK3bDvfwNWRB8R10JoizJ9TVXg5+pKDprBlzb9TP/s1FtWmAOyZKUPFijXs+lr8AELEd + RHFnX9QRddfVXg6+2064fLG/2N8ZTNx2nI0LgSLDL1t1K/gzZ1AJ02JThs58NY+lsvZy8CXnze+Q + wX4yvfA743CEvRn0tXQbcDWD77q8aOej7Yx96XCBX1d7OfiPxGcC6OFi6XVP3L2+ZJsX2qzxK61W + K/i8+Yc9GptuO30N0E0i6movB3/GkLLFk6EEa/O8xM4PWgyu+TcxaKudrw/g3PZzwg/HHenrtlNb + ezn4Ym9pzuf1wBNMr4is584XVvE+Eka7graufEm4o5fDycv66G3lV9ZeDr6gIJZqjB1pRwbkpgCd + r/KdvpIpW70XDqwF3BHiOuLezux8q2svBz9FUkpm9pVvfponXcR9xRfxctHHImnUZEVj4QA7g8rD + kKyvbae29nLw8/BO+EybXMqJCh3JgOkItx1TdKL9tfWRJfqP5wJHgdj1+xojHrW1l4M/uM8DPuQj + 
Y7R4tvG28490iOOQ8t7a1URawPbl7sjxpbN+vmDUutrLwb9YjVDBDV7/Rlr8dszS7zezURbGrbHZ + dODudq7V25asESNo74ooW197Ofgswz2uxochURquM0x29CG50pGSSCtr62rGB8CTZfEVBIm+WMr1 + tZeDj6pJAc3jjqlgHLKD9v4iKNFsa9mX1NZeQP2h5nWMW/2R2tTTgVtfezn4O1s1OhSbjYTKhcFa + xDxwsU78GNAX2l64O7ra4Pg4cUGpHLs6cOtrLwcfn2kZt+d1IZFShqrjG/fESrJxT4+MxpUPfIqX + JVrjy9rXPT/Ar6u9HPzXNWQdpdRMfNwJn8X1YRvgLxx+BAavbQfuHnH23O2o4iYekl1tO9W1l4N/ + cbtAz9S5f88xRXpEXtHXNCLFDaCstYIf5PyVrlpW2dAVaYpou7L2H4D/zM8gkZWlb8R0iJH4Sq/d + NPZPc8RW8HXTTLD2YyR1WbtqKSPZVdZeDn5+XW+O+yVNiW2HMovN14UhvrH5wqhqXPmGWMaUeLey + O5auGmvAr6y9HPz7uS5PIGqImaOsQbq1P+Tp2XZKify2tRfiAxB4yAmK1Jqjs22ntvZy8OngrHdt + nctthM8Ob0EOg0JaR1lx69O+8pGTraDNaLIvWVAsnLray8GfBUbsUtDvZbfLW/2iDB5hQZjj+dga + 04J8gJUU1ME16hf1duDW1l4OPhEEOhm+pmEiito7jfeKunY9C2/NoTEnC/igD3M7M3S3/b6umrW1 + l4PPbYH2fLooI5asz3A/5ygnbnhm8SlUcq0HLh23jjVzzbUzTVY4gtbVXg4+j7X3K3Rb+rMh9yK5 + YNveSyY6BZuwvmbw2cTpEvkfTNnOVj5jzZray8Hf2ApyMeW0lmVIGGlNdIhI3eOZtTtT6wuX2OL0 + yA37Ks+szsCvrL0cfPcp1nZMvoxRkOkRGCx6VA/K//miBW1d+UdcM+V8L6Oucm/g19VeDj7Ozq21 + Ru/M7xsln7dyEgDtdOR5tIyN/fyvqbyAPtoU04GuqCP2/Mray8F/bOyM8ymVuU1RY02rHo/d/hwS + lnhufWSRpMwuO+HngAfa18qvrb0c/IivomCQx/fc17nISrmW2whRCjqJ1tRmfGG3t5ex6eS34y99 + DVOqay8HP/pdqNDnSXt4RGfTWIUC1+j4OK59aX3hGtzOfr+Mjo5t7Yo6gmlUWXs5+PnW83rNzBE8 + FvrbU9IAnzubDuTdzRsP3GM3EWayhjk19uWrGeDX1V4OvouNXJq85Bg2LWizEppCEYfTjWmzNI4R + 8dSIEN1zZl9Bbwdube3l4Gsiu1Cy1OR84V8t/RNtNp1OYJ6DS+NVk6hssuuT1y3yb/q6albXXg4+ + 2ghzCgZfFOebvj5u35gurTZsntRq7eh3Gxk4R2z47jt93Xaqay8H35I/3pdSmc3Lw1QQqXvlxJCl + cj86no1XzeDmo99a+A7wvavMlFg4dbWXg38d+MiiIybUNWmID0tx+kTnLqMvFmBtdEEf4DTA0i/V + onKN6uqeX117Ofg7TvJgiPLKx9JTPgX1MVIOZ1l66KX1qukDxOsWb8r50Rl7obr2cvBfgOcl4hCP + 5brC1ZEKFF/WMOtNHMXbrppGkoxS3XUwfl2i+lr5tbWXgz+Q/ksjkxxxz9k9Z9oNFW13lr5+W6PZ + kShWBlNMgwbcCITBnsCvr70cfGSy+R65VtNEuGZeKRTQzlwj3biBtr1ww76AmpevhiEulVFf4NfW + Xg6+6yR3Oz0dAqyLn6ZlmnnkkEXIsE2NpCngh+UF6wVNUjqXzsCvrP0H4NMfcrhDUjspbx/Pq4ky + cR5yokG/p7ZhCtq/a6aMaV8AYVBXk6z62svBfxb2yUaJ2cZ/3rpr4/WsQhGP592P62x74cYHIMUa + 
3aeWcBPva+XX1l4OvoEtbtRhVr6LB31s/Q91FgceWqHQojfddoC/jWe0SynjuDt0Bn5l7eXgI4JH + TM0YZr7sHWVxDyddognI/nCabXtkxQvdVh/pT3qnc1eyoPray8FnLkU1hRgr/PwwuopxPeYg1/U0 + hvlU48qnhtObDqcjXNDOrpq1tZeDP+f5vNN4iOgQKpPDAJZ+Z0q8ZanRGy1frB40UARl8xod5c5u + O7W1l4OPoRDARCsfs2l512d6+MvqaoqiR+FsXflkiCjim6s+P7G+9nzgV9VeDv4dhpdJMBlHx3AC + 3Ijj/Kvn1cH0az+awZ/mUWPti6bc27bD+b+m9nLwKVMunEy7/amdJhc6oyZPA+BXf39ucxGPbYfh + xXmQd0XgWm8rv672cvBHZs377pKf2b0g5nsQYeg7B7Ar2Xy1g2+A7g09yUDva4AeC6eu9nLw9TAv + Q3SnK4n4nt6bnzUaj69gRlZuVKD7APrJG0dfM8TOUkHray8Hf9PLfMzQ6VOecbHhr5wG1z2xe6FQ + nNsG6PEBNPTFNRkkMhjobNuprL0c/PsyalqO11FLiDJmznb2afG4G+MdB2/rgcu8bcENivU/djVA + t3Aqay8Hf8w67RjcC3/NzQIdqbHC9ms6dz5fjTZf8QFQUDRJ0DUF43a28itrLwc/CBKP8NWVoTXT + cLwFCgmKRJcgnNm1jS4IfLsOczsy66AqdwZ+Ze3l4JuZOGDRmi6+Xsj52LEig7ILCoeu62697QTn + a+GcZ4ArXLcv8GtrLwefIojD18ZmJ4nGfd12tuF8wvaFt6aWQ+Oe79a6MhHY99k8pbNtp7b2cvAT + syMcHVr/KWXiZz5rqH3cGImjLdk2TVZkBTloR6am4ejbFW+nvvZy8IEszsR10+D8OQa+I7zV8jJL + rclsBltbyofdht1LJBG78nS27VTWXg6+XBQsqWfDVrgW3jsaDITox4irb5Z+NK58Q3hHLZmLJwM3 + k67Ar669HHxmvsOLJ6u7cwqtfWlACRPXyKNfx3dsXPk+wMqgmagvpip97fnVtZeDr4PPV2ckPNRW + Foo73yK5ZyYhnMRt/o0HrgGuO/7ylc2HL9jXyq+tvRx8xIW0eMxeAy/xdKETz3nKrMRdPK9naHzh + Ah93gfZfZxMfpTfw62ovB18U1kMM8Qin0dPMkV5j0fPAsOsf+Wrs7QDfGAVfdtNUHvpqKVfXXg5+ + ZlotEveVTaa96WtIjI+ugZwko+00+u04aO06DlxdNSqvvhhr1bWXgy96mOWlzAh5HSIMxo2t4yC5 + 6c7bY6rVSJqSvR3M/BChk6f0ddWsrr0c/PNh6vhlYorTJ8HgNbMXxuqnIEBLh6fthesDhI8ybReK + /trbtlNZezn4B4EsqM9nurdLXxmFBHczI4j7u9fSuvLDKiu07ARenYURswGprL0cfKwO7me7LNab + ly9WH9MXvMH5XU+ttkYR9J8/gMNbqwhjra9hyhf4NbWXg4+U7ERkAWBgGbbHjx8Au8EhzKFugp7G + bSc+gOCtjbWGo6Wvq2Zt7eXgD5uQmhdHRFrw6RX6nkQpK10oGW78Szv4TEeENSG7T53t+cCvqr0c + /HEKu5G83Om6kCqDVmyUMkSghIby1Xrg+gCuOn92s+7LyPpry6yqvRz82bN/GeSAbhpq+4vgcV0M + rGmo9Hj2xvCC+AD+INEdq0FuX17K9bWXg39JiniMbIM3Yrkb3LIUpFpeo59PLde47cwR23Gca3AX + egO/tvZy8FmryWjSU9suyazRxE/Y4izXQ8NJF9MIPpMvp61hSpy7nR24tbWXg+85S++fBKdcusiY + DEk7/zpeBiQIbGfrbYdX2zkLBUX/6szmi4qssvZy8Cfan0tMWTDF35vx8StEgvg5iaP3L83gi5je + 
aM83vKm+FOjAr6y9HHwWXDiyMwuG7T1ld2C0zhr6UsqOOzE/atx2jpP2QutC/iXLnb7u+bW1l4N/ + 3af/wcYPumY86QL8fR3zu2Eu59aWsuGtW44LlYjEvbPGWm3t5eAP47u964AXTngbMdDHxF3zffM1 + ayk3H7jyt0iM3DW5+XYmgjb2rKu9HPz9YvTi5X9pwKR0+Ady/rM8Q76fTdOtcdthOqK7oLEpCGDr + bIZbW3s5+LI61te5joo/CEakwprdfDy7BGZd49r6wtXMdOLqKQuh7ys5QoBSZe3l4HtHWfP2Gjvc + k47XUzTs7sShJ3fExmBKH4BrpD0HA8blobN7fm3t5eDfE73nlWiyhKA7b6f7GjgxgH041mFoFEHH + H7LgyLpE8Wfu68Ctrr0cfKERL/Uh9dWyP7RBEUssi3SYH00lGrm2PT8+ANM2Bv1hod/Xyq+uvRx8 + Vl5hGB68kSct2gsHYjiyTTJWmTUcmsGXKc3myInbm/EF8OtqLwefWpB36bXd0yINl/IWYfDJvFTt + /1wZGseIxmR8FwwRv0TofU2yqmsvB3/nmSwbkQJINFxEDOwTktPXA9fW3+q3gzA1WvgrI3EHSG/g + V9ZeDj6SAoMRIYAjxo6tR1yQwSXBfvY2XY7Gq+bikuMPOcPgzpfQVXuhuvZy8F3rp7SLKmCtad8Z + +b8aoM+M19zO09uoTPEBjpX9glGK5l1fj6zq2svBH4SU7TOm1DUmAEkyGCKrbJkOju6MYBoPXOAT + /0usWSiDett2KmsvB5/PAvUV8YjMbVzxK4+7NR8pZQeuR2MYseiJsOYPbgoXg87aC9W1l4OfnLiz + HqYWDymQgeu0G6SbK97HM+1H61XThDgUcRHVNHY2RgR+Xe3l4B/zNewPnqzthxGgFsz2jijicpvP + xMq6ddux3fA1Wbl3SDbu7MCtrL0cfP1L8VgSarZMdW6Wi732oFjOOINiytqSoGPbiTBcNu4RHNGX + LKi69nLwya40j3HEl/OWUUmN4r314I7ca1pRGJpXPuk5N6UIp+zLzvdr4VTVXg7+MiR27Tbk1ZVT + esS6ZGaPs33nduFvdZryATbvBQHHJ0fl3q6albWXg29kEot+HNBEOL8GRZYqiOGL+OY9b40zXODr + aGqqySrzduttz6+rvRz8tD3HZTsGOqEym6kl347de7gNWTx6m7cdN1ictRlprb+VX1d7OfiTtR48 + hcccJeuvpaCOPCvr6ZT5azaD712LhX5Sp5gLdbbyK2svB/+42alNmcGUaD5LP8bm+/wi7G+0m1sj + bwfsCwNxqr7FuKyzYUpt7eXg67IfGi93JsHlynJJbuK6M9/Ry6ZPaaSOuLda9AMzeOmmvV01a2sv + B/82TEEKd8NkoexQRM8X15SRbJLtp/m2s6MKQp0KF9+/s5VfW3s5+NeQREUEUTY9VJvh9oUgK5Nx + fw4a0UbqiLsOmixdqUm6eMS+9vza2svBXxP953kh1yR+U5wYEqLyba+WIIxW0rzt8Hzh9bJH5uLS + G/iVtZeDjyHOYx0Z3wD34LaTwsyd3904P0ZcYyt7wR/O2sHy/0qh72zlV9ZeDj6SyPFk/XyvoHee + ZTZdoc2a0rgTCS2tXU18HboIg0Sdza0vBfpSW3s5+GwXkBJDdY6p41IiOcVrSFziNGArb62TrAic + Q0Wx9yzM3Ppa+bW1l4P/7ixlJ56a8lKulc8O9wUxiUhmnJSF/DS+cBGvEPMjsabDlV9Zezn4+ggC + yub5omDIKdmgpz/f9WPfIVBpBR/Rl8Xa8KWL623lV9ZeDv55HO+pq4ykpqfA5YUXY2QlXi+pkPXf + DP7KJZJnHqpyZ4E1WBd1tZeDz9Rx3fXSXAcfOQZ6O/RBumykieaIQ+tV0wDRYHJAiljlbvW159fW + 
Xg6+7vEpriC8fO/ljizD/TW6nMKDN7tvNq78c+EqO0RKFg1AZ1fN2trLwZe4jVnjYcVP7WtvuLiZ + cj1C78PmuVqJsl63ejunVG8qu740WWGEW1V7OfjkQKfzdqWEfu06q11Hv8Hr6iHVelrl//EBNoY+ + hL32/d5WfmXt5eCfB3XETRk0jsv90A2m69GLpBPiZz0ezXu+F5tJKPg9pTtrrJl9VtVeDv72LMtx + PmQRAlmFw7Fau0ddfcej+Pm5cZgS7FhMiJXFFzVoXzPc6trLwZ+Ge74uRBFNtAFFOaYqbzLGEpul + udPoq+kLFEZM146Jy1O5q9tOde3l4Ie3IPvMtIXfhYHK9GZDJ+4j6d3PtDU+sjZ6FKZtugs44p2B + X137D8B/JvRMMWJXJOCiiL+UKm/cTwbDxFYdrlEwfr60s+V05vZFmqquvRz8VfDq+p64ImnPpx0/ + zznLENbTH/wUGhtrTtp1/ZJ4MPAZ++LnV9deDj4+/jzK6cDRT4LJCOFuuz07OhabLJYbB+g6OoQR + CPqcLyh8u9rzq2svB39P28w6/DUzcxOnfx7TAX3UdOZEz9bIz6dtwQHQVo68rM5kQdW1l4MvlQZB + 5JQfYdQ0EeOKeriXiyg0Gf0djS1lqGvXeTifZpJzZ3t+be3l4FN9oqoxd2HndruTn+9kkr5ijdzn + yACjrbcDfKwdTmvzPvcmiKuuvRx8omfN4zePq5HWc4fp8c515HDp19H/H7DyDz47nnHOkd4OXHeR + qtrLwb/T/S457KsTE4BISol/b/rhuXvywmxc+S7LuCMRcGya1VdLeaut/Qfgv2ljcJcju4OlIFb+ + pL1GAL3kUUO/ddvRzeSzYzgfwtK+WMphiFhVezn4TJRXok8yiOGlxhId8TqBiddMFzcGYK0rP6ia + yL6cR6R39HXV1Bqpqv0H4PM4uqkwDrGs1BFvukKbqLtvtkun0qjJsuegTC2DYBZucb2BX1l7OfjZ + o4qj7+LAna9o5zwaMZGPiFh8UqO3rnweXxprYZJNYtPZyq+svRz8dZFKJpzpipuOAHrgL7agXYf/ + Gp/cuufT3w4WfkQj6ir3BX5t7T8AP7+TpKyXN4WsLJ47SRcmBOjrOyGKN9IF8R/CnhJfTeeiM/l/ + de3l4DtWxyPrKD/25ZTQWTkAxBxhe1w238ZJlg9woLnjBCELdjZGrK69HPzZ+0c+UyRFYPRJj7i2 + dA2sfWm1ePv+DwA/chHEBtnT+hqgB/hVtZeDr6NJgzIk/Hkr/ohIvidhC8YEV5+n9aqJnKytGcEs + UnE662rW1l4OPq6CVERk2cuwacpDim8j3zf7C9yFp5E6EszwMFgj/dfi6ezAra29HHyyz+y6iTCY + RdF733rx0kMjKLtz5r11jHh4p8yTl7oG6dlZV7O29nLwqXAN+pLsvP293fndcxi9rO8ulX73nxvv + +T6A1I4d7ZHDXW97fmXt5eCj4gsGvRnaud7Pp8mhfkPIQmei3PNo3nY4TbHON6ihR+xt26msvRx8 + zQTJq/hS9nyzlO2LxmDu+lxJtNjdyFLeoqW2LCdrgdNbq69HVm3t5eDLxXoct4nPRT59B3x8Hzv0 + zGaQH0mrFBT488TUV2NnGKfetp3K2svBP5j4bldEO5wRFaEF45pPHsRelj50b2UvHO6YvkfS0kis + 6WzlV9ZeDn72ABUALbhTEqj7/rVjtQ4vv5f7ZfzS2Fg7kSMw1sLuxUC+r9tOde3l4PMUXPYXNNJB + YW2/923oMPObyqgkjQP0eDV/cf/DmH/pa+VX114O/r0gJb+c7ZhoTskbS0DRMjHa0QcbxtbYDh/g + FLAomMXp1dkYsbr2cvC5Gr1GHq+k7O2aTPueuPmEEtF+MZyNXU3OnCPCKXe7WT+/LwV6de3l4L8u + 
9mkUmrIu904LyvQirK3ve0JSzrkZfCp2f+asYW081tWBS5hfV3s5+PLhvLJ0FZCkyP5fxr6SJK7p + jRCPpTW84MSOJenzwGVyt/alTKmuvRz86eG1cB/v+D6YsTqaevnSz7118Yspl9vaC/r44nyljxjg + usX2tfJray8HP+ztDLIER0R+JOIUs6/rij3a309LYz9f4M7pwcZTE1O5s0jW6trLwRfV4X6ZIwj0 + EpWyjw+Kfnb1uR9utk/rVVMcJc8qRj6+haGvF67fal3tPwCfkXK4oO3D+1CpLEcaLgGq/KfCK+Rq + 1OGGjpjCgrkg6khn7oLVtZeDb1MYjcp10eK5NT0pcd6ZuBNNspreq7Gl7AOwlFmklHES78xRtrr2 + H4D/Xokcgu3L87xGfQLRj40c12jx8jJq3XZwk+03EUs5UVv0deDW1l4Ovnfnc98330s3Hnd8Ngm6 + YMiCElrZrbXe8w3P98N1k2NWb0bWQvPqai8H/3j5t2/SmR4eg8945yOu/Dc7fd7fjPQbr5rzrD29 + CUikxB360mS5ptXVXg5+XANFpFj9xz0wnHoxp1zxJWcZ7HrzNoMfyggaI5fNsy+6IPDrav8B+Dlk + QElwCqKaDMkxX5FIHxweWujmriZXB9xwAfRkcVtve35l7eXgS8dKCB73RQLqcHzx14I/mE5+UyaK + jVxNq8eFFSkOeUT/qLMDt7L2H4DvqnnkQ3DBqJO8m2Yd483oVM9BoEdrNqK5uUSc0xzLC7czBXp1 + 7eXgu9lc1M43OcqcNl7Kl7sJ2x2RfeEv3rznkwMFZw1PWVu/s5VfWXs5+LRYx+OO+YiMyI8JCgH6 + vRy2C7dPOQatBy66TtiZjoJZOiPK0vDV1V4O/mvWNF352TeMfMG46cyRY5h23XwGVI3BlD6AWYo3 + Fvm/v3S28itrLwefAN3+Epk+82T3GQYOX5Q8l5YDgWgrYw3oQjvcZTd2Mp0pU6prLwc/+gl3pOAS + BXG7CJ7aZXTrSTQbK57tK19zB2dqNE3culv5dbWXg7+90zWJnY8N30srzVLn8QQ5YaSMM7i27vmu + sZaQcYoJZWfDFDfNqtrLwT/tN/Lm11FCyjvu8z1irh3UoGx3LveURvDJ6iKUUkuT90VnM9za2svB + n6bz5Sb47K+0jvzkVQpxcEgu0mh3oNbbTnwAhy1PDZH2nfV2amsvB5/pxYHELSgoh903pPwA8vxM + CybVuzXqcIN+xQn+4BEcVPG+bju1tZeDv578WP6chbjfHlbhO33dQwTQ6+ivrQducN/iwsPGwZ2z + O/Crai8H/9l3M1xizTCw1kvGLFslF2wpL+wGn9aVL+CSwZ0IdP2Fznw1DT/rai8H3xSLvyA+vgvJ + /GosU2JFhoGdSGPgbl35q+aOqbyOZlgF97Xya2svBx+L9TY6YWItgFU61iuFG2tNcNlxz3lv7WqG + 7j+8O2Lf2TtrKdfWXg5+1nkxsOE2EuIRYdyoU5oLmsnH0h7DzUrZjo+xNtn6l84aa7W1l4PPdxE7 + B3XhWl5s5VUbn4BHSNYr01CLv/Gez0sABYU0yIVn7A38ytp/AP71eMdpqBmnsL3Y3nvY9ZiJ2PQ7 + 2RQ1g89H2XnLGdtVv7M9n1lKTe3l4D95PvPLZ0cO/buirplnnS4mfGDu1TSlFXyBW3HRx5Zl7tgZ + +JW1l4MPlnTf48jG9yZAvFEEE0v3fNJEI5S0DlNwRrwkTMmEZa2dPbJqa/8B+PSxaCIyOqTUzJhr + W54u+pT9DZlWq4s4NweaF5xBXaLuNFm1tZeDLwJ0O+w64ZiPJXjn5/Tkeu9LdM2o3dm67fjT6YyY + uLvv9HbPr6y9HPxzu+JeLxILOf/Rc0/5Gsii0Za3neNgI/jhsiPee7Tho972tefX1l4OviXpTNfJ + 
JEXhyOXSQyKRor08Sght3nY4BXMPm+lLufl2psmqrb0cfNm34ij9NXYeX4T9eTNztfpDxbk2+u1A + nnde5BDNEzF0Xyu/tvZy8EdWgom7zrS+R/R33pEYV0zftUxosktrP3+P3YZlMMsdb7fOwK+svRx8 + 8beINae4pvC8iHAsp6xsSkwbfbWt9YUb3fzdkIDGRtRiX+DX1l4OPuGtPcatyvw2oTG47Kwvxuz5 + LuO9NDPWDj2KoCiLiEOb6g38utrLwUecly/gbBUBvRDE5XuTBh1u7lpiy9v6yKI/t+PHuhdF0dm2 + U1t7Ofj7mDADN67hWvrPfBHMau5MPDVn8WXNvB0bvYcip8iDw11nvZ3a2svBfx8e4ukdPEGFM61M + p/h+6cDrBxhlNR+4TAu9sXSoImutO/Drav8B+KgiLvaJdy1KPjn6sNqF8o48ha7cmgRNiCinidHR + uHBr7uyFW1t7OfhWd9guvAympNTsDttzolrWYeC55nHa+ML1Ac5z2AS9GuX2B35V7eXgZ73MoFJK + SGF1F5RBPANt/c1VP89384EbfhecjnSQ5qE38CtrLwefvdQt/Vxv5zJu2gas5Rdxh+fFfFAMNRrc + CZqb6Fm5vZweEZ1xNWtrLwc/TNsFd4xkKISgy/zs1y0Zes8vEqcc7tZthx6FwRfqlBtnbwduZe3l + 4K8rx4V8nXm9tpkRw5rW6zLyw55Fl20WR1jxwm+c4WYDvZGmamsvBz+dYX2h+3JMwuKWPA+cHRfj + rLw+29N42+EVFl2K0KA7uuau9vz62svBFwsUQs3xfbHVPEOXk5m428n6iJLAImnaduIDuN5b9ahT + cvp6ai/U114O/pRuLfd7kM43pWvkt+BaQpSOpsnrKLd5L7jgYx+SNhlFhxC9L/Bray8HPyKyuEvt + 3Ba0AbJV+t7kU9NIIXevQ1tLGfg7XZ33m+XfmbVjfe3l4I8M8y9++S7iX1/BQIcuNoLrwnk6AFq3 + nVFfjcRrp+6detvza2svB//VV4P4pqcziiw4Y8OZXDjJ5Bak2eZtRwQXRZY/X9xaVy1lK7+y9nLw + HYRGV0JBDzZrORmkyG06mQuS8TC9a/NYiw+ws1DyX0Ji11c/v772cvDnmxoubKBYTPGmYHp8o5LM + CbN4D9eXxtuOP9N5bp4if+7oqp8P/Mray8Hf7kccVn4JVDb2F5ExIMYgaCPU6G8jYy0+AKWRzuyM + dnt0pUasr70cfEo40GeCRAE1I/6ICMlk29FesPOcrXs+Yr7Z+TARZ7n59HXVrK29HPzw+J701Ej9 + 6VI0GI5tfVzPCeXeiy6ucdvhVhXac87km3/XGfiVtZeDb2d5BzfNbfEbuARIXJRBS/hMpSBrtvF2 + ghYOfgpHew+SeGfgV9ZeDj7b3vxey3rJgB7o0N/zmjKZkLayLs8yN658xrSiC7Tz2Tv2pcMNT8S6 + 2svBp8e6nIYPfmYOG9P8EiEuce2ch8RRtRV8eTjLihDnxtkXVzOagXW1l4O/TVRZMWe9B5v96J6z + oMkaYU3zkomyGsEXhstJYLKIDIc7e2TV1l4O/vMYoG/iyGifQwPn2olZmRHXWFofjeEFglit+WNm + rYmHcXSlRqyvvRx8bL6LnyA2K3UQRr6dnr3jZZpyueg3einHB9ClYw96rB5bXU2y6msvB5+tEf3t + gpU5MLmjP2Qq+16iy02yJglazduOKc0kMMhX0N+2U1d7OfisNLX0X+46Zq2nVoAUWBFZbpvam/vd + Jo6welhFfvWnuAyMnfXza2v/AfgMdiIp+N1mj1BOX9LiLg0HzEqWd0drS3n3i6JL0VXmK9sZ+LW1 + l4N/Rkdhfd02p7yOeGrPceoz6DFMXJDO1q5mmL0sBgQD8PsyuDu/jGpqai8HPwnnkNDHX3Ce9DMX + 
dH0h0KZbYZYwNmYj+gDCz50fjGQETnd24NbWXg7+JnweXYez2pFCOXWw1Xx1Y9g73vl82izcA3yN + tUnwDcVXbzPc2trLwZ+WbGaCnUn/xs7xlFsw4mxSqlwcqNfWAxdRTeiiIeJgqtKVCPocamsvB5+P + 4HFt6yQDdDc7vMw8srFi4qo8DndjTlZ8AONzoR1LBIN21lKurb0cfGOraOSf27s/jl0TRalBGV2K + bn9+htYD97RxhjjlxBk8O7vt1NZeDj6a4Fdoh0RQGiyxEeSI53wn/Mp4/LYOU069Bb18fR0N/c4O + 3Nray8F/b83e9V4oGMTNCw06xWRFNtqCIz4vbRRx2w4TJRxZ/OfJl9BXP7+29nLwN42cg7NjMHa4 + ty+8fb+Ss5yQswFsmzgC+JxeQmatr3nsnQ1TamsvB3+VP7mTYKVBN+3Vwt4Y8GDw6G3u9HGN7AVS + uAXuI6M1d6i+uprVtZeDzz8zTL7lsN5aX9MoG0tbLaauk25Mo8FdJFnrJUuH89SaOmusVddeDv6D + OmLPMb9F3Xmn5Ulys712TVzva30ahym6RUxSvW917XybXe351bWXg48TRwcts8aWvGjr5PGmzPXU + CmfTe2gzsrbyp/CZClfsAf+8N/Drai8Hf+d5ua0C4YTW6AZwPA4Vj5a+rUIia1swJfARw112JDU5 + fLvSZNXXXg6+zo7Ol794hoYZl7Ym42+RuM+5P0QrbcMU4B8hpySEjljWzlZ+Ze3l4DNWu8JAM5MN + zmzc3a9yYj6iFRM/iMZJFvskAmjMBT8qu35n4FfWXg5+ms6UrjsoTcwXHL2ig4XpbRdpxMGJp3Xl + u+u48YiNCFfy7sCvqr0cfOeszsv0DNgjuJr7IJlvFJZlKxrlVDZ2Na18ebjR2kG964y3U117Ofhs + RlxojbBIp9zzCeHCypf7jtWvod/YWPMB7GR2ME8JyvbeVn5d7eXgTzbkLULEHkbrfGQzS8d10l0z + +zNGbH7h2u39tvgujNvQ222nsvYfgJ8PHi+IcYxN5QUxlD00Gu4cW3SwPZr3fDlB/iBBZXLQe1v5 + dbWXg/+wer0W/i5u+KhNesoPQdC1HvLiaBIbWcooELMmBW/NuHD2tvIray8HXyKilX/D/MUOfJG6 + z8RX3Dfh3jk+zeDv2/ylhRNI3N3Kr6y9HHxJ80xZhh1tjXe+ONyLA0+Y3L0O36u9t7ObB1GZhuyr + L+8FL9zK2svBN0AhfiPd2c35THETb7U0cF4YF0qJ1KhMMRWQ+WpQtpz0EX2xlKtrLwefHwgHX0qg + fDHd5Sk7CgR9EUre4zb0bn5ksZINzxFS3GPt7apZWXs5+Dq+x7VPDxvTja3sxN9LXKKZ+r37y9S+ + 8mlSCE1xEvvravrB1tReDv65C2HNshENcfVz3HLcbhFnrySplc1v61XTZeqLfMtJrDPGmqt2Xe3l + 4I+ulOlcZJ5nr617Xh/epu8gPeLCm5obZ7jxVrDd8NAnDVp7u2pW1l4OPiry61B0sUef4g2ilfSQ + 4U5IB9dLl9u48l0ZJEaYp+AxHJ09smprLwef4G5ZBkt+wI1Npn1pel4DqImnJt5a6z3fB1jdZMfD + YdJbY6229nLwnzPv+SJ3lgIqWSMNJyt3jTbJlGYgrSLoWPG8mmNadh57Z1fN2trLwb+zMRN3dc3k + FGC/mPnLYi8SZvDaKlq3nQgKOn2rYQPQF1FWglJd7eXgb5f8Nl0dLIZpYPeS6ZWXNLN3pIrx0G0F + f9bS4TDA8gVps6/GGr5kVe3l4Pt6nYUDWco+YJFE7/1Kj4snOeKO1NMM/miKZcvH0u/Mb0faSF3t + 5eDzrqY/GV/X8DExeTxk1Zho8ZjisDbereyFcXbc6u3Q/vcmggZ+Xe3l4G8Unx5X9yMZ6HitUqkR + 
0zLdJugcGHL7yhdsGh9D3vTQ2Qx3nOtqLwf/pgFFkpUURA1KpB+bs/67/9pZiyc1r/wYQR/DIWrU + TtbZnl9Z+w/Afzz/Ob6sWLI6O+YqkB/2r8wUMbmtjDXfJUbQKjJIZ7kvijjKS13t5eBPd8bQlDU/ + XO8jsuYIDYlHKYvfk6VsK2nKBzAw0C5yskz9gV9Vezn4iUQcEX/B0fEPd81Dt4HnhTBPeX2p+app + loI0xXWEcVJvV83K2svBX7ntyGNlnH/cIcLiDSWOlZCEVMsh2cpSDotsUUE4O/5bOgO/tvZy8Cnh + bpaXmf3iYcx900OHnSnCfjpWlI/Gez7KiHw+nQon+daZOKK29nLwp/3K3HBe6n8mO+Yqh2zid2V5 + cRDithpZY+Ayv6D9F1a2HJ2t/Nray8HPicn68hh3eNF65e4bIo+ALN+JRuTavPLnMNxBPXfudmZ8 + IcO3rvZy8O90CcjiOSIIUfPxxBAkT8nSm913GO+0bjvhpzm5Zu74mr2t/Mray8FHkXqEppx3WtdH + H5kO3T404A0mBiSNCXHkdQsSIqepnTqlN1lQbe0/AF/WPJORrLPJWHOebPtD+FWguMY1v3WGi5kf + po5IERyPOmMv1Nb+A/AjcD5EspOG2pavzIAnEcflW74VonLrtoNyt9hwIh9u7+22U1l7Ofgjgg57 + jXlKeXDnX659fuWh+xG8O7+j1jEiOoS2QkiqSeI66+3U1l4O/iWiDEtt2plqRnhBRJ9zAniMcpcz + 3a0DdHovVx1EzXGlBe2rsVZbezn425kfBLWRAcarsyyOeMEm2a9j2fJwXa0vXMIu242OHW/+3qgj + tbWXg8+w2lavoTC9vKXYXzKxlkb8XodrkCtK655P5CgEYGHzRWfU2cqvrL0c/IRONkvi3ub0ij7n + fHSFU0XSi9FQbs3J4tMmhmth+rLu69Lbbaey9nLwr9tS9//ENc3aO4Pb5SEjC4WQMFR/p3Xl+wEd + 4ljDPrIzvx355HW1l4OfQ6r2TsZZb8pakFBfkKXwFuTMiDJoBH/nnT8zFljDO7KzMWJt7eXg35yg + HuxMTywZ6DwYReld/uOXwTIb/VbwJaWQwi1MR8zR+9rztQOrai8HPw7Vx11z5/gyIZDwqDBC3Bjk + uGo+b+uBax+bNUut/mmdO3Mdqa29HHw9HLERabs2TgvH43mF2uqvz3QJQm8MIyZHIXT0haLErWtv + RNna2svBp/nMEYf7ZlK4HV2HBSzXlxDGLTeaeOO2o0mEme+HhHk3djbDra29HPxtvk1RyJ15oeny + rPsmPWU9b3Tl21i3tbfjA+iqUUAjZfUmiKutvRz8435osnjKGl1538pntQkRqowLbeg+t/bzva/0 + FbTWYpTYWT+/tvZy8OnD5fAhZyKq0UGfEptO9vmEEfyP2xtrHPnF8m3eWZHc0ddtp7b2cvDfWYqY + KZYIUIIIoVmjrLLjdedhbOoMaNzzYzfTrg5tyrZ2dtWsrb0cfM9ZjZ1DDLe30JH5ALAD3NeVHh2b + KrWDv597NJQjcqqzqyZjvqray8EHNw1EOoaLEEtPn1b/fgP9e8jC6VtvO2HsSOkY9o695WRFwFFV + 7eXgh7edsaFT8SJ8NjLeD9NbziOUuM7JVh3uV0LTl44eW7C3baey9nLwL6JDs6zpSkeihphWd3zU + 1kVOnPz55hmuCSIdKKrsKnmnt9tOZe3l4Ivn08OX0/QIbDpEVPLOd+NcTNDz4wbUduDyjCGE4yLu + c5xLX3t+de3l4Jt1jKN7DVMct5z5iMvJ9fKz9ti62Yk3gk/bwd0RXyrsHfsapmAx1dVeDv5XLqXO + WhDXeC8QSFxpEZR4rAtu8duoQPcBvkzEtNV2fLiu7vnVtZeDb2uf9pkhkVHKu12u5U9+OYu/Ocji + 
rZosRvwIcLHxhK9sX72d6trLwWdoxABNHhdu4PsKpny/7L6GpBd53a1RTQyT0PJRd2Ze5XtvK7+y + 9nLwj1kb2T8t/HXMDtpn9eB6nnd/xaY0bzvrwk05MuI4pG59KdBNlepqLwff5Dy67tf4XMxeZDY9 + IVKJ9CZhBtvd6KvpA6wapfOXoUZnpKnq2svB3+aHj+mWBAzwDM9CQVFl08Ll93hJ9htfuJPNy8sW + R9aR21lIWXXtPwBfLNOYSIDGvNtqsMsGBgzsBv0c9JkbGWuTq47jNsSgCEF9UUeqay8Hf3GvMcLa + +Fa/QR+J+dUkPIu14LFeZ6MI2lRsWcM5n42Bf+3rqllbezn4/ByvbU8jwGUjSkMkPEdUDj9xg/Wt + 9ZH1dcUP44uviMTewK+rvRz8Jz/hizDtycDW+X6I8FjisSUE3e+gcYzIMMkzK9RGIhI6cxesrr0c + /GtBElwFs5Ig6ufsYbU+maevLju8oZr3fPNbUywbf0hxO1v5lbWXg08NxGJqtcXPjEzxypy6hJsX + Jbrp7tTI27F6CFPCbSrslHs7cCtrLwf/eMfR2Mo/BRGvs4EKUqsGA70E3/W7+cD9eqpMUnD0lHvb + 8ytrLwcfNV8mZcTDHdys8fKZAEy6a0j6L1/JRuOLSbI6TRCGviHx3Jc4orr2cvBZIuAlz+aI/MS9 + rg4hNTL15BdIDhpa04K83CRScGpe4yvoDfzK2svBz8t7SQdC02c7YoiSV9DPwWfgQbWuzXv+CnQ+ + X3Z+E63ODtzK2svBx4MWDGSegrj2zjf5P4XQcp4E6CFFb20vsLwIExPmdrNWaV/g19ZeDn7+utec + vHZmhhcPHehXcMr0CuLemqOamFZhrHGt8cQ9ls4mWbW1l4Ofhmy8+oYnyKObQZiFpXz6e5IjnvS0 + Hrjh12aAy6E2xpWdrfzK2svB1/N6uJdGOFN47ib3/XNJxlgXCmEaWx9ZWslrhC0KPjPM6gz8ytrL + wcfHJ/dn4eu+qcPGzdrPYCWMYNIy3Etre0HrAkMW88sPqjODO7nhdbWXg880nKujQFASFSnc15Dz + g++BKX7szz02eqx9kTS1KWLX8c/OVn5l7T8AP7vhzNv2zgdZ1iW+Q1QZPWhoJWSht75wd9lwWFju + rMTt3YFfV3s5+DiU+UIZJMadr+DSH1PYTWG2pj2Syhp5OzslOxcxImjJl72BX1l7OfgSWLmM0KGk + eR4RBJGWvXblVbz53t6xeeVLqnHPNMxy2+mstxMpOzW1l4OfcDOTuJphoQZNx6Ktf5qdS0o8PPDa + bzv0cDgACHF+wp21F4BfVXs5+NFVQ15gns9V09oX0qQLadiNoL9srSxlqpRVUhAJNOZdbyu/tvZy + 8HlLTTG+XdDzY34llM8j97wIkmw9e+tth4nyEXd9D7mts7Sgqbb2cvBPSz6nZ3HwWvvII5Qpy5TB + vyQpoa29HQ0dtBE56BsKVl8U8eDtVdVeDn4Olf/Isv0Kevh7r3K4d7Qd4XxmWqn1hRv2YcaIxzkL + nO5sz6+tvRx8w707mr7vfURcx6n9dYVe/5rSdPPbbLxq2nPs96Iv46rf2W2ntvZy8JnJcvPab+8s + pl5gJ9TX48Hc9ORlSdIIPhKuA52l7MbisbNtp7b2cvCFZPG9jMdsWrxr2V/EtYcnAInQcLTafPEQ + METBHxGJYKDYV3uhtvZy8GcEnXHP+pphpYybL59vX/kBvhLLnJHNKx9TUFttn4kv+hLEWTh1tZeD + n2V0UNxKMBzze0lz40WXlvyIzPJT2BpjO5gJbNTPkXop3LsvBXp17eXg72dGpTTKIsMiQrqucXjW + lbF47DtMkFtXPnM1aawLqvhwdtbPD2O4mtrLwR9fsnOz811/LY7cQxTodTK+2IzS8RmawY8Dhbjd + 
I7c39sKpI1tTezn40ucxFt5jffU2UcO3EbuJq+DihLyH1oQ405lwdsSIY3nU3YFbWXs5+MwW5K9u + mRJFasf4EGNd4xEeIUa5RiBtK58kJaxL3F19m525C1bXXg6+0TYX99ul3vrkKnuujL/WmdHgw6ti + awSfnSze1EiXMk+9dTWray8HHyH/XpkKLqxejG0RqMQTr/cbFsvvnBt1uD4Aupq7ZvDzO7vtVNde + Dn62xtfjlsA63kyt0dRccvZ7Wt7HPtHqIu4DhAQ34lhx0Pu651fXXg6+tIiMIM7n6OUwFeGs4lnN + tvTgUUqmRr8dapTVhjba8NHP+2ovVNf+A/C3xw18ERuJD25gThBwh98a9JEY1sZ7fnyA6CuE1xFx + UFftheray8F/JcmwOTY8eQz6kCXSQYi+arSZKPpS2m47bvc6yd6KpqG9+edX114Ofn7IDofI5jNN + OVE0KZ/pQRdejzuaXyNL2QfgIK6ZHxYMncV2VNdeDv6bzn19BXDvr6Pxfc40mqG8xzy/AxeS5pWP + +xmXHfYOJvJ9bTuMF6tqLwff62rn1a59LI/4SZnXxbYyF8RcIGpoDaxZefGbxs+8Hdma9tXbqa69 + HHxmC1a9U/FaXmvTFpTNPbi4s+Xy18YBug9wULlEAMaCqtzbyq+rvRx8tnPbe00St4dnT/PO0Bdf + kANS8l3cT6OjLNbC17QmAqax0DsDv7L2cvBDenjf5+6Kw/BYF+YB/jMafhgw0qq03XboqcNJCQ+F + 0qize3517eXgL2HnOD3S0DJPWRKw8/oK6ON/tG/v20gdkUkZyREjxRfFXV97fnXt5eBrrAk+f7MW + DM/8ic2LCGgcKplZy4a/3LbysRCNxjgpu+9o03W17VTXXg4+QgdVCk2izWFwzxzmFGwPWaEY+s0r + Pz5AiFK8GDA2+7pqVtdeDr6cIEIIY5Pb1rA913QJN1keSrac3Tkbtx32eUKCTkFEh3/b1wy3uvZy + 8D1obwNE/o4Wvt1nEFpw8uX6slU+18aEOAMZXsHhNCUhbumLn19dezn4+joJowxZ7eAM8tzbw3It + 3Kb4kNJGN9IFiUARcR3e7I62sy8RdHXt5eBHRBB6Dgr9ncbrENVHGzrfR8q4a8/QeuCO4eeLeK67 + gyne14FbW3s5+Otoz1lw8YnOL9rzFWthSqFRnB+M2eaV7xdEAL247femRtwjPrym9nLwMWo42fGw + fveHFYDx4Y4tznmNy+mcUuMYMXqZRBE2n2Ve5r7ogtW1l4Nvg88EoK9k1o3va5io0jNEYtZFGHo0 + djWNz82Ew5KcLmjvbM+vrf0H4L/jM+oeP9Mq/VwY9JLSZfjEjOSlR2+cZPkt7aZkYTYVNhJ97fm1 + tZeDv8rJ4ia4HgKChMTpY+JVZsOVTSqooL7GF67US2RQcwFax87YC9TJdbWXg4+pIE/mpk9hoD9h + ii/n5aYTO7QzsrWfjx9ufKhjHf4LU2ePrNray8HXtxe/+rhqzjo5kyduWE4Jop/mlza9cYwY4DMw + OM1UoknY17ZTW3s5+BiCifY8dvhL/HlORIPnbZi+eneRqzRuO45ZnEO/oy1+R32BX1t7OfjCyV7T + pug53uuzHdRx88Z55DZzDep4I/gYKfNKXYptjhvRF/i1tZeDz/dCR4EA1L0+NFgSgw6pcfu1vZg8 + W+tV0wN6ouqjuDiNFDsDv7L2cvCfBTvNQIWn7DHl64uav7uejGQpi5Zb68pfYw697yKVj72zq+ZZ + WXs5+Nx8kUXArfkyIy6Ya0mIW3eKCX5TqVEWxPBiNBxjNAX/3hprtbWXg4+hQ2e+ok6l4+IzcqG0 + im/S1rks1qXxwJVkLeZ7cd7qmXbmIl5dezn4gybagbUgjDu7DWpnnjv3HYam7jyabG3bzmEM741r + 
J3Pd74w6Ul17Ofi7680xkp9f2LHmHeM14+cjL2jsDGNqfOEajuGCIqGvvJk723aqay8H38G63zKI + ocMw/9xvKx5bfLh5KaMdNBrcIS34Cm06qwfz0tdtp7r2cvAdg3u+CQ+lR+Aov0EXeWebEeORI4/N + K98Y0ZaPNYWw1tdt54gRaE3t5eDzVKNHWfSTEyLD7qY5ZJnlyLL54IfROMM1I0MQd8s0m9HW7Oqe + X117OfhnmuXNO2FfTBHqT1t9Hm6OjNRBYlQa7/nxAez5GqXbrHPUHfhVtZeDn9ZRSJDcFDs87x3j + lKwHJmhAeI15Vuueb79HkvUL9pfOAugtnLrafwD+eSFlcsMhBsVGZ7JznZTQJluZ+8Xd6LdDD2Q4 + jDXC6QtzrbOVX1l7OfjTHJnzlv1rgH4uF3uvzHBKcNyp5ZCawYe5g4SlEuw7O3DJyKpqLwdfIp+3 + f94nNgBUWJ65y/D4BpZ0Xm8+mw9calI63E1U0DD2NUw5QglbU3s5+KOYJpcbzPDtyW48YSj7JBOt + hw+GHO7GFy6uIP7PhjmFK9tXSxn4dbWXgy9qPtM8owrijeREC/pqOCQ95ZTk1zSDTwzkkh8WDK6b + ne35lbWXg7+9MreNzJFHvqRAIiR24ywWLcFgGBuHKQenI4ne67bzEz97W/mVtZeDn4Yggw/zra2f + 5teTax3Rp3CLx9dwq/WFixZk1RvR42ZNva38ytrLwRcQZEwucn5n7fUl/Ge084T5yHLhrLWufP4N + wo39F3CR6cx7QVphXe3l4MfcnO0livh0c7xw2EZv08V8cdl51ka6IIs803POI6xZe9NkVddeDj6W + poMVN5D28JgNcyMZMXtqjUJs7v8B4EdrSvPOPjb3RReMDIGq2svBn7QxB8MmalA3TukOlij/l2Vm + wGCu26jDPfyMuJqu4F+wQPu67dTWXg7+O52JaTuS5mqoctyJUQKKzTWYoTNda71qYkWEM7Zd33ul + s8Zabe3l4OvkeMouWVw2t3ydTdR8jE39Bu5Hw9DaWNPOHHWqySrlZXW27dTWXg7+eIfUWaSJBb95 + 57oV0pCka9jXh7lmI3vhYCfjT6JIZH7RmTKluvZy8DU0n/FORInJkchYk+m0HuRuljKs9uuW9oIJ + olvOFrYvvgGWDh3t+Q21l4M/XHmZNs75ZOfjw2nwejaUEZ7u/KyxZhvBnzV1vLKYiI+cifsCv7b2 + H4DvYA0m/nLOnKuXWxLxq4+vvbmdD+5mG/gUjdqZIllHGsetr5VfXXs5+OZWTxJSfr/bfBNHiOYb + Dl1NOap4422+mgcxqRmu5IhptPlsPV01G2ovB/9J173vOfyTH2xlLc49ye/QbfDmok5pXfncNH2F + M30Tr7uuth3Pk7ray8F3yTzfSzaomZl3kMGtmBoWmOfFhGR/moYpsXrCuHDEg3Cb6spFvKH2cvDH + nN0BGS3EXZAJQz7lg15igwzSQdZ0z//6AAY1bGX5lSEhdbbyK2svB1/UudxtG811e4auD12QSNDw + czdURFZu3HZ4+Ug2lZcS9M+eHlkWTm3t5eCzGfGkklAjgdvEeEgRLaMTSZ1CEddGF4wPMOPHUlXH + KLor44uG2svBf9csK+hdoL26XV63sET2vhqa2VjrbWopxwfwTkOZosKd+3KUbai9HHwQH8zt59PU + 3CBRFrc3LmHQfduQrNbWbQcZSEPf9ynmtSf2QoBfWXs5+BdXI0IUtPBTIKt00AiBDmkK3qx41sYD + l/KZCBFfLRQvS09S0K/g9rray8Gfw4eIle+xoi6cyy3aZL7C7+vOaxg8tq18L6uIGNwcJM6T3sCv + rL0cfM72YjtGTYbINhmfPWMna2rKjhjCUbkR/MNWP3NxH0xqts7Ar629HPwgsE6yauQPZ8LT930i + 
jHv1QzBNz1PjC3fWn9YkZfsyU3n11V6orr0c/Pk1QEcOFx5xbOv2zAJCw1p2vEf9zqvxwKWDCx+Z + gw7aQKUnfn5YIlbWXg4+ItnIVdAD1PL0b1wxeV7cOGwuiWObo6xASm47oF9pvUwq+9p2qmsvB1/Y + Mw/BAVeBj+aESMJ2h2oTj0q3oTEzJcCPW4P5jK3fMKur9kJ17eXgIyMHWfArqYk/Rby3YH9tx/kY + 6b5NUtAv8B1bcYvdjOX76udX114OPi3iJSMlBCrP7rA9znt+uIgbaM1jvv9mb+e9/uGPz9/94f+6 + /in/w/OH//iHPz1//NN//+Pzz3/+G//p+X+uf/xv//D8L/d//cc//N0f/un6x/jf+T/97/yH//36 + 45/+j/jPf/eHP/39n+L/9p/++z/8w9/94e//+F+u/I9//09/+I//8kf/83/9t//P676fP/7xv/z5 + b/3hj3/yf/x/P//89+/fP/lf//eff7z+/h/+pZL/rJL/7au031RSDs7O88kKupjzvbYCMn0sD8KR + +dD7kpf9k6vg+xd0fo3Lf/5v+frTk/9DVP3XYfnTP/93gP91VKD3N2H5a0WUQ2K8yXnVobQaQEPC + r9WCuYVYP/ygfxTnGEslOn5rdNSn/xT4/GKtBCD/4X/9h/i6/+eslb9eRjkqhzYRw2oElON55uXJ + yVv5lLGLDkpd+hMrgX8th7nzOXwelW9llKMCCaSxd3juYb7QcbRxFrMqNE3Pfv/6g5/P/wcViYe/ + B1R+U0Y5KussgRali+41X1h6zODMU8WMEOrohvyENfNvqJj8rr8DVH5bRjkq73hxthUHjte/vCc/ + GqNNXA70ijwQzNSsFaq84XeAyrcyylFZ7imm7Y/ppsvQ6Zx+38yQU9qQE+j+CcnnX9cKl+Xxd7Cv + fCvjB6iM17Red6SNXbP7ynSk21b7vBP+jet71VrxC1w+/wuKjeDflVGOCg8DagMR45J48AIGXMhE + hICIN4y352QVKuHJ9TtA5bdllKNyGlVq95hesh6QjxMXjs3UhbdbHiyWSlR+F78gLMN/90MuR+XB + mWI8tb1aalsMAQlKhIZqcrKFWf/2rf/Pd9B/vfT/277yFVXz8Vsck7d/X0Y5Kk4fpqxccMISZ8V8 + 5FAk2mFxXqcfmn3/BZXNyGr6PCrfyyhHBZ/9PlimeAS5p1AVhKh4Gq8xk3xoDFb8gigQV0/xT6+V + 72WUo6JjdMFC9h8Ph7jmU6acchdddtkH3zV322PGN/z8GfS9jHJUTJUiCWufQ/n5SplHEJyH12/p + uIyv6lDh5PX5tULT9JsyylG5lyCo7isO072SwkoyOelAzeG0Yqf7J1nkf9lXyP8YWnz8F/S9jHJU + ElBWL0O/oWE0diCSlLZAwHd7Hb0/0s38KyrUGsvvYK18K6McFbZRgy6c7YX5ZDrme3hE6QbdOcbv + w0/URP+GCuf+38G+Io/235dRjgqvkG3gE/UcR/ZjSoO0sXBzecKq/f5R0OS/oRITkc//gph+/vsy + ylFBmQvnRN3zO3Gqd+l/cFH5F5NJbu/0E47GX1ChXRPn+XFUvpdRjkrYimnOZKmM3MCyRFiklZko + jDxDXmNNJyECrX8H76DvZZSjIizxGPE3t+NejHOHSDJAJhTjd+VznGpucfGC/x2cQd/LKEclMf1G + OBBdywQ2s+d+L3YtZ56Ccpl/FGz1r7+gZeJv8vlf0LcyylHZkC9wnHjPj2+2416Tti0blXFmiP4z + vu+fUeGRopn98R7/r8ooRyUO4zNucpuUPpYCwyCW4jGUon15rvPnJ3OUQ9716W72r8ooR+XEWPAg + TGKBmL3e23Sxu3hEgh4bFsP6833lqxxS8w//gn5VRjkqEURreDu7n2zh7M2Fi8BAI+p14x23nzBp + 
/vILCi9jQUmffTPjqHwroxwVWkkhpU/IaCYrRqjFNGT2q0nsH3JXzS9IOfzHfg+o/KaMclTOvN3z + S8G1HUJWrkGL0t4yP0Slu55/FSrhtvI7WCvfyihHJU85mrVHrJPjYlo65/0i9cHLFIuyVqFCO8Sm + 7uO/oG9llKOyu70xjdbc3yPsmhzwcb/FiDd+t2h+frdlYEa5Kf3u06h8L+MHqDxLfrj56xvwIGD4 + po290nG94isizeXHHcpAhY/wpzsJvyqjHJVHC45SRXvlnuLX9K5r9mLMDz4nx4yfvw4J0iN87fO/ + oO9l/ACVNzyhJNhypaMrCekfD2reTuuOYPz+RDz+l5M5+lYolZ/+BX0voxyVHKaIt9ehR5AYkpuk + kO2GVsueeLWuP+ckUNAfOp6/A1S+lVGOSqKzvqbbvnJ5NDuTxyPTWxs7J1aqP/Ku/cta4egQXiaf + XivfyyhHZaeDTsIjGVSc55an+cJIkLIhLxFVvGIeFGmICLKf5iT8qoxyVE5XNy1ar2Wb7YqwwsaA + Y9NzGsuvYKo4g3i7npxdP75WvpVRjsrB91lkLOnSEm6dy2bxLOkRsONnlO6ad9CmEbx8/r7yvYxy + VPbZrcRJivqI7nHycUCdXLjILOd7SDquWSte2+S7H18r38ooR8U8SO/gNtgifLNgKN4IpcivZNVw + eqn5BWHw/x7utt/L+AEqXNKvM24YHkOL+TKjD4PEZLvNego1a0WKa+hvPr1WvpdRjkoMg3aDH8tt + kOiaZFgkxhzp9skGR3XFL2iXZn58/r7yvYxyVN6TVDrRb02CDiPOjCuJJDk8U6mTHLerUImUos+v + leO3ZZSjsr5C9YKMsJFTCRk7DEKomVd68plFbM3dllmFDNuPo/K9jHJU3qwbOV1acOlZ73d1z7dI + rsxS/qRv+DmHcg9XCmnNn0blF2WUo/Is6PuGzeJazZcfdNsYHTIWY+9mJlKDCji5a3wcle9llKOi + WYDp9R6eQNP9SinTyHVl0ZZzxeUM8/N9hbeaM+3j95VflFGOypVfxkM4x3Qf+irpGi/E9XGhgrmu + 5azYbWEiuO3ju+0vyihHhQu04Y9B4e5gtqdoOl0jT25mTbcWQwUqkpPJjD7edfpFGeWoSJdLZn1S + EeTeXMn0nW8qop0AzMsTseK+YqXwPvz4feUXZZSjgowwzibv7NrD4uJi67U/7I4uLgsO54obv0f3 + vnycqbH/ooxyVOSz4arQ3L/7wbaere45n9c0HonqjjfCz3fbcFSUvPHpM+gXZfwAFbE58x12qI+x + ED+mOYYgOWVEYk/oqrVCePhxXpy18q2MclSCP+nu5n5vGEQfdPMzevAnjUBMPtaKbrZymJ98/Bb3 + izLKUSGTwtXQtWWQJvwKIfl4vIfGUy6HmWrFLS4cZw30P/4L+l5GOSpxiGLvkDj4IWnLDYLA3PjX + SMMb7vVvKmF+q/T+l3Y2vT5jjI+3s39RRjksUxIYsiVsat7ZhLuP2CguS4eJPPP3teZo5uTJw/Pj + i+V7GeWoxJSMto7CiMwOI5t7xXWfMRzyCNB4+luH0F9bLEHw/tuLpUHj/c//9W8J3/+yXr+XUA6J + 1TFy6ttPj2R52V9kpzCVyK51JFMVryAnF8XHx+/7vyijHBUv5tmzMNtITjoPjVqJX4n0nZBXa6GC + pyFIdnYqfvrn84syylHRcLq4A5tyhKkGxopeNniEcfG28m/+1s/nl/o6zVKWC9PH7/u/KKMcFQHb + 6bGnSv/0Qxwmuee2WX0FRAcH0M91MIbUVhnzxg/3sn9RRjkqssH8Wu7Xh0Hl4Xm2XUlIngwlVJaU + Kjg9yxBRMR8/gH5RRjkqL+qKyxrd1GnJzA9XADvMnM2KmXGdFb8g5RAHffwX9IsyylHJ80J26WKy + 
6WiPF1HDKP3mTe65OGA/Mgn7lyOR44RR5sdvtr8ooxwVr8F7JO9I7xADP/Hapu4pU/Pm1/Wu4gxS + Di7Zx0/mX5RRjsqFTXqmlQ5o5Vw5sOjRrJUf8GR0lulv3/d/fQYxB0TT+/wv6HsZP0DlEsdmGPYs + G48rphEbAyw5jZEdmN3sfn4yByp6cZ8+g35RRjkqnitbfvbXfZZLnW0ls3+chmcNvff6oxitf91X + oCKd9cMn8xcq/76MclTCC9BY+SY3NEI8v1imkfqgv0DtXaGv4+8T4/vPn8zfyyhHJX4103lc78ic + GJ/HBMSbmZ5sp55C4/9bv6C/8jSMH6Rl9rcWy//sp+EvSiiHZHued53eCJzOXPp199NmwnxIwQze + U8WIOcpx0/lbiPz/YIpmvX4roxyVTAgnlnL1U5muBaHnELFxDhpQBLxclf/WQvlrBxBi6udlZFD5 + bRk/QYUlzfY+57whPAmvuh/46GUbEFlBNdf9yYjgd3BZ+V5GOSosh4c1CBqoCDLahasvVgxhZs7m + ZvNVsVbwDXF/P/4L+l5GOSpft3tvZjGunkP3gjgZrMGki3C/pEIVqMzYnHK7P30sfy+jHJWRgJny + nzNPjq4kL+9JapU2HKacx1DNWiFt4MjxcVS+l1GOypHnMDHe5nSOowHzIQxyfsz90AdN4f8mUfCv + Hcu6ncbevwNYflvGD2Bxt48GNhJ/vq/tHd1pYfKs76w9t1dst2FQiGv4aVR+UUY5KkiC+WDwe4up + 35CcqILIblMsHJywseInRBMtNu7ja+UXZZSjYuyzU8DI8/QUSqI9M5UD+spFKUQhVPE2VM7J+efj + a+V7GeWoSFhO08q4CF+fEw0nDGZgOi0zmR0ZTAUfYXUCrfPHL7e/KKMcFVKgl+Y9rOJJ3r2GjrTF + 25lLwsIYuaKXrRxdp4/zsn9RRjkq75wvQXqyny9yXQaDBqjzymyRk54Hc0XXVjnhuv35X9C3MspR + IflfMXc0DoIgyz/8lRkvxIrk2wYxV60VPN2Pe1ASYn8roxwVo58kyGvxRtzHeAuFv5O2kwTfMPyt + eDR/lXN8vLvyizLKUYn2m9/QuxiqThrbcs6oMv1NVtqTiMSfX/mVo/X7OziDvpVRjso9scl+x5s9 + 6f6G+F0T+zE69BvaEFkq9hWOmBva7qf3lV+UUY7KuQRdnQcN6SEQeAOkg98IzoZkYPHkP18rHDEZ + Bn/8F/SLMspRmS/MWsZU252BMvHmmQ1UOV6d5yDX6G8+hH7ddlJOjA4/v1a+lVGOCqdsG+1BjJkR + eMYbC1la+sIiYaHGPCtaCdtot10/fl/5RRnlqIwTwaWsdxNQ5pwPYxp2EciT2Rl9L0sFU1A5Ai1+ + D6j8toxyVAZmANhO4TSPm5H4Ihh9DPrRGMlYLRWNWywnce4ffzP/ooxyVLRV8LVQeLA9mAty6zmu + sF+8OaMJc6joJGjwj16YH99XvpdRjorELOlkoste0ne2ItOkDUBVdy2MaljSVJxBC0e/z3cSnKO/ + LaMclcFIGfktbFvRmwS7SykXJ0nXLDUo1WiZw7/R+/Lja+V7GeWoHGsyRc0LnwhUhP1YZ/aTGT2O + e55Z4t+cHP6Vk1lcHg/Uj6PyvYxyVATN4amsspMW0rrlK+WVFYvJIZ6tKWvFL4hLFsn851H5VkY5 + KmZBBILRP1tDVTcPm18Pli3iBsXQWnMyR4L3+vlf0PcyfoCK+woDvRU9kAdNjCFReY7xYQG2g6UO + FWTb38EvSOTfvy+jHBW22OiT18sB+cTXICALcTfnX0ohGtUKlxF9mcj+/fwv6FsZP0BF2FbajB8F + bZN0EjMbkHG9WvCIL4Y7FfvKrhfxO7jFfS+jHJUt46mY/KQxpssPva3TyNPo5d+YHG41qNAXfV73 + 
Hvyb35RRjoph4cIPjQsahoYeS1xfNCrlw4g+sXoqUGH9M33ebZGc+rdllKOy04pxDw+d7ZndWGZ5 + sHS7PMSPSM6p6SQIoHbMf3xf+V5GOSp5fr4ubiga22D6YdEEu2eR6oyucFfMDvURBvY2n0flWxnl + qEi2HsLfl0535acxjvkMIWZa2WpwXqw5gzy4UYV+B6j8toxyVEim9lPmNyNY3Vunqjw/TSNXW06d + BkU/31f0x4/fQX/lF2WUo4IlyHqFWOqRSEyYSrjrOeS5TA/DdbHiF8SVM4hBn14rvyijHJVzIcBc + Dz8dSbJ3khx0DKz5F1wFhJ+7opuNpLsT6n0cle9llKMilOC6JskPNlnqD3ZXsojzYVJG8O1srvgF + 7Qaqn/dXd135bRk/QSXFrVZWYsTijIQfXyQw0W2Zr95TsdseXpjC3j+9Vn5RRjkqyx4JmPpMIlAY + zdN6UNxSTUnLMTmcKiaqBinmzB+/r/yijHJUjAgRs70NZUrdgYa/pp3zYn6e4arRTIWb2v551/lf + lFGOisnysC7387DoQeIX+zNyn7yCYBozoopOwjFPQfb+/C/oWxnlqGRecdtjw3XDD5vs1c/o5sv5 + PCe6z1mFipiDzzvScML4bRnlqLyOUQQN/ojGw8Jk+aMhxPm+SezYC1bMgxh72lg+rpn6RRnlqHAR + R9v3QZYn1Mv5ZIm2jkSHEYA41fRtj2UWOvrxW9wvyihHhS/PnaebZBdHkMbuoMBkyr+OdhpvpAot + ptf3Jgfi4/vK9zLKUXkQ9j2UN0fQETcXs/cJ5TZ7BvFhXCq6ThFwIFj846h8L6MclReNWuj7q8my + j5znjS0WdzjOriLOGer9/G6rkcDT5vNr5XsZ5ahgl1Mcrjos72bewWdRF3dAX/F3R83/GlRGg+aP + Tz7cJH9bxg9QkRIUmg9xU8YdEyfXa5hI7OShuHVMFdzs/7e5e8nS0waiALwlnr9gMR6AEBtIptm7 + P9mTGDixkM+JetyDrr4tCanqPhYjVQ4/zXfQvYxyVPYhnyQ4CHJg9FoOrgDMwBgAeyHubro1a4VH + T3tudm4SXcooR6Xj9jtSZJ4DmqDAhWHakq3EGoBZz7nU3FdyE6t9L85Q91pGOSr5E8SYh9ApsR1C + VvcVWtDAhKpqao8VHMqfqoLm58pDGeWo8IjojMjChP41snGyXIQz9xOTDW2FUNGhRBsRUNNcYvdQ + RjkqbCZH3f1JWMOGnE0VhDn52T7GzQSZqWLyoRwkieY9/ocyylExNZXsMaznKt+cmV5O9sNdMSMz + RkQgfH/a4iN4eTe/rzyUUY7K5L1wfLwH0fgnTUn67k/sE3LbfnCgrzlXcmtvbb9W7mWUozL7NM+j + qDYuelGcsYSoWVQ1Mr44rk9N33b9jEFHu/V95aGMclRiQrT1bI6e/5H2PSVnyuyykueqhM4VO4i7 + wG+sI/4PowTjrGsZ5ahgjy6BZgpb/Vy5tjJcOXKWhQhIu2l/f9ou3YBs2Dw76KmMclRYK65HRMkY + D65OgqZYbOhoazGy4j8qVFMyprg2Ns/bfSqjHJWOhyZCBv3l0sd57HlR+vpgbiSUBKKH1zsol4P+ + 3voW91RGOSprDmTrxfoxFpwYpDHST7uBkFvukBVDNahkT+XWtzio3MooR2UZJiu+42IUtSTjubCi + wdPeEi2vA6JqrehHNM+ZyqhcyyhHhREAeqCvTyeabOcJ3esYHdwGDVmJvt/34nI55vetv8xPZZSj + IrUuGaXiwSGu9H02TcCI23ajkMDG530v7kc5krwa31eeyihHBYfUG7C3OAaNbMFkrrXss43JRsOz + UHeuSD5pPSX7icqvZZSjYuNEDxfpUpvTxcjjBEZODt1Eh6L315y264DA0n6t3MooR2Xd3NZYQaN9 + 
ZT/ORDK4ikHR1haTsy/ve3G64xo0zXn8T2WUoyLjAzGOD6WM85SbtmHH5+c/vzI0quE6KSf0S3O2 + 4FMZ5ahEYY2GqpmhQY2pdyAXZooGq3gaaCw1py3PeXSY5jvoXkY5Kgdpx84jYRVeIaEtcnOigCeI + 0ds2Oas5VyZa4OZZh3mtXMsoRwWxqTdu78yVO89n4cxUdpTe87ZtflLzOpzcBJvb6EHlVkY5Kpq+ + LH7JpHjojRIgt3GfcTRyCqQpWYVydzF+XJASmu+gexnlqLAp5cnD/FhewyDjYx53xMGBBOT0mjnf + 8+KWDoN+bm4u+FTGC1QSdqA/JHYHfYM01RGHXZxfwKtkMlhz2kJFTc3Xyr2MclSSTsI6nU4CJhJe + zYcGwjFuW+awD3tXc64oh2/9V0DlUkY5KtJBCf/DcnbcNVgsclgn2ZCIaUKm7/9+HvRj6bJg+RKo + /FpGOSrbEUPIs4oVB/lkvCKpzduOcnWj0qyYfGRU8L3b31fuZZSjwix77gZc6hT2vcs3FXtom7Qm + eIDFVNNJUA5O8xdYK9cyylFxP0n4xoE8FZ//wIlLwuemhQpG5mGqO22JplpPyfKSvZZRjko4o/yt + bBDH8KtnTtNrI/hcG5CtAiDfT8lcCF2Uv8B95V5GOSqU3eInaMdSRCkdEr0HOcwZvY8SB+Ca/oo2 + wtJc5e2fcyujHBWv5bCg7+PCHUG0RUfTcEo8ZLHB1OW1auq/TpM/sFX/6+/fJW79+xeX//XOTluE + lFUcjoTdnCfVRZ6cH41JsWRXn5Vv/3wH7rYwJ+I3CwA= + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:20:57 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:20:56 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - vLqqOLcWkbQm3aIBHfcEmIwzrJtjtqNdArlWt57BFwl8nfWymjIeK67csuZ1woEb + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/api/cassettes/TestGcpIntegration.test_gcp_crud.yaml b/tests/integration/api/cassettes/TestGcpIntegration.test_gcp_crud.yaml new file mode 100644 index 000000000..cae0b1f3e --- /dev/null +++ b/tests/integration/api/cassettes/TestGcpIntegration.test_gcp_crud.yaml @@ -0,0 +1,345 @@ +interactions: +- request: + body: 
'{"auth_provider_x509_cert_url": "fake_url", "auth_uri": "fake_uri", "client_email": + "api-dev@datadog-sandbox.iam.gserviceaccount.com", "client_id": "123456712345671234567", + "client_x509_cert_url": "fake_url", "host_filters": "api:test", "private_key": + "fake_key", "private_key_id": "fake_private_key_id", "project_id": "datadog-apitest", + "token_uri": "fake_uri", "type": "service_account"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '389' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/integration/gcp + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:22:32 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:22:32 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - 11g4TM+MO8VJV6iUJTOff4hAGEXsIqbG4IMv2YuWygOleCGxCxx6NihCkVtjenZN + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"automute": true, "client_email": "api-dev@datadog-sandbox.iam.gserviceaccount.com", + "host_filters": "api:test2", "project_id": "datadog-apitest"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '147' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: PUT + uri: https://api.datadoghq.com/api/v1/integration/gcp + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache 
+ Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:22:32 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:22:32 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - nSRgqrrNNmPPT6VSGZq0R9QdtdJF1qxzho2//eboP+tsIQDRgfSx3bSVb1t6QyYb + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/integration/gcp + response: + body: + string: !!binary | + H4sIAAAAAAAAAzWNMQ7DIAxF7+KZMHRk6j2iCLngpq4AR2CiSlHvXhi6/vf0/noBdpXclcBp7WTg + qPKmoJ4jOIioGGVf8GClpmAgJKainjJyGsIAS6Tz/hcblviQj2XMdm9UTw6EIUgvaoPkEaBapTZw + 62bgJU39k5PSXGbMzZsbfLcfG/EYy5oAAAA= + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:22:33 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:22:32 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - f5hY0MW4w2fhZz0SAfv1+LF9me92dJz6mowUerU7gZ8k/CpuQLqOWzykixb5WZaX + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - 
'*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/integration/gcp + response: + body: + string: !!binary | + H4sIAAAAAAAAAzWNMQ7DIAxF7+KZMHRk6j2iCLngpq4AR2CiSlHvXhi6/vf0/noBdpXclcBp7WTg + qPKmoJ4jOIioGGVf8GClpmAgJKainjJyGsIAS6Tz/hcblviQj2XMdm9UTw6EIUgvaoPkEaBapTZw + 62bgJU39k5PSXGbMzZsbfLcfG/EYy5oAAAA= + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:22:34 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:22:33 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - gH++OYwf8a2QZXnzDsHHnXqPhHbI48oqNvFjE/0p0ObpMBY4290QCI5SB0tU0MAF + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"client_email": "api-dev@datadog-sandbox.iam.gserviceaccount.com", "project_id": + "datadog-apitest"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '100' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/gcp + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:22:34 GMT + Pragma: 
+ - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:22:34 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - /Lq4EjXKMzRKp9qa/TaJTTVqSY3uTwQpdi8SFIU3firYrLG0qdPC+ksTJBROerQS + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"client_email": "api-dev@datadog-sandbox.iam.gserviceaccount.com", "project_id": + "datadog-apitest"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '100' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/integration/gcp + response: + body: + string: '{}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '2' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:22:34 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:22:34 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - vwiIwb5QepaQFIQrmPfIwwVWkQ/z0inFQwNEDjqDDy4v3CsF5qbv9dnyfb7UGzLf + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/api/cassettes/TestSynthetics.test_delete_test.yaml b/tests/integration/api/cassettes/TestSynthetics.test_delete_test.yaml new file mode 100644 index 000000000..cb51aa138 --- /dev/null +++ b/tests/integration/api/cassettes/TestSynthetics.test_delete_test.yaml @@ -0,0 +1,309 @@ +interactions: +- request: + body: '{"config": {"assertions": [{"operator": "is", "target": 200, 
"type": "statusCode"}], + "request": {"method": "GET", "timeout": 30, "url": "https://example.com"}}, + "locations": ["aws:us-east-2"], "message": "Test API", "name": "Test with API", + "options": {"tick_every": 300}, "tags": ["test:synthetics_api"], "type": "api"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '320' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: POST + uri: https://api.datadoghq.com/api/v1/synthetics/tests + response: + body: + string: !!binary | + H4sIAAAAAAAAA42RzU7DMBCEXwXtlaR1nNKCbwghxI1Db6iK3GSbGJw42Ou2UZR3x25Tztys9Tcz + +zOCI0negQCtjggJ9H6vVVmoKpSGL5ee5Sa1tA4/JOvAfQKhI+GGjhokVbpC9gp2CRhbX1Q5zx6z + PAFtSknKdBeNPDnhXYrSUcoj3aJzssYQsg12d88f7yGhQo2EVSEJROe1TqCT7R9zUtTMYGs6RcZe + 8h5Yzlm22YQGhz7CsZ8ESovy5gWc8SzNWJo9bTMu8rXg68V6la3y1T1jgrHAt6ZSBzWH/0fg/H4O + bIj6mGi6g6pBjGDxx4ep4tNbHVqKhBPLJZ5l22tclKaNkUiNiXt+e93G/aoWjQ+qnE0JSOfQ3vY3 + gunRyjByoJWL8HXW6/VeTBVPR9LWGPScsWkXLEw/68fgXX4XeEQ7RHs2Tb/yUT0t+QEAAA== + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:36:26 GMT + Transfer-Encoding: + - chunked + cache-control: + - no-cache + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '12000' + x-ratelimit-period: + - '60' + x-ratelimit-remaining: + - '11991' + x-ratelimit-reset: + - '34' + status: + code: 200 + message: OK +- request: + body: '{"config": {"assertions": [{"operator": "is", "target": 200, "type": "statusCode"}], + "request": {"method": "GET", "timeout": 30, "url": "https://example.com"}}, + "locations": ["aws:us-east-2"], "message": "Test Browser", "name": "Test 
with + Browser", "options": {"device_ids": ["laptop_large"], "tick_every": 900}, "tags": + ["test:synthetics_browser"], "type": "browser"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '368' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: POST + uri: https://api.datadoghq.com/api/v1/synthetics/tests + response: + body: + string: !!binary | + H4sIAAAAAAAAA41Ry07DMBD8FbRXkuI4UBofQYgf6A1VkZtsU4MTu/ampar676zTFnHkFjkzs/M4 + QSRNYwQFXo8RW8jAj2trmtq06fGzyh/7Kve7Df8h3THyAwgjqXgcaItkmlivgztEDLDKwIVuYpay + WBRlBtY1mowbJp4+RDXGHHWkXCZ0jzHqDvnQkiXvXq46GbRokbCtNYEaRmszGHT/izsY2v4B924w + 5MJ090mUUhTPCzZ79Ilw85ZBE1DfNEEKWeSFyItqWUhVzpWcz6pn+bh4uhdCCcFpe9eajbma+A+h + ccPGdKBOEHA3cqD0OQbLLrZEPqqHB/zWvbc4a1yfLiBtXar5/W2Z6jU9upFZpThnoCNXeqvuBM5j + 0JyS0SYm8CXeZb5X1+K0T+iQ+VKI84olnL/yT1zo3jTIFU1DWO3J+domfNqBV/yqcY/hCKpi8vkH + /JAX9BgCAAA= + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:36:26 GMT + Transfer-Encoding: + - chunked + cache-control: + - no-cache + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '12000' + x-ratelimit-period: + - '60' + x-ratelimit-remaining: + - '11990' + x-ratelimit-reset: + - '34' + status: + code: 200 + message: OK +- request: + body: '{"public_ids": ["yjs-xa7-rt6"]}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '31' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: POST + uri: 
https://api.datadoghq.com/api/v1/synthetics/tests/delete + response: + body: + string: !!binary | + H4sIAAAAAAAAAz3KsQ5AMBAA0H+5lcpdSel9h01E0A6VDqInIdJ/Z7K+vAecj168m8QnScDD88ss + wKBRkyJUZHvSXBvWbUVdY2xXIDIilLCfSwzrFNzX7y2pa27VIQbymF/e+9/HXwAAAA== + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:36:27 GMT + Transfer-Encoding: + - chunked + cache-control: + - no-cache + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '12000' + x-ratelimit-period: + - '60' + x-ratelimit-remaining: + - '11994' + x-ratelimit-reset: + - '33' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: GET + uri: https://api.datadoghq.com/api/v1/synthetics/tests + response: + body: + string: !!binary | + H4sIAAAAAAAAA5VRy47bMAz8lYLX2ruysy/rVLQo+gO5FQtDazG2WslUJHrTwNh/L+Vkix566Y0g + Z8jhzAqMmTPo7ytkNrxICdEsGS1UEJcX74be2dL80dV3oavj8SATNmMhbWydzzNPyG7I/UuiU8YE + zxV4Ggw7mjecOWW95BpN5rot04A5mxFl8V5WfPh85VUwm/Cne3I8/TUKNDumtOm5V7tWNY9PIuUc + C+H9cgVDQsNoe8PSblXb1I2qm27ftHr3oNuHm+6xvXu6/6iUVkp+2fCUQK8wmdn6su2QaPxkDRtL + 43S8GSgI8CJtXryvAINx/t/AN/mOrDu4/9FA88GNRULC4yKOlHJJ5cLEHLO+vcVfJkSPVy0BeaKS + y7ev+5KHC0iLsHZKzpssGbx7vwJFTJcPweUCvjh2yfsLWdwCTSMKv1Xq7VlWULzyV7D46gYU17ck + vYlMsfcFX4KU2H/2+IrpDLoTsrB/Awd7o/lVAgAA + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:36:28 GMT + Transfer-Encoding: + - chunked + cache-control: + - no-cache + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; 
report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '12000' + x-ratelimit-period: + - '60' + x-ratelimit-remaining: + - '11994' + x-ratelimit-reset: + - '33' + status: + code: 200 + message: OK +- request: + body: '{"public_ids": ["pj9-4m9-pqf"]}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '31' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: POST + uri: https://api.datadoghq.com/api/v1/synthetics/tests/delete + response: + body: + string: !!binary | + H4sIAAAAAAAAAz3KQQqAIBAAwL/stYx1E8l9R7cIqTQwDIzsJP69Tl2HKeB89Nk7m/2db+Cp/LJk + YCAkKSQKaUZJ3GumoSNEo3SDyIjQQnrWGDYb3NfTYYQ6jUjXDnWuL5KWkHxfAAAA + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:36:28 GMT + Transfer-Encoding: + - chunked + cache-control: + - no-cache + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '12000' + x-ratelimit-period: + - '60' + x-ratelimit-remaining: + - '11993' + x-ratelimit-reset: + - '32' + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/api/cassettes/TestSynthetics.test_get_all_tests.yaml b/tests/integration/api/cassettes/TestSynthetics.test_get_all_tests.yaml new file mode 100644 index 000000000..42b907739 --- /dev/null +++ b/tests/integration/api/cassettes/TestSynthetics.test_get_all_tests.yaml @@ -0,0 +1,313 @@ +interactions: +- request: + body: '{"config": {"assertions": 
[{"operator": "is", "target": 200, "type": "statusCode"}], + "request": {"method": "GET", "timeout": 30, "url": "https://example.com"}}, + "locations": ["aws:us-east-2"], "message": "Test API", "name": "Test with API", + "options": {"tick_every": 300}, "tags": ["test:synthetics_api"], "type": "api"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '320' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: POST + uri: https://api.datadoghq.com/api/v1/synthetics/tests + response: + body: + string: !!binary | + H4sIAAAAAAAAA42RzW6DMBCEX6XaayEx0KTFt6qqqt56yK2KkAMbsIoxtZekCPHuXSek50o+WPY3 + M/szgSdFgwcJrT4hRNAPh1aXha74aTyWsc4e49xo/iFVM/cJhJ6kHztqkHTpC9Vr2EdgXX1RZWny + lGQRtLZUpG130aizl4OPUXmK00Ab9F7VyCE7trt7/njnhApbJKwKRSC7oW0j6JT5Y86amgU0ttNk + 3SVvI7JUJNucCxz7AId6IigdqpsXpCJN4kTESb5LUpltZSpWD5ucz70QUgjmja30US/h/xH44bAE + NkR9SLTdUdcgJ3D4PXBX4Tq4lksKhJfrNf4o07e4Kq0JkUiNDXN+e92F+WqDdmBVJuYIlPfobvOb + wPboFLfMtPYBvvZ63d6LrcLqSLkaWZ8KMe/ZwvaLfmLv8qvAE7ox2It5/gUbg5FA+QEAAA== + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:36:20 GMT + Transfer-Encoding: + - chunked + cache-control: + - no-cache + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '12000' + x-ratelimit-period: + - '60' + x-ratelimit-remaining: + - '11997' + x-ratelimit-reset: + - '40' + status: + code: 200 + message: OK +- request: + body: '{"config": {"assertions": [{"operator": "is", "target": 200, "type": "statusCode"}], + "request": {"method": "GET", "timeout": 30, "url": "https://example.com"}}, + "locations": ["aws:us-east-2"], 
"message": "Test Browser", "name": "Test with + Browser", "options": {"device_ids": ["laptop_large"], "tick_every": 900}, "tags": + ["test:synthetics_browser"], "type": "browser"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '368' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: POST + uri: https://api.datadoghq.com/api/v1/synthetics/tests + response: + body: + string: !!binary | + H4sIAAAAAAAAA41Ry27CMBD8lWqvTcBxChQfW1X9AW4VikyyBAsndu0NDyH+vesAVY+9Rc7M7Dwu + EEnTEEGB10PEBjLww8aaujINP9IJ8yD3eTgS/yHdMvILCCOpeO5ph2TqWG2CO0YMsM7AhXZklrJ4 + LcoMrKs1GdePPH2Maog56ki5TOgOY9Qt8qEVSz693XUyaNAiYVNpAtUP1mbQ6+4XdzS0+wPuXG/I + hfHuTJRSFIuCzZ59Ijy8ZVAH1A9NkEIWeSHyYrkqpCrnSorJYvmymM2fhVBCcNrONWZr7ib+Q6hd + vzUtqAsE/B44UPocgmUXOyIf1XSKJ915i5PadekC0s6lmj8/Vqle06EbmFWKawY6cqWP6i7gPAbN + KRltYgLf4t3me3cNjvuEFpkvhbiuWcL5O//ChR5MjVzROITVnpyvbMKnHXjFfYUHDGdQSyZffwD2 + ff+SGAIAAA== + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:36:20 GMT + Transfer-Encoding: + - chunked + cache-control: + - no-cache + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '12000' + x-ratelimit-period: + - '60' + x-ratelimit-remaining: + - '11996' + x-ratelimit-reset: + - '40' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: GET + uri: https://api.datadoghq.com/api/v1/synthetics/tests + response: + body: + string: !!binary | + 
H4sIAAAAAAAAA81TsW7bMBD9lYJrpYSSYxvSlLQoim4dvBWGcJbOEhFKZMiTHUPQv/eoyK7Remim + FuBAHN9798h7HAShJy/yH4PwBNTzVmh1QBEJ2++0KgtVcem0L2O1WMdZq/iEoA6UiZv7U0cNkip9 + AVaJbSS0KYGU6SYMHH3e+xjBU5yG0xa9hxpZdMP0D0/fv7FiB+2lclTUzOXWdIqMmzws5SKVySrj + 9icbwKFbJEqHQFgVQFxKZZrEiYyTbJOk+WKVp/LuYZnx+ihlLuUZb5zIB9FAV+mgtHemfqyAoDJ1 + 83JXmvZiqeu1jgS2oPRt4Mg3MpXaq3d48P1uvkNDZIMp0+1VHTw5fOn5WcK2d6FlQPj8/h5fobUa + Z3MtUmPCYL5+2YSBqBZNz6yFZD/gPbrzAAZhLDrgV2S08gH89nxv4/5sqjBrAlcj81Mpxy1LGDvz + B9Yunws8oDsFeTmO0VVULPQeKxa4Dgu9YuzS59gdaZK+HZadM0f2+e7AfJp5N0Lz6+jP4KyTy83P + nf8iPOvsYb1c/dvw/O7hP85KhQdVIn/X6etrsGRsoUO2wpivk5SFJG3Hn7Jce6yABAAA + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:36:21 GMT + Transfer-Encoding: + - chunked + cache-control: + - no-cache + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '12000' + x-ratelimit-period: + - '60' + x-ratelimit-remaining: + - '11998' + x-ratelimit-reset: + - '40' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: GET + uri: https://api.datadoghq.com/api/v1/synthetics/tests + response: + body: + string: !!binary | + H4sIAAAAAAAAA81TsW7bMBD9lYJrpYSSYxvSlLQoim4dvBWGcJbOEhFKZMiTHUPQv/eoyK7Remim + FuBAHN9798h7HAShJy/yH4PwBNTzVmh1QBEJ2++0KgtVcem0L2O1WMdZq/iEoA6UiZv7U0cNkip9 + AVaJbSS0KYGU6SYMHH3e+xjBU5yG0xa9hxpZdMP0D0/fv7FiB+2lclTUzOXWdIqMmzws5SKVySrj + 9icbwKFbJEqHQFgVQFxKZZrEiYyTbJOk+WKVp/LuYZnx+ihlLuUZb5zIB9FAV+mgtHemfqyAoDJ1 + 83JXmvZiqeu1jgS2oPRt4Mg3MpXaq3d48P1uvkNDZIMp0+1VHTw5fOn5WcK2d6FlQPj8/h5fobUa + 
Z3MtUmPCYL5+2YSBqBZNz6yFZD/gPbrzAAZhLDrgV2S08gH89nxv4/5sqjBrAlcj81Mpxy1LGDvz + B9Yunws8oDsFeTmO0VVULPQeKxa4Dgu9YuzS59gdaZK+HZadM0f2+e7AfJp5N0Lz6+jP4KyTy83P + nf8iPOvsYb1c/dvw/O7hP85KhQdVIn/X6etrsGRsoUO2wpivk5SFJG3Hn7Jce6yABAAA + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:36:21 GMT + Transfer-Encoding: + - chunked + cache-control: + - no-cache + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '12000' + x-ratelimit-period: + - '60' + x-ratelimit-remaining: + - '11997' + x-ratelimit-reset: + - '39' + status: + code: 200 + message: OK +- request: + body: '{"public_ids": ["yfc-i37-9mi", "txe-r2k-rwt"]}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '46' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: POST + uri: https://api.datadoghq.com/api/v1/synthetics/tests/delete + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWSknNSS1JTYkvSS0uKVayiq6GiySWKFkpGRkYGeoaGugaWoYYGlkZm1kZGeqZ + mhqbGVpqGxhYGRgo6SgVlCblZCbHZ6YAlVemJetmGpvrWuZmKtXqUGZWSUWqbpFRtm5ReYlSbWwt + ABwSVDOrAAAA + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:36:21 GMT + Transfer-Encoding: + - chunked + cache-control: + - no-cache + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '12000' 
+ x-ratelimit-period: + - '60' + x-ratelimit-remaining: + - '11997' + x-ratelimit-reset: + - '39' + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/api/cassettes/TestSynthetics.test_get_devices.yaml b/tests/integration/api/cassettes/TestSynthetics.test_get_devices.yaml new file mode 100644 index 000000000..ecbd72941 --- /dev/null +++ b/tests/integration/api/cassettes/TestSynthetics.test_get_devices.yaml @@ -0,0 +1,308 @@ +interactions: +- request: + body: '{"config": {"assertions": [{"operator": "is", "target": 200, "type": "statusCode"}], + "request": {"method": "GET", "timeout": 30, "url": "https://example.com"}}, + "locations": ["aws:us-east-2"], "message": "Test API", "name": "Test with API", + "options": {"tick_every": 300}, "tags": ["test:synthetics_api"], "type": "api"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '320' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: POST + uri: https://api.datadoghq.com/api/v1/synthetics/tests + response: + body: + string: !!binary | + H4sIAAAAAAAAA42RzU7DMBCEXwXtlaR1HFpa3xBCiBuH3lAVuck2sXDiYG9aStR3Z92mnLlZ629m + 9meEQJqGAAqsOSAk0A87a8rCVFxaLpt0fWzSHzT8Q7pm7gMIA6lw6qhBMmUodG9gm4Dz9UWVy2yV + 5QlYV2oyrrto9DGoIaSoA6Uy0i2GoGvkkA3b3T29v3FChRYJq0ITqG6wNoFOt3/M0VAzga3rDDl/ + yVuIXIrsccENnvoIx34SKD3qmxdIIbM0E2m23mRS5UslH2ZCitVK3AuhhGC+dZXZmyn8P4Iw7KbA + hqiPia7bmxrUCB6/Bp4qPgdvuaVIBDWf47due4uz0rUxEqlxcc+vL5u4X9OiG1iVi3MCOgT0t/2N + 4Hr0mkdm2oQIX2e9Xu/ZVfF0pH2NrJdCnLds4fpJP7J3+VngAf0p2ovz+RdyZmyE+QEAAA== + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:36:24 GMT + Transfer-Encoding: + - chunked + cache-control: + - no-cache + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + 
strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '12000' + x-ratelimit-period: + - '60' + x-ratelimit-remaining: + - '11993' + x-ratelimit-reset: + - '37' + status: + code: 200 + message: OK +- request: + body: '{"config": {"assertions": [{"operator": "is", "target": 200, "type": "statusCode"}], + "request": {"method": "GET", "timeout": 30, "url": "https://example.com"}}, + "locations": ["aws:us-east-2"], "message": "Test Browser", "name": "Test with + Browser", "options": {"device_ids": ["laptop_large"], "tick_every": 900}, "tags": + ["test:synthetics_browser"], "type": "browser"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '368' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: POST + uri: https://api.datadoghq.com/api/v1/synthetics/tests + response: + body: + string: !!binary | + H4sIAAAAAAAAA41Ry07DMBD8FbRXkuI4pbQ+ghA/0BuqIjfZJgYnNvampar676zTFnHkFjkzs/M4 + QSRNYwQFXo8RG8jAj1tr6so0/Lha7nNsKfcfS/5DumXkOxBGUvE4UIdk6lhtgztEDLDJwIV2Ypay + WBZlBtbVmowbJp4+RDXGHHWkXCZ0jzHqFvnQmiXvnq86GTRokbCpNIEaRmszGHT/izsY6v6AezcY + cmG6+yhKKYqnBZs9+kS4ecugDqhvmiCFLPJC5MVqXUhVLpSczxZlIcT8XgglBKftXWN25mriP4Ta + DTvTgjpBwK+RA6XPMVh20RH5qB4e8Fv33uKsdn26gNS5VPPb6zrVa3p0I7NKcc5AR670Vt0JnMeg + OSWjTUzgS7zLfC+uwWmf0CLzpRDnDUs4f+WfuNC9qZErmoaw2pPzlU34tAOv+FnhHsMR1IrJ5x+n + JaIEGAIAAA== + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:36:24 GMT + Transfer-Encoding: + - chunked + cache-control: + - no-cache + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - 
nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '12000' + x-ratelimit-period: + - '60' + x-ratelimit-remaining: + - '11992' + x-ratelimit-reset: + - '36' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: GET + uri: https://api.datadoghq.com/api/v1/synthetics/browser/devices + response: + body: + string: !!binary | + H4sIAAAAAAAAA3XOQQ7CIBAF0Ks0s+6CaquGM9SV7oxpqIyFZLBNQV003F2kQevC5Yc/L38CiQ99 + QQv8NMFTS6eAF2XJclCoO+VCKlhIWgIHEoPrh4bE2CGEN7vvW00I/CrIYg43YUKAOtayOtZ8/nG3 + m92CZavEOtESuh/Qjfevd5z/F9L6fZsGVlWCTJzTWCOI/nPz6OwQW/7sXyCWcfEEAQAA + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:36:24 GMT + Transfer-Encoding: + - chunked + cache-control: + - no-cache + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '1000' + x-ratelimit-period: + - '60' + x-ratelimit-remaining: + - '999' + x-ratelimit-reset: + - '36' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: GET + uri: https://api.datadoghq.com/api/v1/synthetics/tests + response: + body: + string: !!binary | + H4sIAAAAAAAAA81Ty27bMBD8lYLXSgklJ6qtUx8ogtx68K0whLW0lthSIkOu7LqC/r1LRXaNwofk + lN6I5czsLHc4CEJPXuTfB+EJqOej0GqPIhK232pVFqriUpY18erQxL9R8Q1BHSgTN/fHjhokVfoC + rBKbSGhTAinTTRg4+Lz3MYKnOA23LXoPNbLomunvPn17ZMUO2nPloKiZy63pFBk3ebiXi1QmH+65 + 
/dEGcOgWidIhEFYFEJdSmSZxIuNktU7SfJHl6d2NTOVyKd9LmUt5whsn8kE00FU6KO2cqT9WQFCZ + unm6KU17ttT1WkcCW1D6OnDkiUylduoVHny/nWdoiGwwZbqdqoMnh089P0s49i60DAif397iL2it + xtlci9SYsJiHr+uwENWi6Zm1kOwHvEd3WsAgjEUH/IqMVj6An5/ved1fTBV2TeBqZH4q5bhhCWNn + /sDa5c8C9+iOQV6OY3QRFQu9x4oFLsOyWu5jrCm2P5aT9PWwbJ05sM9XB+bzzLsSmr9XV4KTnSc/ + dX5BeLJFIuXd24bnXw//cVYq3KsS+btOX1+DJWMLHbIV1nyZpFVI0mb8A12OrVWABAAA + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:36:25 GMT + Transfer-Encoding: + - chunked + cache-control: + - no-cache + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '12000' + x-ratelimit-period: + - '60' + x-ratelimit-remaining: + - '11995' + x-ratelimit-reset: + - '36' + status: + code: 200 + message: OK +- request: + body: '{"public_ids": ["66h-9wh-zei", "98v-egt-pj8"]}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '46' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: POST + uri: https://api.datadoghq.com/api/v1/synthetics/tests/delete + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWSknNSS1JTYkvSS0uKVayiq6GiySWKFkpGRkYGeoaGugaWoYYGlkZm1kZmepZ + GhiZm1pqGxhYGRgo6SgVlCblZCbHZ6YAlZuZZehalmfoVqVmKtXqUGaWpUWZbmp6iW5BloVSbWwt + AHFG6JOrAAAA + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:36:25 GMT + Transfer-Encoding: + - chunked + cache-control: + - no-cache + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + 
strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '12000' + x-ratelimit-period: + - '60' + x-ratelimit-remaining: + - '11995' + x-ratelimit-reset: + - '35' + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/api/cassettes/TestSynthetics.test_get_locations.yaml b/tests/integration/api/cassettes/TestSynthetics.test_get_locations.yaml new file mode 100644 index 000000000..e5d384d5c --- /dev/null +++ b/tests/integration/api/cassettes/TestSynthetics.test_get_locations.yaml @@ -0,0 +1,322 @@ +interactions: +- request: + body: '{"config": {"assertions": [{"operator": "is", "target": 200, "type": "statusCode"}], + "request": {"method": "GET", "timeout": 30, "url": "https://example.com"}}, + "locations": ["aws:us-east-2"], "message": "Test API", "name": "Test with API", + "options": {"tick_every": 300}, "tags": ["test:synthetics_api"], "type": "api"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '320' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: POST + uri: https://api.datadoghq.com/api/v1/synthetics/tests + response: + body: + string: !!binary | + H4sIAAAAAAAAA42RzU7DMBCEXwXtlaTYjgqtbwghxI1Db6iK3GSbGJw42OuWEPXdsduUMzdr/c3M + /kzgSVHwIMHoA0IGQ9gZXZW6jqWPnyYXyzFfuX38IdVE7h0IPUk/9tQi6cqXatCwzcC65qwqBF/x + IgNjK0Xa9meNOnoZfI7KUy4S3aH3qsEYsol2N49vrzGhRoOEdakIZB+MyaBX3R9z1NTOYGd7Tdad + 85asEIw/iNjgOCQ49ZNB5VBdvUAwwXPOcr7ecCGLeynEgos15/yWMclY5Dtb672ew/8j8GE3B7ZE + Q0q0/V43ICdw+BXiVOkZnIktJcLLuzv8Vt1gcFHZLkUitTbt+eV5k/arO7Qhqgp2ykB5j+66vwns + gE7FkSOtfYIvs16u92TrdDpSrsGoF4ydttHCDrN+it7VZ4kHdGOyZ6fTL1JUoWf5AQAA + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:36:22 GMT + Transfer-Encoding: + - chunked + cache-control: + - 
no-cache + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '12000' + x-ratelimit-period: + - '60' + x-ratelimit-remaining: + - '11995' + x-ratelimit-reset: + - '38' + status: + code: 200 + message: OK +- request: + body: '{"config": {"assertions": [{"operator": "is", "target": 200, "type": "statusCode"}], + "request": {"method": "GET", "timeout": 30, "url": "https://example.com"}}, + "locations": ["aws:us-east-2"], "message": "Test Browser", "name": "Test with + Browser", "options": {"device_ids": ["laptop_large"], "tick_every": 900}, "tags": + ["test:synthetics_browser"], "type": "browser"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '368' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: POST + uri: https://api.datadoghq.com/api/v1/synthetics/tests + response: + body: + string: !!binary | + H4sIAAAAAAAAA41Ry07DMBD8FbRXktZx1EJ8BCF+oDdURW6yTQ1ObOxNS6n676zTFnHkFjnz2Jk5 + QSRNYwQFXo8RW8jAjxtrmtq0/FgdNvniQ+ff75/8h3THyDcgjKTicaAdkmlivQnuEDHAOgMXuolZ + yuKxKDOwrtFk3DDx9CGqMeaoI+UyoXuMUXfIRiuWvHu66mTQokXCttYEahitzWDQ/S/uYGj3B9y7 + wZALk+9ClFIUD+xMR58It9syaALqmyZIIYu8EHlRrQqpyqWScrZYLqtqeS+EEoLT9q41W3M94j+E + xg1b04E6QcDPkQOlzzFYvmJH5KOaz/FL997irHF9ckDauVTz68sq1Wt6dCOzSnHOQEeu9FbdCZzH + oDklo01M4Eu8y3zPrsVpn9Ah86UQ5zVLOH/ln7jQvWmQK5qGsNqT87VN+LQDr/hR4x7DEVTF5PMP + bzV2lhgCAAA= + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:36:22 GMT + Transfer-Encoding: + - chunked + cache-control: + - no-cache + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri 
https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '12000' + x-ratelimit-period: + - '60' + x-ratelimit-remaining: + - '11994' + x-ratelimit-reset: + - '38' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: GET + uri: https://api.datadoghq.com/api/v1/synthetics/locations + response: + body: + string: !!binary | + H4sIAAAAAAAAA62W224bNxCGX0XQVQuEBc8H3aUuGgRwWwNO24u2CIbkUF54vSvsoYEa+N3D1brV + riO5sKQrAVoO/3+G3wz5eVnWAbqirtrl6o/PyyIuV0v41K5gQ6q66e4Q2o6w5ZtlBQ+Yv32o77f1 + 4pu3v99+u3x8cyyA7wNuse7LIwFt3Xd3091/6h88FC+tfu7ntqjWsKkb/N+gqadtrHB7KCIACVh1 + DZRTW1dQQYTF1fjlUBz2h+J+bKC6T33THQnZVXgqdNvV4f6uLh+OBHzC+Wm8b7CEKr60epL2dV3F + unppsdif2w00RXtobQvkq0P4s6cURb24gb48SEffjkETN7/cFceWPk/z5+8WV1AWqW6qAg55ytvv + YqbbN7g+kOw/fYOrwX/f7nP9rWjWxbj18H1C9qZcrcOGFFWH62bsE1KCJ4qbqEVEZbQMLkWIWqXI + QEHQEizu9353dbN4PwlfXIPfd07evxuMr2sy/LbbKrdcV4R20xR/Q4f/dmdOHsM2lEiYFkJTo4Uh + NDkuVFL5D+QKvceA2tjAk48+BDXp2kHjXU0+5N/b/zRuRo3rpwlwfUij38RsI17AsRGMKOYZRE6j + M4FGFrgWQRqZuAtSOD8p2+D09Y6zxiWcGkscd84oLwVGzlhEpzln1tlomUlS+XNra4ydO007ACCE + XG+I9fooCh89tEUgJpOndzQoLjIGRBsErhV3nokMADopuRXRaOsTM0zuHXdpB8LbEH4YtY4i8fH7 + g1rnOjfCPXHsGJVEa6tzaaXXOiQtQFNQTGutDHeGK3qW87nWuc4tVaNzxayWhA1sWPA2tz1aRCGV + 8dpQHSF57iYdeELN51qnOi8e8u3Y7ZnJpnfMcEcdkV4h1aBZHmCSytyORhmHHKWzSurTmBkV9+RM + FS+VxYR8SxBRBZqkMJxSzb3XVjoWkvQ0csf0Sfw8z2KqeKEsLBvPQupMO9FBqmR1noTShWAN+uDQ + COQehTPxtC54lsVM8TJZzCg1eSzZxKkdngNSwwCZ4pJ6br2L0ovJhH9FR8yzmCuemsXTxbabo2NP + D3NUOpKHjgPIIFnUFpjl1FiaG8U4ECE3xUk0/Tpeo1+pXcD99ExZ7oVcHkVF4klHRzNYFhVGj8kZ + k8Cc636mdr77yXSmzhIhfIjRO5uc89xLVECTZtSzfBfIOHnevYKefe3naue7n5EoCLrgqRvpsRoo + pPzGScEH4SJQN3kznOR+rrZ8/OvxC6m5rHu+DQAA + 
headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:36:22 GMT + Transfer-Encoding: + - chunked + cache-control: + - no-cache + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '12000' + x-ratelimit-period: + - '60' + x-ratelimit-remaining: + - '11999' + x-ratelimit-reset: + - '38' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: GET + uri: https://api.datadoghq.com/api/v1/synthetics/tests + response: + body: + string: !!binary | + H4sIAAAAAAAAA81TwY7TMBD9FeQrya7jqoXktIAQ4sahN1RF03iaeNeJXdtp6Ub5d8bZtFSoB/bE + 3qzxe2/eeJ4HFtAHz4qfA/MBQk9HptUBWcJsv9WqKpWk0uNznYrlKf3odnQToI6UiVv4UxcaDKry + JVjFNgnTpoKgTDdh4OiL3qcIPqQi3rboPdRIomuiv/v04zspdtBeKkcVmrncmk4F4yYPS74QPPsg + qP3JRnDslrDKIQSUJQQqCS6yNONplq8zUSxWhRB3mcizLHvPecH5GW8cKwbWQCd1VNo5Uz9ICCBN + 3ezvKtNeLHW91gnDFpS+DRxpIiPVTr3Cg++38wxNCDaaMt1O1dGTw31PzxKPvYstI8IX9/f4C1qr + cTbXYmhMXMy3r+u4ENWi6Ym14OQHvEd3XsDAjEUH9IqEVj6CX57vZd1fjIy7DuBqJL7gfNyQhLEz + fyDt6qnEA7pTlOfjmFxFxULvUZLAdVjy4zZdPkH6/LifpG+HZevMkXy+OjCfZ96N0Py5uhGcxWXy + c+d/CM9ytcrz1f8Nz98e3nBWJB5UhfRdp6+vwQZjSx2zFdd8naQ8Jmkz/gZOvwRsgAQAAA== + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:36:23 GMT + Transfer-Encoding: + - chunked + cache-control: + - no-cache + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + vary: + - 
Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '12000' + x-ratelimit-period: + - '60' + x-ratelimit-remaining: + - '11996' + x-ratelimit-reset: + - '38' + status: + code: 200 + message: OK +- request: + body: '{"public_ids": ["jzg-25y-8rf", "9wb-5ka-zjq"]}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '46' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: POST + uri: https://api.datadoghq.com/api/v1/synthetics/tests/delete + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWSknNSS1JTYkvSS0uKVayiq6GiySWKFkpGRkYGeoaGugaWoYYGlkZm1kZGesZ + WZgbWJprGxhYGRgo6SgVlCblZCbHZ6YAlWdVpesamVbqWhSlKdXqUGaWZXmSrml2om5VVqFSbWwt + ABzAO/KrAAAA + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:36:23 GMT + Transfer-Encoding: + - chunked + cache-control: + - no-cache + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '12000' + x-ratelimit-period: + - '60' + x-ratelimit-remaining: + - '11996' + x-ratelimit-reset: + - '37' + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/api/cassettes/TestSynthetics.test_get_update_pause_test.yaml b/tests/integration/api/cassettes/TestSynthetics.test_get_update_pause_test.yaml new file mode 100644 index 000000000..61665fc1b --- /dev/null +++ b/tests/integration/api/cassettes/TestSynthetics.test_get_update_pause_test.yaml @@ -0,0 +1,665 @@ +interactions: +- request: + body: '{"config": {"assertions": [{"operator": "is", "target": 200, "type": "statusCode"}], + "request": 
{"method": "GET", "timeout": 30, "url": "https://example.com"}}, + "locations": ["aws:us-east-2"], "message": "Test API", "name": "Test with API", + "options": {"tick_every": 300}, "tags": ["test:synthetics_api"], "type": "api"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '320' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: POST + uri: https://api.datadoghq.com/api/v1/synthetics/tests + response: + body: + string: !!binary | + H4sIAAAAAAAAA42Ry26DMBBFf6WabSG1jYhS76qqqrrrIrsqQg5MwKrB1B7nIcS/105I190hc+49 + nvEEnhQFDxKMPiJkMIa90XWlm3h0Eod8XZ7zriviH1Jt5L6A0JP0l4E6JF37So0adhlY115TheAb + XmRgbK1I2+GaUScvg89RecpFonv0XrUYJdtY9/Dy+RENDRokbCpFIIdgTAaD6v+Yk6ZuAXs7aLLu + 6itZIRhfRyNdxgSn+2RQO1T3LhBM8JyznD9vuZDFWvL1qijFZlM+MiYZi3xvG33Qi/w/AR/2i7Aj + GpPRDgfdgpzA4U+IU6XP4Ey8UiK8fHrCs+pHg6va9kmJ1Nm05/e3bdqv7tGGmCrYnIHyHt19fxPY + EZ2KI0da+wTfZr293qtt0tORci3GvGBs3sUKOy75KXbX3xUe0V1SPZvnX7DMPlH5AQAA + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:36:16 GMT + Transfer-Encoding: + - chunked + cache-control: + - no-cache + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '12000' + x-ratelimit-period: + - '60' + x-ratelimit-remaining: + - '11999' + x-ratelimit-reset: + - '44' + status: + code: 200 + message: OK +- request: + body: '{"config": {"assertions": [{"operator": "is", "target": 200, "type": "statusCode"}], + "request": {"method": "GET", "timeout": 30, "url": "https://example.com"}}, + "locations": ["aws:us-east-2"], "message": "Test Browser", "name": "Test with + Browser", "options": 
{"device_ids": ["laptop_large"], "tick_every": 900}, "tags": + ["test:synthetics_browser"], "type": "browser"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '368' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: POST + uri: https://api.datadoghq.com/api/v1/synthetics/tests + response: + body: + string: !!binary | + H4sIAAAAAAAAA41Ry07DMBD8FbRXkuI4NFAfQYgf6A1VkZtsEwsnNvampar676zTFnHkFjkzs/M4 + QSRNUwQFXk8RW8jAT1trmtq0/FgdqvwoZd63kv+Q7hj5AYSRVDyO1COZJtbb4A4RA2wycKGbmaUs + nosyA+saTcaNM08foppijjpSLhN6wBh1h3xozZJ3L1edDFq0SNjWmkCNk7UZjHr4xR0M9X/AgxsN + uTDfXYpSiqJ6ZLNHnwg3bxk0AfVNE6SQRV6IvFitC6nKShXVopKrp9XyXgglBKcdXGt25mriP4TG + jTvTgTpBwK+JA6XPKVh20RP5qB4e8FsP3uKicUO6gNS7VPP72zrVawZ0E7NKcc5AR670Vt0JnMeg + OSWjTUzgS7zLfK+uxXmf0CHzpRDnDUs4f+WfuNC9aZArmoew2pPztU34tAOv+FnjHsMR1IrJ5x81 + Q5vRGAIAAA== + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:36:16 GMT + Transfer-Encoding: + - chunked + cache-control: + - no-cache + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '12000' + x-ratelimit-period: + - '60' + x-ratelimit-remaining: + - '11998' + x-ratelimit-reset: + - '44' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: GET + uri: https://api.datadoghq.com/api/v1/synthetics/tests/w2f-65x-hh3 + response: + body: + string: !!binary | + 
H4sIAAAAAAAAAz1QTW+DMAz9K5PPsDJQe+A2TdO02w69TRVKUxeiEpLFTilC/Pc5LdsteX5f9gzE + iiNBDb25ImTg47E3ujEngcbynO+2t7zrKpmwaoX3DYzENU0Dd8hGU6O8gUMGvdOKjRvuHDVSHSlH + RZyXaWqRSLUopnuRP71+fYrjoOw/MhruVti6wbAL9w7boiqLl10l8ZNP5JSWAcXj+u+YvQDaDWfT + Qj1DwJ8oEekZQy+KxKB6s8Gbsr7HZ+2sCCxy59KSH+/7tJyx6KKoqmLJQBFh+FtmBucxKGkkbEOJ + /KjyON2bO6W7sQotir4siuUgFs6v+lm89aXBK4Yp2RfL8gtiU4uydgEAAA== + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:36:16 GMT + Transfer-Encoding: + - chunked + cache-control: + - no-cache + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '1000' + x-ratelimit-period: + - '60' + x-ratelimit-remaining: + - '999' + x-ratelimit-reset: + - '44' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: GET + uri: https://api.datadoghq.com/api/v1/synthetics/tests/6w6-y22-hd2 + response: + body: + string: !!binary | + H4sIAAAAAAAAA01QQU7DQAz8CvI5oSGFSuQIQnygN1RF28QkK7LxsvY2RFH/jjdtETfLnhnPzAIs + RiJDBd5ExhYy8PE42Ka2rS530y6fyzLv21IvYjpFfoAgS8XzKD2Kbbg+BpoYAxwyGKgxYmlccWbi + KnKOhiUv09Uhs+lQhfcqcfdy5WUwGve3naz0/06ORisUVj9PxbYsHnaPamX2iXD7nEFD46ftoFog + 4HdU9TTGMCioF/FcbTb4Y5wf8L4hp2EcSk8p4/vbPmWzDikqa1ucMzCseW45FiCPwagJRVtO4Mv3 + S3ev1OJaTuhQ+WVRnA8qQf7KX6DFk21QE6ytDMYL+XpI+FSKVvhV4wnDDNWzks+/GH2NIpUBAAA= + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:36:17 GMT + Transfer-Encoding: + - chunked + cache-control: + - no-cache + content-encoding: + - gzip + content-security-policy: + - 
frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '1000' + x-ratelimit-period: + - '60' + x-ratelimit-remaining: + - '998' + x-ratelimit-reset: + - '43' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: GET + uri: https://api.datadoghq.com/api/v1/synthetics/tests/w2f-65x-hh3/results + response: + body: + string: !!binary | + H4sIAAAAAAAAAzVQTW+EIBD9L3N2DQLiyrH33nprGkMVt6YsGGboZmP87x038TDJy7wvhg2CQxpo + uXskd1+H2dP44yewjZG95uk6IUQF2WMJhGA/N2AlFYa8Zu34+7IfDqWNNn1nOtOf1J/PuKTI7JkB + dgMqMfoAdnYBfQU+55TfPaK7cU4sIVTAmUu8ccsG85KR3p50dIj6yty4vqCpAJFjZFNz4RRZ3UgG + 6RFDcnyEqLmWEjkWtW2t9wpWh3jcR7lwcy7x47lysDionL79MI1gwT3QFrx4/pyLhPPpw8JG0Erq + VipplFBG991V9LB/7f8xhje6SwEAAA== + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:36:17 GMT + Transfer-Encoding: + - chunked + cache-control: + - no-cache + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '1000' + x-ratelimit-period: + - '60' + x-ratelimit-remaining: + - '999' + x-ratelimit-reset: + - '43' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: GET + uri: 
https://api.datadoghq.com/api/v1/synthetics/tests/6w6-y22-hd2/results + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWykksLokvycxNLS5JzC2IT0stSc5ITVGyMjQzsjQBYnNzAwMDHaWi1OLSnJJi + Javo2FoAX33hDDUAAAA= + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:36:17 GMT + Transfer-Encoding: + - chunked + cache-control: + - no-cache + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '1000' + x-ratelimit-period: + - '60' + x-ratelimit-remaining: + - '998' + x-ratelimit-reset: + - '43' + status: + code: 200 + message: OK +- request: + body: '{"config": {"assertions": [{"operator": "isNot", "target": 404, "type": + "statusCode"}], "request": {"method": "GET", "timeout": 30, "url": "https://example.com"}}, + "locations": ["aws:us-west-2"], "message": "Test API edited", "name": "Test + with API edited", "options": {"tick_every": 60}, "tags": ["test:edited"], "type": + "api"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '328' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: PUT + uri: https://api.datadoghq.com/api/v1/synthetics/tests/w2f-65x-hh3 + response: + body: + string: !!binary | + H4sIAAAAAAAAA22RzWrDMBCEX6XstXYiWbFxdSullF5KD7mVEBR7Y4valivJcYLJu3eVOIFCb2L1 + zcz+TOC88oMDCY0+IETQD7tGF1tdUmlM9nGWHuO6FvTjVUXcF3h0XmKpPZawicDY6kKLhOdcRNCY + QnltugurRicHF48kiZNAt+icqpDM11R7eP58f5itIiixQTLdKg+yG5omgk61d3TUvv7Lt6bT3thL + espEwnhG+f7UB43qNfVcWFQ3S0hYwmPOYv605okUmeTZQqRJnqePjEnGiG9Nqfd67uEfQb5gIl2J + /C5ww24OrL3vQ6Lp9roCOYHFn4FmDM/BNtRSIJxcLvGo2r7BRWHaEIm+NmHbb6/rsGXdohlIJdg5 + 
AuUc2ts2JzA9WkUjE63dh/GBv457PeOLKcMNvbIVksWKrc4bcjH9bDGRffG9xQPaE8iMnc+/p7OC + UAECAAA= + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:36:18 GMT + Transfer-Encoding: + - chunked + cache-control: + - no-cache + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '500' + x-ratelimit-period: + - '60' + x-ratelimit-remaining: + - '499' + x-ratelimit-reset: + - '43' + status: + code: 200 + message: OK +- request: + body: '{"config": {"assertions": [{"operator": "isNot", "target": 404, "type": + "statusCode"}], "request": {"method": "GET", "timeout": 30, "url": "https://example.com"}}, + "locations": ["aws:us-west-2"], "message": "Test Browser edited", "name": "Test + Browser edited", "options": {"device_ids": ["tablet"], "tick_every": 1800}, + "tags": ["test:edited"], "type": "api"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '359' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: PUT + uri: https://api.datadoghq.com/api/v1/synthetics/tests/w2f-65x-hh3 + response: + body: + string: !!binary | + H4sIAAAAAAAAA3WRzW6DMBCEX6XaayGxISDiY6uqt55yq6LIgQ1YBUz9ExIh3r3rhPTU3pD5ZnZ2 + dgLrpPMWBLTqjBDB4I+tKg+qoqcxOcV5dombJqU/TtbEfYJD6wRWymEF+wi0qW90mvCCpxG0upRO + 6f7GytEKb+ORJHES6A6tlTWS+Y7enl6MHi2ap8UuggpbJOODdCB637YR9LL7H+90r5w2twAZSxPG + c4rgrkOQyEFR7NKgfDhCwhIecxbz7Y4nIs0Fz1dplhRF9syYYIz4TlfqpJYIfwiK1Sbfclb8Cqw/ + LgMb54YwUfcnVYOYwOC3pzXDpzctRQqEFes1XmQ3tLgqdRdGomt0KPz9bReKVh1qT6qUzRFISwU9 + Cp1AD2gkrUy0sh/aBf6+7v2Sr7oKZ3TS1EgWG7aZ9+Sih8VioorPqkSq7H5NeaTGw2mcKr8OeEZz + BcELxub5B4wRAFQeAgAA + headers: + 
Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:36:18 GMT + Transfer-Encoding: + - chunked + cache-control: + - no-cache + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '500' + x-ratelimit-period: + - '60' + x-ratelimit-remaining: + - '498' + x-ratelimit-reset: + - '42' + status: + code: 200 + message: OK +- request: + body: '{"new_status": "paused"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '24' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: PUT + uri: https://api.datadoghq.com/api/v1/synthetics/tests/w2f-65x-hh3/status + response: + body: + string: 'true' + headers: + Connection: + - keep-alive + Content-Length: + - '4' + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:36:18 GMT + cache-control: + - no-cache + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '500' + x-ratelimit-period: + - '60' + x-ratelimit-remaining: + - '497' + x-ratelimit-reset: + - '42' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: GET + uri: https://api.datadoghq.com/api/v1/synthetics/tests + response: + body: + string: !!binary | + 
H4sIAAAAAAAAA8WSMW/bMBCF/0rAtZJDUbYiaypaFN06eSsM4SydJaKUyJBUFEPwf+/Rsj0YcZKp + 3Qjeu+PjvW9iHp13rPg9MefBD3RkBgaHNYuYGXZKVqWs6XIU+zhbvcZtm1LFQxOaTt0F1tKTfhsx + pSvwUvenGoyuGFw8kiQWodqhc9AgDdvQ3cM3q0eH9uHcHrEeuvvFTvfSa3sys+Kp4EmWko+DCS1g + JJmqLAIZKcHTleAiiRMeJ+tNIoo0K5Jska5Enq++cF5wftFry4qJtdDXKkzaW918rcFDrZv2eVHp + joSzsX5QKmLYgVRvC4/0RV3LvbzvIV/k4ilJllcPbtid/9B6b4Ip3e9lEzxZfB5oT+E42PBkULji + 8RFfoTMKz+Y69K0OCf38sQnJyA71QF0pJz/gaMOXRCamDVqgLZJaul/aB/28wTn877rGU7q2QRqx + 5MvjlqZocx4xsRpfZIWUwhw/7BQJKVsvqz8lvqA9sCLJOT8eo4+IysYsPggRt7W4Jcodet8izXTl + bqbkHl0I79N1TW9mbpS+vYBHpTegWl5Xcnn5E2BlYv20/q9gZYtbD/+Mow8gEgTDuxApMF6bUgXo + blFaB5K2x79YoHJZpwQAAA== + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Tue, 19 Oct 2021 12:36:19 GMT + Transfer-Encoding: + - chunked + cache-control: + - no-cache + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '12000' + x-ratelimit-period: + - '60' + x-ratelimit-remaining: + - '11999' + x-ratelimit-reset: + - '41' + status: + code: 200 + message: OK +- request: + body: '{"public_ids": ["w2f-65x-hh3", "6w6-y22-hd2"]}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '46' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.42.0 (python 3.9.7; os darwin; arch x86_64) + method: POST + uri: https://api.datadoghq.com/api/v1/synthetics/tests/delete + response: + body: + string: !!binary | + H4sIAAAAAAAAA6XMQQqEMAxA0btkq5Ek1qI5hzsRUVup4EKw4gzSu8+svIDbz+Pf4Pzmo3dD9Ec8 + QLv7KWMEBSFhZEJuWhYtrXJTmKo2hjIiJYIc9nPa1nlY3Z9fsqCtPhhCCSl/97KXxa8IBieQ+vQD + JGC9jKsAAAA= + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - 
Tue, 19 Oct 2021 12:36:19 GMT + Transfer-Encoding: + - chunked + cache-control: + - no-cache + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + pragma: + - no-cache + strict-transport-security: + - max-age=15724800; + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '12000' + x-ratelimit-period: + - '60' + x-ratelimit-remaining: + - '11998' + x-ratelimit-reset: + - '41' + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/api/constants.py b/tests/integration/api/constants.py new file mode 100644 index 000000000..0b9030900 --- /dev/null +++ b/tests/integration/api/constants.py @@ -0,0 +1,10 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +import os + +API_KEY = os.environ.get("DD_TEST_CLIENT_API_KEY", "a" * 32) +APP_KEY = os.environ.get("DD_TEST_CLIENT_APP_KEY", "a" * 40) +API_HOST = os.environ.get("DATADOG_HOST") +MONITOR_REFERENCED_IN_SLO_MESSAGE = "monitor {} is referenced in slos: {}" +TEST_USER = os.environ.get("DD_TEST_CLIENT_USER") diff --git a/tests/integration/api/test_api.py b/tests/integration/api/test_api.py index 208146836..7efc96f8d 100644 --- a/tests/integration/api/test_api.py +++ b/tests/integration/api/test_api.py @@ -1,499 +1,390 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc # python import datetime +import json +import mock import os +import re import time -import unittest -import requests -# 3p -from nose.plugins.attrib import attr -from nose.tools import assert_raises -from nose.tools import assert_equal as eq -from nose.tools import assert_true as ok +import requests +import pytest # datadog from datadog import initialize -from datadog import api as dog -from datadog.api.exceptions import ApiError -from datadog.util.compat import json -from tests.util.snapshot_test_utils import ( - assert_snap_not_blank, assert_snap_has_no_events -) - -TEST_USER = os.environ.get('DATADOG_TEST_USER') -API_KEY = os.environ.get('DATADOG_API_KEY') -APP_KEY = os.environ.get('DATADOG_APP_KEY') -API_HOST = os.environ.get('DATADOG_HOST') -FAKE_PROXY = { - "https": "http://user:pass@10.10.1.10:3128/", -} - - -class TestDatadog(unittest.TestCase): - host_name = 'test.host.unit' - wait_time = 10 - - def setUp(self): - initialize(api_key=API_KEY, app_key=APP_KEY, api_host=API_HOST) - dog._swallow = False - - @attr("tags") - def test_tags(self): - # post a metric to make sure the test host context exists - hostname = self.host_name - dog.Metric.send(metric='test.tag.metric', points=1, host=hostname) - - dog.Tag.get_all() - - dog.Tag.delete(hostname) - assert len(dog.Tag.get(hostname)['tags']) == 0 - - dog.Tag.create(hostname, tags=['test.tag.1', 'test.tag.2'], source='datadog') - new_tags = dog.Tag.get(hostname)['tags'] - assert len(new_tags) == 2 - assert 'test.tag.1' in new_tags - assert 'test.tag.2' in new_tags - - dog.Tag.create(hostname, tags=['test.tag.3'], source='datadog') - new_tags = dog.Tag.get(hostname)['tags'] - assert len(new_tags) == 3 - assert 'test.tag.1' in new_tags - assert 'test.tag.2' in new_tags - assert 'test.tag.3' in new_tags - - dog.Tag.update(hostname, tags=['test.tag.4'], source='datadog') - new_tags = dog.Tag.get(hostname)['tags'] - assert len(new_tags) == 1 - assert 'test.tag.4' in new_tags - 
- dog.Tag.delete(hostname, source='datadog') - assert len(dog.Tag.get(hostname)['tags']) == 0 - - def test_events(self): - now = datetime.datetime.now() - - now_ts = int(time.mktime(now.timetuple())) - now_title = 'end test title ' + str(now_ts) - now_message = 'test message ' + str(now_ts) - - before_ts = int(time.mktime((now - datetime.timedelta(minutes=5)).timetuple())) - before_title = 'start test title ' + str(before_ts) - before_message = 'test message ' + str(before_ts) - - now_event_id = dog.Event.create(title=now_title, text=now_message, - date_happened=now_ts)['event']['id'] - before_event_id = dog.Event.create(title=before_title, text=before_message, - date_happened=before_ts)['event']['id'] - time.sleep(self.wait_time) - - now_event = dog.Event.get(now_event_id) - before_event = dog.Event.get(before_event_id) - - self.assertEquals(now_event['event']['text'], now_message) - self.assertEquals(before_event['event']['text'], before_message) - - event_id = dog.Event.create(title='test host and device', - text='test host and device', - host=self.host_name,)['event']['id'] - time.sleep(self.wait_time) - event = dog.Event.get(event_id) - - self.assertEquals(event['event']['host'], self.host_name) - - event_id = dog.Event.create(title='test event tags', - text='test event tags', - tags=['test-tag-1', 'test-tag-2'])['event']['id'] - time.sleep(self.wait_time) - event = dog.Event.get(event_id) - - assert 'test-tag-1' in event['event']['tags'] - assert 'test-tag-2' in event['event']['tags'] - - def test_aggregate_events(self): - now_ts = int(time.time()) - agg_key = 'aggregate_me ' + str(now_ts) - msg_1 = 'aggregate 1' - msg_2 = 'aggregate 2' - - # send two events that should aggregate - event1_id = dog.Event.create(title=msg_1, text=msg_1, - aggregation_key=agg_key)['event']['id'] - event2_id = dog.Event.create(title=msg_2, text=msg_2, - aggregation_key=agg_key)['event']['id'] - time.sleep(self.wait_time) - - event1 = dog.Event.get(event1_id) - event2 = 
dog.Event.get(event2_id) - - self.assertEquals(msg_1, event1['event']['text']) - self.assertEquals(msg_2, event2['event']['text']) - - # TODO FIXME: Need the aggregation_id to check if they are attached to the - # same aggregate - - def test_git_commits(self): - """Pretend to send git commits""" - event_id = dog.Event.create(title="Testing git commits", text="""$$$ - eac54655 * Merge pull request #2 from DataDog/alq-add-arg-validation (alq@datadoghq.com) - |\ - 760735ef | * origin/alq-add-arg-validation Simple typechecking between metric and metrics (matt@datadoghq.com) - |/ - f7a5a23d * missed version number in docs (matt@datadoghq.com) - $$$""", event_type="commit", source_type_name="git", event_object="0xdeadbeef")['event']['id'] - - time.sleep(self.wait_time) - event = dog.Event.get(event_id) - - self.assertEquals(event['event']['title'], "Testing git commits") - - def test_comments(self): - now = datetime.datetime.now() - now_ts = int(time.mktime(now.timetuple())) - before_ts = int(time.mktime((now - datetime.timedelta(minutes=15)).timetuple())) - message = 'test message ' + str(now_ts) - comment_id = dog.Comment.create(handle=TEST_USER, message=message)['comment']['id'] - time.sleep(self.wait_time) - event = dog.Event.get(comment_id) - eq(event['event']['text'], message) - dog.Comment.update(comment_id, handle=TEST_USER, message=message + ' updated') - time.sleep(self.wait_time) - event = dog.Event.get(comment_id) - eq(event['event']['text'], message + ' updated') - reply_id = dog.Comment.create(handle=TEST_USER, message=message + ' reply', - related_event_id=comment_id)['comment']['id'] - time.sleep(3) - stream = dog.Event.query(start=before_ts, end=now_ts + 100)['events'] - ok(stream is not None, msg="No events found in stream") - ok(isinstance(stream, list), msg="Event stream is not a list") - ok(len(stream) > 0, msg="No events found in stream") - comment_ids = [x['id'] for x in stream[0]['comments']] - ok(reply_id in comment_ids, - msg="Should find {0} in 
{1}".format(reply_id, comment_ids)) - # Delete the reply - dog.Comment.delete(reply_id) - # Then the post itself - dog.Comment.delete(comment_id) - time.sleep(self.wait_time) - try: - dog.Event.get(comment_id) - except: - pass - else: - assert False - - @attr('timeboards', 'validation') - def test_timeboard_validation(self): - graph = { - "title": "test metric graph", - "definition": - { - "requests": [{"q": "testing.metric.1{host:blah.host.1}"}], - "viz": "timeseries", - } - } - # No title - try: - dog.Timeboard.create(title=None, description='my api timeboard', graphs=[graph]) - assert False, "Should report an api error" - except ApiError as e: - exception_msg = e.args[0]['errors'][0] - eq(exception_msg, "The parameter 'title' is required") - - # No description - try: - dog.Timeboard.create(title='api timeboard', description=None, graphs=[graph]) - assert False, "Should report an api error" - except ApiError as e: - exception_msg = e.args[0]['errors'][0] - eq(exception_msg, "The parameter 'description' is required") - - # No graph - try: - dog.Timeboard.create(title='api timeboard', description='my api timeboard', graphs=None) - assert False, "Should report an api error" - except ApiError as e: - exception_msg = e.args[0]['errors'][0] - eq(exception_msg, "The parameter 'graphs' is required") - - # Graphs not list - try: - dog.Timeboard.create(title='api timeboard', description='my api timeboard', - graphs=graph) - assert False, "Should report an api error" - except ApiError as e: - exception_msg = e.args[0]['errors'][0] - eq(exception_msg, "The 'graphs' parameter is required to be a list") - - # Empty list of graphs - try: - dog.Timeboard.create(title='api timeboard', description='my api timeboard', graphs=[]) - assert False, "Should report an api error" - except ApiError as e: - exception_msg = e.args[0]['errors'][0] - eq(exception_msg, "The 'graphs' parameter is required") - - # None in the graph list - try: - dog.Timeboard.create(title='api timeboard', 
description='my api timeboard', - graphs=[graph, None]) - assert False, "Should report an api error" - except ApiError as e: - exception_msg = e.args[0]['errors'][0] - eq(exception_msg, "The 'graphs' parameter contains None graphs") - - # Dashboard not found - try: - dog.Timeboard.get(999999) - assert False, "Should report an api error" - except ApiError as e: - exception_msg = e.args[0]['errors'][0] - eq(exception_msg, "No dashboard matches that dash_id.") - - @attr('dashboards') - def test_timeboard(self): +from .constants import MONITOR_REFERENCED_IN_SLO_MESSAGE + +WAIT_TIME = 10 + + +class TestDatadog: + host_name = "test.host.integration" + + @pytest.fixture(autouse=True) # TODO , scope="class" + def cleanup_roles(self, dog): + """Prepare Azure Integration.""" + self.cleanup_role_uuids = [] + + yield + + # Ensure we cleanup any resources we created during tests + # These should be removed during tests, but here as well in case of test failures + for uuid in self.cleanup_role_uuids: + dog.Roles.delete(uuid) + + def test_tags(self, dog, get_with_retry, freezer): + with freezer: + hostname = "test.tags.host" + str(int(time.time())) + + # post a metric to make sure the test host context exists + dog.Metric.send(metric="test.tag.metric", points=1, host=hostname) + # Wait for host to appear + get_with_retry("Tag", hostname) + + # Ready to test + dog.Tag.create(hostname, tags=["test_tag:1", "test_tag:2"], source="datadog") + get_with_retry( + "Tag", + hostname, + retry_condition=lambda r: "test_tag:1" not in r["tags"] + or "test_tag:2" not in r["tags"], + retry_limit=30, + source="datadog", + ) + + # The response from `update` can be flaky, so let's test that it work by getting the tags + dog.Tag.update(hostname, tags=["test_tag:3"], source="datadog") + get_with_retry( + "Tag", + hostname, + retry_condition=lambda r: r["tags"] != ["test_tag:3"], + retry_limit=30, + source="datadog", + ) + + all_tags = dog.Tag.get_all() + assert "tags" in all_tags + + assert ( + 
dog.Tag.delete(hostname, source="datadog") is None + ) # Expect no response body on success + + @pytest.mark.vcr(match_on=("method", "scheme", "host", "port", "path", "query", "body")) + def test_events(self, dog, get_with_retry, freezer): + with freezer: + now_ts = int(time.time()) + before_ts = now_ts - datetime.timedelta(minutes=5).total_seconds() + + now_title = "end test title " + str(now_ts) + now_message = "test message " + str(now_ts) + + before_title = "start test title " + str(before_ts) + before_message = "test message " + str(before_ts) + + now_event = dog.Event.create( + title=now_title, text=now_message, date_happened=now_ts + ) + before_event = dog.Event.create( + title=before_title, text=before_message, date_happened=before_ts + ) + + assert now_event["event"]["title"] == now_title + assert now_event["event"]["text"] == now_message + assert now_event["event"]["date_happened"] == now_ts + assert before_event["event"]["title"] == before_title + assert before_event["event"]["text"] == before_message + assert before_event["event"]["date_happened"] == before_ts + + # The returned event doesn"t contain host information, we need to get it separately + event_id = dog.Event.create( + title="test host", text="test host", host=self.host_name + )["event"]["id"] + event = get_with_retry("Event", event_id) + assert event["event"]["host"] == self.host_name + + event_id = dog.Event.create( + title="test no hostname", + text="test no hostname", + attach_host_name=False, + alert_type="success", + )["event"]["id"] + event = get_with_retry("Event", event_id) + assert not event["event"]["host"] + assert event["event"]["alert_type"] == "success" + + event = dog.Event.create( + title="test tags", text="test tags", tags=["test_tag:1", "test_tag:2"] + ) + assert "test_tag:1" in event["event"]["tags"] + assert "test_tag:2" in event["event"]["tags"] + + event = dog.Event.create( + title="test source", + text="test source", + source_type_name="vsphere", + priority="low", + ) + 
event_id = event["event"]["id"] + now_ts = event["event"]["date_happened"] + get_with_retry("Event", event_id) + events = dog.Event.query( + start=now_ts - 100, end=now_ts + 100, priority="low", sources="vsphere" + ) + assert events["events"], "No events found in stream" + assert event_id in [event["id"] for event in events["events"]] + + def test_comments(self, dog, get_with_retry, freezer, user_handle): + with freezer: + now_ts = int(time.time()) + + message = "test message " + str(now_ts) + + comment = dog.Comment.create(handle=user_handle, message=message) + comment_id = comment["comment"]["id"] + assert comment["comment"]["message"] == message + + get_with_retry("Event", comment_id) + comment = dog.Comment.update( + comment_id, handle=user_handle, message=message + " updated" + ) + assert comment["comment"]["message"] == message + " updated" + reply = dog.Comment.create( + handle=user_handle, message=message + " reply", related_event_id=comment_id + ) + assert reply["comment"]["message"] == message + " reply" + + def test_timeboard(self, dog, get_with_retry): graph = { "title": "test metric graph", - "definition": - { - "requests": [{"q": "testing.metric.1{host:blah.host.1}"}], - "viz": "timeseries", - } + "definition": { + "requests": [{"q": "testing.metric.1{host:blah.host.1}"}], + "viz": "timeseries", + }, } - timeboard_id = dog.Timeboard.create(title='api timeboard', description='my api timeboard', - graphs=[graph])['dash']['id'] - remote_timeboard = dog.Timeboard.get(timeboard_id) + timeboard = dog.Timeboard.create( + title="api timeboard", description="my api timeboard", graphs=[graph] + ) + assert "api timeboard" == timeboard["dash"]["title"] + assert "my api timeboard" == timeboard["dash"]["description"] + assert timeboard["dash"]["graphs"][0] == graph - eq('api timeboard', remote_timeboard['dash']['title']) - eq('my api timeboard', remote_timeboard['dash']['description']) - eq(graph['definition']['requests'], - 
remote_timeboard['dash']['graphs'][0]['definition']['requests']) + timeboard = get_with_retry("Timeboard", timeboard["dash"]["id"]) + assert "api timeboard" == timeboard["dash"]["title"] + assert "my api timeboard" == timeboard["dash"]["description"] + assert timeboard["dash"]["graphs"][0] == graph graph = { "title": "updated test metric graph", "definition": { "requests": [{"q": "testing.metric.1{host:blah.host.1}"}], "viz": "timeseries", - } + }, } - timeboard_id = dog.Timeboard.update(timeboard_id, title='updated api timeboard', - description='my updated api timeboard', - graphs=[graph])['dash']['id'] - - # Query and ensure all is well. - remote_timeboard = dog.Timeboard.get(timeboard_id) - - eq('updated api timeboard', remote_timeboard['dash']['title']) - eq('my updated api timeboard', remote_timeboard['dash']['description']) - - p = graph['definition']['requests'] - eq(p, remote_timeboard['dash']['graphs'][0]['definition']['requests']) - - # Query all dashboards and make sure it's in there. - - timeboards = dog.Timeboard.get_all()['dashes'] - ids = [timeboard["id"] for timeboard in timeboards] - assert timeboard_id in ids or str(timeboard_id) in ids - - dog.Timeboard.delete(timeboard_id) - - try: - dog.get(timeboard_id) - except: - pass - else: - # the previous get *should* throw an exception - assert False - - def test_search(self): - results = dog.Infrastructure.search(q='e') - assert len(results['results']['hosts']) > 0 - assert len(results['results']['metrics']) > 0 - - @attr("metric") - def test_metrics(self): - now = datetime.datetime.now() - now_ts = int(time.mktime(now.timetuple())) - metric_name = "test.metric." 
+ str(now_ts) + timeboard = dog.Timeboard.update( + timeboard["dash"]["id"], + title="updated api timeboard", + description="my updated api timeboard", + graphs=[graph], + ) + + assert "updated api timeboard" == timeboard["dash"]["title"] + assert "my updated api timeboard" == timeboard["dash"]["description"] + assert timeboard["dash"]["graphs"][0] == graph + + # Query all dashboards and make sure it"s in there. + timeboards = dog.Timeboard.get_all()["dashes"] + ids = [str(timeboard["id"]) for timeboard in timeboards] + assert str(timeboard["dash"]["id"]) in ids + + assert dog.Timeboard.delete(timeboard["dash"]["id"]) is None + + def test_search(self, dog): + results = dog.Infrastructure.search(q="") + assert len(results["results"]["hosts"]) > 0 + assert len(results["results"]["metrics"]) > 0 + + def test_metrics_simple(self, dog, get_with_retry, freezer): + with freezer: + now_ts = int(time.time()) + + metric_name_single = "test.metric_single." + str(now_ts) + host_name = "test.host." + str(now_ts) + + def retry_condition(r): + return not r["series"] + + # Send metrics with single and multi points, and with compression + assert ( + dog.Metric.send(metric=metric_name_single, points=1, host=host_name)[ + "status" + ] + == "ok" + ) + + metric_query_single = get_with_retry( + "Metric", + operation="query", + retry_condition=retry_condition, + retry_limit=60, + start=now_ts - 600, + end=now_ts + 600, + query="{}{{host:{}}}".format(metric_name_single, host_name), + ) + assert len(metric_query_single["series"]) == 1 + assert metric_query_single["series"][0]["metric"] == metric_name_single + assert metric_query_single["series"][0]["scope"] == "host:{}".format(host_name) + assert len(metric_query_single["series"][0]["pointlist"]) == 1 + assert metric_query_single["series"][0]["pointlist"][0][1] == 1 + + def test_metrics_list(self, dog, get_with_retry, freezer): + with freezer: + now_ts = int(time.time()) + + metric_name_list = "test.metric_list." 
+ str(now_ts) host_name = "test.host." + str(now_ts) - dog.Metric.send(metric=metric_name, points=1, host=host_name) - time.sleep(self.wait_time) - - metric_query = dog.Metric.query(start=now_ts - 3600, end=now_ts + 3600, - query="avg:%s{host:%s}" % (metric_name, host_name)) - assert len(metric_query['series']) == 1, metric_query - - # results = dog.Infrastructure.search(q='metrics:test.metric.' + str(now_ts)) - # TODO mattp: cache issue. move this test to server side. - # assert len(results['results']['metrics']) == 1, results - - matt_series = [ - (int(time.mktime((now - datetime.timedelta(minutes=25)).timetuple())), 5), - (int(time.mktime((now - datetime.timedelta(minutes=25)).timetuple())) + 1, 15), - (int(time.mktime((now - datetime.timedelta(minutes=24)).timetuple())), 10), - (int(time.mktime((now - datetime.timedelta(minutes=23)).timetuple())), 15), - (int(time.mktime((now - datetime.timedelta(minutes=23)).timetuple())) + 1, 5), - (int(time.mktime((now - datetime.timedelta(minutes=22)).timetuple())), 5), - (int(time.mktime((now - datetime.timedelta(minutes=20)).timetuple())), 15), - (int(time.mktime((now - datetime.timedelta(minutes=18)).timetuple())), 5), - (int(time.mktime((now - datetime.timedelta(minutes=17)).timetuple())), 5), - (int(time.mktime((now - datetime.timedelta(minutes=17)).timetuple())) + 1, 15), - (int(time.mktime((now - datetime.timedelta(minutes=15)).timetuple())), 15), - (int(time.mktime((now - datetime.timedelta(minutes=15)).timetuple())) + 1, 5), - (int(time.mktime((now - datetime.timedelta(minutes=14)).timetuple())), 5), - (int(time.mktime((now - datetime.timedelta(minutes=14)).timetuple())) + 1, 15), - (int(time.mktime((now - datetime.timedelta(minutes=12)).timetuple())), 15), - (int(time.mktime((now - datetime.timedelta(minutes=12)).timetuple())) + 1, 5), - (int(time.mktime((now - datetime.timedelta(minutes=11)).timetuple())), 5), - ] - - dog.Metric.send(metric='matt.metric', points=matt_series, host="matt.metric.host") - - def 
test_type_check(self): - dog.Metric.send(metric="test.metric", points=[(time.time() - 3600, 1.0)]) - dog.Metric.send(metric="test.metric", points=1.0) - dog.Metric.send(metric="test.metric", points=(time.time(), 1.0)) + def retry_condition(r): + return not r["series"] - @attr('monitor') - def test_monitors(self): - query = "avg(last_1h):sum:system.net.bytes_rcvd{host:host0} > 100" - - monitor_id = dog.Monitor.create(query=query, type="metric alert")['id'] - monitor = dog.Monitor.get(monitor_id) - time.sleep(self.wait_time) - assert monitor['query'] == query, monitor['query'] - assert monitor['options']['notify_no_data'] == False, monitor['options']['notify_no_data'] - - options = { - "notify_no_data": True, - "no_data_timeframe": 20, - "silenced": {"*": None} - } - dog.Monitor.update(monitor_id, query=query, options=options, timeout_h=1) - monitor = dog.Monitor.get(monitor_id) - assert monitor['query'] == query, monitor['query'] - assert monitor['options']['notify_no_data'] == True, monitor['options']['notify_no_data'] - assert monitor['options']['no_data_timeframe'] == 20, monitor['options']['no_data_timeframe'] - assert monitor['options']['silenced'] == {"*": None}, monitor['options']['silenced'] - - dog.Monitor.delete(monitor_id) - try: - dog.Monitor.delete(monitor_id) - except: - pass - else: - assert False, 'monitor not deleted' - - query1 = "avg(last_1h):sum:system.net.bytes_rcvd{host:host0} > 100" - query2 = "avg(last_1h):sum:system.net.bytes_rcvd{host:host0} > 200" - - monitor_id1 = dog.Monitor.create(query=query1, type="metric alert")['id'] - monitor_id2 = dog.Monitor.create(query=query2, type="metric alert")['id'] - monitors = dog.Monitor.get_all() - monitor1 = [a for a in monitors if a['id'] == monitor_id1][0] - monitor2 = [a for a in monitors if a['id'] == monitor_id2][0] - assert monitor1['query'] == query1, monitor1 - assert monitor2['query'] == query2, monitor2 - - def test_user_error(self): - query = 
"avg(last_1h):sum:system.net.bytes_rcvd{host:host0} > 100" - - dog._swallow = True + points = [(now_ts - 60, 1), (now_ts, 2)] + assert ( + dog.Metric.send(metric=metric_name_list, points=points, host=host_name)[ + "status" + ] + == "ok" + ) + metric_query_list = get_with_retry( + "Metric", + operation="query", + retry_condition=retry_condition, + retry_limit=60, + start=now_ts - 600, + end=now_ts + 600, + query="{}{{host:{}}}".format(metric_name_list, host_name), + ) + assert len(metric_query_list["series"]) == 1 + assert metric_query_list["series"][0]["metric"] == metric_name_list + assert metric_query_list["series"][0]["scope"] == "host:{}".format(host_name) + assert len(metric_query_list["series"][0]["pointlist"]) == 2 + assert metric_query_list["series"][0]["pointlist"][0][1] == 1 + assert metric_query_list["series"][0]["pointlist"][1][1] == 2 + + def test_metrics_tuple(self, dog, get_with_retry, freezer): + with freezer: + now_ts = int(time.time()) + + metric_name_tuple = "test.metric_tuple." + str(now_ts) + host_name = "test.host." 
+ str(now_ts) - monitor = dog.Monitor.create(query=query, type="metric alert") - assert 'id' in monitor, monitor - result = dog.Monitor.update(monitor['id'], query='aaa', silenced=True) - assert 'errors' in result, result + def retry_condition(r): + return not r["series"] + + points = (now_ts - 60, 1) + assert ( + dog.Metric.send( + metric=metric_name_tuple, + points=points, + host=host_name, + compress_payload=False, + )["status"] + == "ok" + ) + metric_query_tuple = get_with_retry( + "Metric", + operation="query", + retry_condition=retry_condition, + retry_limit=60, + start=now_ts - 600, + end=now_ts + 600, + query="{}{{host:{}}}".format(metric_name_tuple, host_name), + ) + assert len(metric_query_tuple["series"]) == 1 + assert metric_query_tuple["series"][0]["metric"] == metric_name_tuple + assert metric_query_tuple["series"][0]["scope"] == "host:{}".format(host_name) + assert len(metric_query_tuple["series"][0]["pointlist"]) == 1 + assert metric_query_tuple["series"][0]["pointlist"][0][1] == 1 + + def test_distribution_metrics(self, dog, freezer): + with freezer: + now_ts = int(time.time()) + + metric_name = "test.distribution_metric." + str(now_ts) + host_name = "test.host." + str(now_ts) - dog._swallow = False + # Submit a distribution metric + assert dog.Distribution.send( + distributions=[{ + 'metric': metric_name, + 'points': [(now_ts - 60, [1.0])], + 'type': 'distribution', + 'host': host_name, + }] + )["status"] == "ok" - monitor_id = dog.Monitor.create(query=query, type="metric alert")['id'] - assert monitor_id == int(monitor_id), monitor_id - try: - result = dog.Monitor.update(monitor_id, query='aaa', silenced=True) - except ApiError: - pass - else: - assert False, "Should have raised an exception" + # FIXME: Query and verify the test metric result. Currently, it takes + # too long for a new distribution metric to become available for query. 
- @attr('snapshot') - def test_graph_snapshot(self): + def test_graph_snapshot(self, dog, get_with_retry, freezer): metric_query = "system.load.1{*}" event_query = "*" - end = int(time.time()) - start = end - 60 * 60 # go back 1 hour + with freezer: + end = int(time.time()) + start = end - 60 * 60 # go back 1 hour # Test without an event query snap = dog.Graph.create(metric_query=metric_query, start=start, end=end) - ok('snapshot_url' in snap, msg=snap) - ok('metric_query' in snap, msg=snap) - ok('event_query' not in snap, msg=snap) - eq(snap['metric_query'], metric_query) - snapshot_url = snap['snapshot_url'] - while dog.Graph.status(snapshot_url)['status_code'] != 200: - time.sleep(self.wait_time) - if 'localhost' in dog._api_host: - snapshot_url = 'http://%s%s' % (dog.api_host, snapshot_url) - assert_snap_not_blank(snapshot_url) - assert_snap_has_no_events(snapshot_url) + assert "event_query" not in snap + assert snap["metric_query"] == metric_query + snapshot_url = snap["snapshot_url"] # Test with an event query - snap = dog.Graph.create(metric_query=metric_query, start=start, end=end, - event_query=event_query) - ok('snapshot_url' in snap, msg=snap) - ok('metric_query' in snap, msg=snap) - ok('event_query' in snap, msg=snap) - eq(snap['metric_query'], metric_query) - eq(snap['event_query'], event_query) - snapshot_url = snap['snapshot_url'] - while dog.Graph.status(snapshot_url)['status_code'] != 200: - time.sleep(self.wait_time) - if 'localhost' in dog._api_host: - snapshot_url = 'http://%s%s' % (dog.api_host, snapshot_url) - assert_snap_not_blank(snapshot_url) + snap = dog.Graph.create( + metric_query=metric_query, start=start, end=end, event_query=event_query + ) + assert snap["metric_query"] == metric_query + assert snap["event_query"] == event_query + snapshot_url = snap["snapshot_url"] # Test with a graph def graph_def = { "viz": "toplist", - "requests": [{ - "q": "top(system.disk.free{*} by {device}, 10, 'mean', 'desc')", - "style": { - "palette": 
"dog_classic" - }, - "conditional_formats": [{ - "palette": "red", - "comparator": ">", - "value": 50000000000 - }, { - "palette": "green", - "comparator": ">", - "value": 30000000000 - }] - }] + "requests": [ + { + "q": "top(system.disk.free{*} by {device}, 10, 'mean', 'desc')", + "style": {"palette": "dog_classic"}, + "conditional_formats": [ + {"palette": "red", "comparator": ">", "value": 50000000000}, + {"palette": "green", "comparator": ">", "value": 30000000000}, + ], + } + ], } - graph_def = json.dumps(graph_def) + graph_def = json.dumps(graph_def, sort_keys=True) snap = dog.Graph.create(graph_def=graph_def, start=start, end=end) - ok('snapshot_url' in snap, msg=snap) - ok('graph_def' in snap, msg=snap) - ok('metric_query' not in snap, msg=snap) - ok('event_query' not in snap, msg=snap) - eq(snap['graph_def'], graph_def) - snapshot_url = snap['snapshot_url'] - while dog.Graph.status(snapshot_url)['status_code'] != 200: - time.sleep(self.wait_time) - if 'localhost' in dog._api_host: - snapshot_url = 'http://%s%s' % (dog.api_host, snapshot_url) - assert_snap_not_blank(snapshot_url) - - @attr('screenboard') - def test_screenboard(self): - def _compare_screenboard(board1, board2): - compare_keys = ['board_title', 'height', 'width', 'widgets'] + assert "metric_query" not in snap + assert "event_query" not in snap + assert snap["graph_def"] == graph_def + snapshot_url = snap["snapshot_url"] + + # Test snapshot status endpoint + get_with_retry( + "Graph", + snapshot_url, + operation="status", + retry_condition=lambda r: r["status_code"] != 200, + retry_limit=20, + ) + + def test_screenboard(self, vcr_cassette, dog, get_with_retry): + def _compare_screenboard(apiBoard, expectedBoard): + compare_keys = ["board_title", "height", "width", "widgets"] for key in compare_keys: - assert board1[key] == board2[key], key + assert apiBoard[key] == expectedBoard[key] board = { "width": 1024, @@ -508,7 +399,7 @@ def _compare_screenboard(board1, board2): "y": 18, "x": 84, 
"query": "tags:release", - "timeframe": "1w" + "time": {"live_span": "1w"}, }, { "type": "image", @@ -516,9 +407,9 @@ def _compare_screenboard(board1, board2): "width": 32, "y": 7, "x": 32, - "url": "http://path/to/image.jpg" - } - ] + "url": "http://path/to/image.jpg", + }, + ], } updated_board = { @@ -532,226 +423,549 @@ def _compare_screenboard(board1, board2): "width": 32, "y": 7, "x": 32, - "url": "http://path/to/image.jpg" + "url": "http://path/to/image.jpg", } - ] + ], } create_res = dog.Screenboard.create(**board) _compare_screenboard(board, create_res) - get_res = dog.Screenboard.get(create_res['id']) + get_res = get_with_retry("Screenboard", create_res["id"]) _compare_screenboard(get_res, create_res) - assert get_res['id'] == create_res['id'] + assert get_res["id"] == create_res["id"] - get_all_res = dog.Screenboard.get_all()['screenboards'] - created = [s for s in get_all_res if s['id'] == create_res['id']] - self.assertEquals(len(created), 1) + get_all_res = dog.Screenboard.get_all()["screenboards"] + created = [s for s in get_all_res if s["id"] == create_res["id"]] + assert len(created) == 1 - update_res = dog.Screenboard.update(get_res['id'], **updated_board) + update_res = dog.Screenboard.update(get_res["id"], **updated_board) _compare_screenboard(update_res, updated_board) - assert get_res['id'] == update_res['id'] + assert get_res["id"] == update_res["id"] - share_res = dog.Screenboard.share(get_res['id']) - assert share_res['board_id'] == get_res['id'] - public_url = share_res['public_url'] + share_res = dog.Screenboard.share(get_res["id"]) + assert share_res["board_id"] == get_res["id"] + public_url = share_res["public_url"] + if vcr_cassette.record_mode != "none": + time.sleep(WAIT_TIME) response = requests.get(public_url) assert response.status_code == 200 - revoke_res = dog.Screenboard.revoke(get_res['id']) + dog.Screenboard.revoke(get_res["id"]) + if vcr_cassette.record_mode != "none": + time.sleep(WAIT_TIME) response = 
requests.get(public_url) assert response.status_code == 404 - delete_res = dog.Screenboard.delete(update_res['id']) - assert delete_res['id'] == update_res['id'] + delete_res = dog.Screenboard.delete(update_res["id"]) + assert delete_res["id"] == update_res["id"] - @attr('monitor') - def test_monitor_crud(self): + def test_monitor_crud(self, dog, get_with_retry, freezer): # Metric alerts query = "avg(last_1h):sum:system.net.bytes_rcvd{host:host0} > 100" - options = { - 'silenced': {'*': time.time() + 60 * 60}, - 'notify_no_data': False - } - monitor_id = dog.Monitor.create(type='metric alert', query=query, options=options)['id'] - monitor = dog.Monitor.get(monitor_id) + with freezer: + options = { + "silenced": {"*": int(time.time()) + 60 * 60}, + "notify_no_data": False, + } + monitor = dog.Monitor.create(type="metric alert", query=query, options=options) + assert monitor["query"] == query + assert monitor["options"]["notify_no_data"] == options["notify_no_data"] + assert monitor["options"]["silenced"] == options["silenced"] - eq(monitor['query'], query) - eq(monitor['options']['notify_no_data'], - options['notify_no_data']) - eq(monitor['options']['silenced'], options['silenced']) + monitor = get_with_retry("Monitor", monitor["id"]) + assert monitor["query"] == query + assert monitor["options"]["notify_no_data"] == options["notify_no_data"] + assert monitor["options"]["silenced"] == options["silenced"] query2 = "avg(last_1h):sum:system.net.bytes_rcvd{host:host0} > 200" - updated_monitor_id = dog.Monitor.update(monitor_id, query=query2, options=options)['id'] - monitor = dog.Monitor.get(updated_monitor_id) - eq(monitor['query'], query2) - - name = 'test_monitors' - monitor_id = dog.Monitor.update(monitor_id, query=query2, name=name, - options={'notify_no_data': True})['id'] - monitor = dog.Monitor.get(monitor_id) - eq(monitor['name'], name) - eq(monitor['options']['notify_no_data'], True) - - dog.Monitor.delete(monitor_id) - try: - dog.Monitor.get(monitor_id) - 
except ApiError: - pass - else: - assert False, 'monitor not deleted' + monitor = dog.Monitor.update(monitor["id"], query=query2, options=options) + assert monitor["query"] == query2 + assert monitor["options"]["notify_no_data"] == options["notify_no_data"] + assert monitor["options"]["silenced"] == options["silenced"] + + name = "test_monitors" + monitor = dog.Monitor.update( + monitor["id"], query=query2, name=name, options={"notify_no_data": True} + ) + assert monitor["name"] == name + assert monitor["query"] == query2 + assert monitor["options"]["notify_no_data"] is True + + monitors = [m for m in dog.Monitor.get_all() if m["id"] == monitor["id"]] + assert len(monitors) == 1 + + assert dog.Monitor.delete(monitor["id"]) == { + "deleted_monitor_id": monitor["id"] + } - query1 = "avg(last_1h):sum:system.net.bytes_rcvd{host:host0} > 100" - query2 = "avg(last_1h):sum:system.net.bytes_rcvd{host:host0} > 200" + def test_monitor_validate(self, dog, get_with_retry): + monitor_type = "metric alert" + valid_options = {"thresholds": {"critical": 200.0}} + invalid_options = {"thresholds": {"critical": 90.0}} + + # Check with an invalid query. + invalid_query = "THIS IS A BAD QUERY" + res = dog.Monitor.validate(type=monitor_type, query=invalid_query, options=valid_options) + assert res == {"errors": ["The value provided for parameter 'query' is invalid"]} + + # Check with a valid query, invalid options. + valid_query = "avg(last_1h):sum:system.net.bytes_rcvd{host:host0} > 200" + res = dog.Monitor.validate(type=monitor_type, query=valid_query, options=invalid_options) + assert res == {"errors": ["Alert threshold (90.0) does not match that used in the query (200.0)."]} + + # Check with a valid query, valid options. 
+ res = dog.Monitor.validate(type=monitor_type, query=valid_query, options=valid_options) + assert res == {} - monitor_id1 = dog.Monitor.create(type='metric alert', query=query1)['id'] - monitor_id2 = dog.Monitor.create(type='metric alert', query=query2)['id'] - monitors = dog.Monitor.get_all(group_states=['alert', 'warn']) - monitor1 = [m for m in monitors if m['id'] == monitor_id1][0] - monitor2 = [m for m in monitors if m['id'] == monitor_id2][0] - assert monitor1['query'] == query1, monitor1 - assert monitor2['query'] == query2, monitor2 - - # Service checks - query = '"ntp.in_sync".over("role:herc").last(3).count_by_status()' - options = { - 'notify_no_data': False, - 'thresholds': { - 'ok': 3, - 'warning': 2, - 'critical': 1, - 'no data': 3 + def test_monitor_can_delete(self, dog, freezer): + # Create a monitor. + query = "avg(last_1h):sum:system.net.bytes_rcvd{host:host0} > 100" + with freezer: + options = { + "silenced": {"*": int(time.time()) + 60 * 60}, + "notify_no_data": False, } + monitor = dog.Monitor.create(type="metric alert", query=query, options=options) + + # Check if you can delete the monitor. 
+ monitor_ids = [monitor["id"]] + assert dog.Monitor.can_delete(monitor_ids=monitor_ids) == { + "data": {"ok": monitor_ids}, + "errors": None, } - monitor_id = dog.Monitor.create(type='service check', query=query, options=options)['id'] - monitor = dog.Monitor.get(monitor_id, group_states=['all']) - - eq(monitor['query'], query) - eq(monitor['options']['notify_no_data'], - options['notify_no_data']) - eq(monitor['options']['thresholds'], options['thresholds']) - - query2 = '"ntp.in_sync".over("role:sobotka").last(3).count_by_status()' - monitor_id = dog.Monitor.update(monitor_id, query=query2)['id'] - monitor = dog.Monitor.get(monitor_id) - eq(monitor['query'], query2) - - dog.Monitor.delete(monitor_id) - try: - dog.Monitor.get(monitor_id) - except ApiError: - pass - else: - assert False, 'monitor not deleted' - - @attr('monitor') - def test_monitor_muting(self): + + # Create a monitor-based SLO. + with freezer as dt: + name = "test SLO {}".format(int(time.time())) + + thresholds = [{"timeframe": "7d", "target": 90}] + slo = dog.ServiceLevelObjective.create( + type="monitor", + monitor_ids=monitor_ids, + thresholds=thresholds, + name=name, + )["data"][0] + + # Check if you can delete the monitor. + monitor_ids = [monitor["id"]] + resp = dog.Monitor.can_delete(monitor_ids=monitor_ids) + assert "errors" in resp + assert str(monitor["id"]) in resp["errors"] + assert len(resp["errors"][str(monitor["id"])]) + assert "is referenced in slos" in resp["errors"][str(monitor["id"])][0] + + # Delete the SLO. + dog.ServiceLevelObjective.delete(slo["id"]) + + # Check if you can delete the monitor. + monitor_ids = [monitor["id"]] + assert dog.Monitor.can_delete(monitor_ids=monitor_ids) == { + "data": {"ok": monitor_ids}, + "errors": None, + } + + # Delete the monitor to clean up the test. + assert dog.Monitor.delete(monitor["id"]) == { + "deleted_monitor_id": monitor["id"] + } + + def test_monitor_can_delete_with_force(self, dog, freezer): + # Create a monitor. 
query = "avg(last_1h):sum:system.net.bytes_rcvd{host:host0} > 100" - monitor_id = dog.Monitor.create(type='metric alert', query=query)['id'] - monitor = dog.Monitor.get(monitor_id) - eq(monitor['query'], query) + with freezer: + options = { + "silenced": {"*": int(time.time()) + 60 * 60}, + "notify_no_data": False, + } + monitor = dog.Monitor.create(type="metric alert", query=query, options=options) + monitor_ids = [monitor["id"]] + + # Create a monitor-based SLO. + with freezer: + name = "test SLO {}".format(int(time.time())) + thresholds = [{"timeframe": "7d", "target": 90}] + slo = dog.ServiceLevelObjective.create( + type="monitor", + monitor_ids=monitor_ids, + thresholds=thresholds, + name=name, + )["data"][0] + + # Check if you can delete the monitor with force option + assert dog.Monitor.delete(monitor["id"], force=True) == { + "deleted_monitor_id": monitor["id"] + } - dt = dog.Monitor.mute_all() - eq(dt['active'], True) - eq(dt['scope'], ['*']) + # Delete the SLO. + dog.ServiceLevelObjective.delete(slo["id"]) + + def test_service_level_objective_crud(self, dog, freezer): + numerator = "sum:my.custom.metric{type:good}.as_count()" + denominator = "sum:my.custom.metric{*}.as_count()" + query = {"numerator": numerator, "denominator": denominator} + thresholds = [{"timeframe": "7d", "target": 90}] + with freezer: + name = "test SLO {}".format(int(time.time())) + slo = dog.ServiceLevelObjective.create( + type="metric", + query=query, + thresholds=thresholds, + name=name, + tags=["type:test"], + )["data"][0] + assert slo["name"] == name + + numerator2 = "sum:my.custom.metric{type:good,!type:ignored}.as_count()" + denominator2 = "sum:my.custom.metric{!type:ignored}.as_count()" + query = {"numerator": numerator2, "denominator": denominator2} + slo = dog.ServiceLevelObjective.update( + id=slo["id"], + type="metric", + query=query, + thresholds=thresholds, + name=name, + tags=["type:test"], + )["data"][0] + assert slo["name"] == name + slos = [ + s + for s in 
dog.ServiceLevelObjective.get_all(tags_query="type:test")["data"] + if s["id"] == slo["id"] + ] + assert len(slos) == 1 - dt = dog.Monitor.unmute_all() - eq(dt, None) # No response is expected. + slo_search = dog.ServiceLevelObjective.search(query="{} AND type:test".format(name)) + assert len(slo_search["data"]) >= 1 - # We shouldn't be able to mute a simple alert on a scope. - assert_raises(ApiError, dog.Monitor.mute, monitor_id, scope='env:staging') + assert dog.ServiceLevelObjective.get(slo["id"])["data"]["id"] == slo["id"] + dog.ServiceLevelObjective.delete(slo["id"]) + @pytest.mark.admin_needed + def test_monitor_muting(self, dog, get_with_retry): + query1 = "avg(last_1h):sum:system.net.bytes_rcvd{host:host0} > 100" query2 = "avg(last_1h):sum:system.net.bytes_rcvd{*} by {host} > 100" - monitor_id = dog.Monitor.create(type='metric alert', query=query2)['id'] - monitor = dog.Monitor.get(monitor_id) - eq(monitor['query'], query2) + monitor1 = dog.Monitor.create(type="metric alert", query=query1) + monitor2 = dog.Monitor.create(type="metric alert", query=query2) + + dt = dog.Monitor.mute_all() + assert dt["active"] is True + assert dt["scope"] == ["*"] - dog.Monitor.mute(monitor_id, scope='host:foo') - monitor = dog.Monitor.get(monitor_id) - eq(monitor['options']['silenced'], {'host:foo': None}) + assert dog.Monitor.unmute_all() is None # No response expected - dog.Monitor.unmute(monitor_id, scope='host:foo') - monitor = dog.Monitor.get(monitor_id) - eq(monitor['options']['silenced'], {}) + monitor1 = dog.Monitor.mute(monitor1["id"]) + assert monitor1["options"]["silenced"] == {"*": None} - options = { - "silenced": {"host:abcd1234": None, "host:abcd1235": None} - } - dog.Monitor.update(monitor_id, query=query, options=options) - monitor = dog.Monitor.get(monitor_id) - eq(monitor['options']['silenced'], options['silenced']) + monitor2 = dog.Monitor.mute(monitor2["id"], scope="host:foo") + assert monitor2["options"]["silenced"] == {"host:foo": None} - 
dog.Monitor.unmute(monitor_id, all_scopes=True) - monitor = dog.Monitor.get(monitor_id) - eq(monitor['options']['silenced'], {}) + get_with_retry( + "Monitor", + monitor2["id"], + retry_condition=lambda r: r["options"]["silenced"] != {"host:foo": None}, + ) + monitor2 = dog.Monitor.unmute(monitor2["id"], scope="host:foo") + assert monitor2["options"]["silenced"] == {} - dog.Monitor.delete(monitor_id) + dog.Monitor.delete(monitor1["id"]) + dog.Monitor.delete(monitor2["id"]) - @attr('monitor') - def test_downtime(self): - start = int(time.time()) - end = start + 1000 + def test_downtime(self, dog, get_with_retry, freezer): + with freezer: + start = int(time.time()) + end = start + 1000 # Create downtime - downtime_id = dog.Downtime.create(scope='env:staging', start=start, end=end)['id'] - dt = dog.Downtime.get(downtime_id) - eq(dt['start'], start) - eq(dt['end'], end) - eq(dt['scope'], ['env:staging']) - eq(dt['disabled'], False) + downtime = dog.Downtime.create(scope="test_tag:1", start=start, end=end) + assert downtime["start"] == start + assert downtime["end"] == end + assert downtime["scope"] == ["test_tag:1"] + assert downtime["disabled"] is False + + get_with_retry("Downtime", downtime["id"]) # Update downtime message = "Doing some testing on staging." 
- end = int(time.time()) + 60000 - dog.Downtime.update(downtime_id, scope='env:staging', - end=end, message=message) - dt = dog.Downtime.get(downtime_id) - eq(dt['end'], end) - eq(dt['message'], message) - eq(dt['disabled'], False) + with freezer as dt: + dt.tick() + end = int(time.time()) + 60000 + + downtime = dog.Downtime.update( + downtime["id"], scope="test_tag:2", end=end, message=message + ) + assert downtime["end"] == end + assert downtime["message"] == message + assert downtime["scope"] == ["test_tag:2"] + assert downtime["disabled"] is False # Delete downtime - dog.Downtime.delete(downtime_id) - dt = dog.Downtime.get(downtime_id) - eq(dt['disabled'], True) - - @attr('monitor') - def test_service_check(self): - dog.ServiceCheck.check( - check='check_pg', host_name='host0', status=1, - message='PG is WARNING', tags=['db:prod_data']) - - @attr('host') - def test_host_muting(self): - hostname = "my.test.host" + assert dog.Downtime.delete(downtime["id"]) is None + downtime = get_with_retry( + "Downtime", downtime["id"], retry_condition=lambda r: r["disabled"] is False + ) + + def test_downtime_cancel_by_scope(self, dog, get_with_retry, freezer): + scope_one = "test:integration_one" + scope_two = "test:integration_two" + with freezer: + start = int(time.time()) + + # Create downtime with scope_one + end = start + 1000 + downtime_one = dog.Downtime.create(scope=scope_one, start=start, end=end) + assert downtime_one["scope"] == [scope_one] + assert downtime_one["disabled"] is False + + # Create downtime with scope_one + with freezer as dt: + dt.tick() + end = int(time.time()) + 60000 + downtime_two = dog.Downtime.create(scope=scope_one, start=start, end=end) + assert downtime_two["scope"] == [scope_one] + assert downtime_two["disabled"] is False + + with freezer as dt: + dt.tick() + end = int(time.time()) + 120000 + downtime_three = dog.Downtime.create(scope=scope_two, start=start, end=end) + assert downtime_three["scope"] == [scope_two] + assert 
downtime_three["disabled"] is False + + downtimes_with_scope_one = [downtime_one, downtime_two] + downtimes_with_scope_two = [downtime_three] + + # Cancel downtimes with scope `scope_one` + dog.Downtime.cancel_downtime_by_scope(scope=scope_one) + + # Verify only the downtimes with scope `scope_one` were canceled + for downtime in downtimes_with_scope_one: + get_with_retry( + "Downtime", downtime["id"], retry_condition=lambda r: r["disabled"] is False + ) + for downtime in downtimes_with_scope_two: + d = get_with_retry("Downtime", downtime["id"]) + assert d["disabled"] is False + + # Cancel downtimes with scope `scope_two` + dog.Downtime.cancel_downtime_by_scope(scope=scope_two) + + # Verify downtimes with scope `scope_two` were canceled + for downtime in downtimes_with_scope_two: + get_with_retry( + "Downtime", downtime["id"], retry_condition=lambda r: r["disabled"] is False + ) + + def test_service_check(self, dog): + assert dog.ServiceCheck.check( + check="check_pg", + host_name="host0", + status=1, + message="PG is WARNING", + tags=["db:prod_data"], + ) == {"status": "ok"} + + def test_host_muting(self, dog, get_with_retry, freezer): + with freezer: + end = int(time.time()) + 60 * 60 + hostname = "my.test.host" + str(end) message = "Muting this host for a test." 
- end = int(time.time()) + 60 * 60 - try: - # reset test - dog.Host.unmute(hostname) - except ApiError: - pass + # post a metric to make sure the test host context exists + dog.Metric.send(metric="test.muting.host", points=1, host=hostname) + # Wait for host to appear + get_with_retry("Tag", hostname) # Mute a host mute = dog.Host.mute(hostname, end=end, message=message) - eq(mute['hostname'], hostname) - eq(mute['action'], "Muted") - eq(mute['message'], message) - eq(mute['end'], end) + assert mute["hostname"] == hostname + assert mute["action"] == "Muted" + assert mute["message"] == message + assert mute["end"] == end - # We shouldn't be able to mute a host that's already muted, unless we include + # We shouldn"t be able to mute a host that"s already muted, unless we include # the override param. end2 = end + 60 * 15 - assert_raises(ApiError, dog.Host.mute, hostname, end=end2) + get_with_retry( + "Host", + hostname, + operation="mute", + retry_condition=lambda r: "errors" not in r, + end=end2, + ) mute = dog.Host.mute(hostname, end=end2, override=True) - eq(mute['hostname'], hostname) - eq(mute['action'], "Muted") - eq(mute['end'], end2) + assert mute["hostname"] == hostname + assert mute["action"] == "Muted" + assert mute["end"] == end2 + + unmute = dog.Host.unmute(hostname) + assert unmute["hostname"] == hostname + assert unmute["action"] == "Unmuted" + + def test_get_all_embeds(self, dog): + all_embeds = dog.Embed.get_all() + # Check all embeds is a valid response + assert "embedded_graphs" in all_embeds + + def test_embed_crud(self, dog, get_with_retry): + # Initialize a graph definition + graph_def = { + "viz": "toplist", + "requests": [ + { + "q": "top(system.disk.free{$var} by {device}, 10, 'mean', 'desc')", + "style": {"palette": "dog_classic"}, + "conditional_formats": [ + {"palette": "red", "comparator": ">", "value": 50000000000}, + {"palette": "green", "comparator": ">", "value": 30000000000}, + ], + } + ], + } + timeframe = "1_hour" + size = "medium" 
+ legend = "no" + title = "Custom titles!" + # Dump the dictionary to a JSON string and make an API call + graph_json = json.dumps(graph_def, sort_keys=True) + embed = dog.Embed.create( + graph_json=graph_json, + timeframe=timeframe, + size=size, + legend=legend, + title=title, + ) + # Check various embed attributes + assert "embed_id" in embed + assert embed["revoked"] is False + assert len(embed["template_variables"]) == 1 + assert embed["template_variables"][0] == "var" + assert "html" in embed + assert embed["graph_title"] == title + + var = "asdfasdfasdf" + response_graph = get_with_retry("Embed", embed["embed_id"], var=var) + # Check the graph has the same embed_id and the template_var is added to the url + assert "embed_id" in response_graph + assert response_graph["embed_id"] == embed["embed_id"] + assert len(response_graph["html"]) > len(embed["html"]) + assert var in response_graph["html"] + + assert "success" in dog.Embed.enable(embed["embed_id"]) + + assert "success" in dog.Embed.revoke(embed["embed_id"]) + + @pytest.mark.admin_needed + def test_user_crud(self, dog, get_with_retry, freezer): + with freezer: + now = int(time.time()) + handle = "user{}@test.com".format(now) + name = "Test User" + alternate_name = "Test User Alt" + + # test create user + user = dog.User.create(handle=handle, name=name, access_role="ro") + assert "user" in user + assert user["user"]["handle"] == handle + assert user["user"]["name"] == name + assert user["user"]["disabled"] is False + assert user["user"]["access_role"] == "ro" + + # test get user + user = get_with_retry("User", handle) + assert "user" in user + assert user["user"]["handle"] == handle + assert user["user"]["name"] == name + + # test update user + user = dog.User.update(handle, name=alternate_name, access_role="st") + assert user["user"]["handle"] == handle + assert user["user"]["name"] == alternate_name + assert user["user"]["disabled"] is False + assert user["user"]["access_role"] == "st" + + # test disable 
user + dog.User.delete(handle) + u = dog.User.get(handle) + assert "user" in u + assert u["user"]["disabled"] is True + + # test get all users + u = dog.User.get_all() + assert "users" in u + assert len(u["users"]) >= 1 + + @pytest.mark.admin_needed + def test_roles_crud(self, dog): + role_name = "test_role" + + data = { + "type": "roles", + "attributes": { + "name": role_name + } + } + + # test create role + role = dog.Roles.create(data=data) + self.cleanup_role_uuids.append(role["data"]["id"]) + assert "roles" in role["data"]["type"] + assert role["data"]["id"] is not None + assert role["data"]["attributes"]["name"] == role_name + + role_uuid = role["data"]["id"] + + # test update role + new_role_name = "test_role_2" + data = { + "type": "roles", + "attributes": { + "name": new_role_name, + "id": role_uuid, + } + } + + role = dog.Roles.update(role_uuid, data=data) + assert "roles" in role["data"]["type"] + assert role["data"]["id"] is not None + assert role["data"]["attributes"]["name"] == new_role_name + + # test assign permission + + permissions = dog.Permissions.get_all() + assert "permissions" in permissions["data"][0]["type"] + assert len(permissions["data"]) > 0 + + permission_uuid = permissions["data"][0]["id"] + data = { + "type": "permissions", + "id": permission_uuid + } + + role = dog.Roles.assign_permission(role_uuid, data=data) + assert "permissions" in role["data"][0]["type"] + + # test unassign permission + data = { + "type": "permissions", + "id": permission_uuid + } + + role = dog.Roles.unassign_permission(role_uuid, data=data) + assert "permissions" in role["data"][0]["type"] + assert len(permissions["data"]) > 0 + + + # test delete role + dog.Roles.delete(role_uuid) - dog.Host.unmute(hostname) + # check if new role is deleted successfully + res = dog.Roles.get(role_uuid) + assert "errors" in res -if __name__ == '__main__': - unittest.main() + @mock.patch('datadog.api._return_raw_response', True) + def test_user_agent(self, dog): + _, resp = 
dog.api_client.APIClient.submit('GET', 'validate') + assert re.match(r'^datadogpy\/[^\s]+ \(python [^\s]+; os [^\s]+; arch [^\s]+\)$', resp.request.headers['User-Agent']) diff --git a/tests/integration/api/test_aws_integration.py b/tests/integration/api/test_aws_integration.py new file mode 100644 index 000000000..40a8f5ff7 --- /dev/null +++ b/tests/integration/api/test_aws_integration.py @@ -0,0 +1,112 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc + +from itertools import product + +import pytest + +TEST_ACCOUNT_ID_1 = "123456789101" +TEST_ACCOUNT_ID_2 = "123456789102" +TEST_ACCOUNT_ID_3 = "123456789103" +TEST_ACCOUNT_ID_4 = "123456789104" +TEST_ROLE_NAME = "DatadogApiTestRole" +TEST_ROLE_NAME_2 = "DatadogApiTestRolo" + +ROLE_NAMES = [TEST_ROLE_NAME, TEST_ROLE_NAME_2] +ACCOUNT_IDS = [TEST_ACCOUNT_ID_1, TEST_ACCOUNT_ID_2, TEST_ACCOUNT_ID_3, TEST_ACCOUNT_ID_4] +AVAILABLE_NAMESPACES = 76 + +class TestAwsIntegration: + + @pytest.fixture(autouse=True) # TODO , scope="class" + def aws_integration(self, dog): + """Remove pending AWS Integrations.""" + yield + for account_id, role_name in product(ACCOUNT_IDS, ROLE_NAMES): + dog.AwsIntegration.delete(account_id=account_id, role_name=role_name) + + def test_create(self, dog): + output = dog.AwsIntegration.create( + account_id=TEST_ACCOUNT_ID_3, + role_name=TEST_ROLE_NAME, + host_tags=["api:test"], + filter_tags=["filter:test"], + account_specific_namespace_rules={'auto_scaling': False, 'opsworks': False} + ) + assert "external_id" in output + + def test_list(self, dog): + dog.AwsIntegration.create( + account_id=TEST_ACCOUNT_ID_1, + role_name=TEST_ROLE_NAME + ) + dog.AwsIntegration.create( + account_id=TEST_ACCOUNT_ID_2, + role_name=TEST_ROLE_NAME + ) + + output = dog.AwsIntegration.list() + assert "accounts" in output + assert 
len(output['accounts']) >= 2 + expected_fields = [ + 'errors', + 'filter_tags', + 'host_tags', + 'account_specific_namespace_rules', + ] + assert all(k in output['accounts'][0].keys() for k in expected_fields) + + def test_delete(self, dog): + dog.AwsIntegration.create( + account_id=TEST_ACCOUNT_ID_1, + role_name=TEST_ROLE_NAME + ) + output = dog.AwsIntegration.delete(account_id=TEST_ACCOUNT_ID_1, role_name=TEST_ROLE_NAME) + assert output == {} + + def test_generate_new_external_id(self, dog): + dog.AwsIntegration.create( + account_id=TEST_ACCOUNT_ID_2, + role_name=TEST_ROLE_NAME + ) + output = dog.AwsIntegration.generate_new_external_id( + account_id=TEST_ACCOUNT_ID_2, + role_name=TEST_ROLE_NAME + ) + + assert "external_id" in output + + def test_list_namespace_rules(self, dog): + dog.AwsIntegration.create( + account_id=TEST_ACCOUNT_ID_2, + role_name=TEST_ROLE_NAME + ) + output = dog.AwsIntegration.list_namespace_rules( + account_id=TEST_ACCOUNT_ID_2, + role_name=TEST_ROLE_NAME + ) + assert len(output) >= AVAILABLE_NAMESPACES + + def test_update(self, dog): + dog.AwsIntegration.create( + account_id=TEST_ACCOUNT_ID_2, + role_name=TEST_ROLE_NAME + ) + + dog.AwsIntegration.update( + account_id=TEST_ACCOUNT_ID_2, + role_name=TEST_ROLE_NAME, + new_account_id=TEST_ACCOUNT_ID_4, + host_tags=["api:test2"], + new_role_name=TEST_ROLE_NAME_2, + excluded_regions=["us-east-1","us-west-1"] + ) + + output = dog.AwsIntegration.list() + tests_pass = False + for i in output['accounts']: + assert "excluded_regions" in i.keys() + if i.get('account_id') == TEST_ACCOUNT_ID_4 and i.get('role_name') == TEST_ROLE_NAME_2 and i.get('excluded_regions') == ["us-east-1","us-west-1"]: + tests_pass = True + assert tests_pass diff --git a/tests/integration/api/test_aws_logs.py b/tests/integration/api/test_aws_logs.py new file mode 100644 index 000000000..79d4188cc --- /dev/null +++ b/tests/integration/api/test_aws_logs.py @@ -0,0 +1,65 @@ +# Unless explicitly stated otherwise all files in this 
repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc + +import pytest + +TEST_ACCOUNT_ID = "123456789101" +TEST_ROLE_NAME = "DatadogApiTestRole" +TEST_LAMBDA_ARN = "arn:aws:lambda:us-east-1:123456789101:function:APITest" +AVAILABLE_SERVICES = 6 + + +class TestAwsLogsIntegration: + + @pytest.fixture(autouse=True) # TODO , scope="class" + def aws_integration(self, dog): + """Prepare AWS Integration.""" + dog.AwsIntegration.create( + account_id=TEST_ACCOUNT_ID, + role_name=TEST_ROLE_NAME + ) + yield + dog.AwsIntegration.delete(account_id=TEST_ACCOUNT_ID, role_name=TEST_ROLE_NAME) + + def test_list_log_services(self, dog): + output = dog.AwsLogsIntegration.list_log_services() + assert len(output) >= AVAILABLE_SERVICES + + def test_aws_logs_crud(self, dog): + add_lambda_arn_output = dog.AwsLogsIntegration.add_log_lambda_arn( + account_id=TEST_ACCOUNT_ID, + lambda_arn=TEST_LAMBDA_ARN + ) + assert add_lambda_arn_output == {} + save_services_output = dog.AwsLogsIntegration.save_services( + account_id=TEST_ACCOUNT_ID, + services=["s3", "elb", "elbv2", "cloudfront", "redshift", "lambda"] + ) + assert save_services_output == {} + list_output = dog.AwsLogsIntegration.list() + expected_fields = [ + 'services', + 'lambdas', + 'account_id' + ] + assert all(k in list_output[0].keys() for k in expected_fields) + delete_output = dog.AwsLogsIntegration.delete_config( + account_id=TEST_ACCOUNT_ID, + lambda_arn=TEST_LAMBDA_ARN + ) + assert delete_output == {} + + def test_check_lambda(self, dog): + output = dog.AwsLogsIntegration.check_lambda( + account_id=TEST_ACCOUNT_ID, + lambda_arn=TEST_LAMBDA_ARN + ) + assert 'status' in output.keys() + + def test_check_services(self, dog): + output = dog.AwsLogsIntegration.check_services( + account_id=TEST_ACCOUNT_ID, + services=["s3", "elb", "elbv2", "cloudfront", "redshift", "lambda"] + ) + assert 'status' in 
output.keys() diff --git a/tests/integration/api/test_azure_integration.py b/tests/integration/api/test_azure_integration.py new file mode 100644 index 000000000..1f7c14b10 --- /dev/null +++ b/tests/integration/api/test_azure_integration.py @@ -0,0 +1,83 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc + +import pytest + + +class TestAzureIntegration: + + test_tenant_name = "testc44-1234-5678-9101-cc00736ftest" + test_client_id = "testc7f6-1234-5678-9101-3fcbf464test" + test_client_secret = "testingx./Sw*g/Y33t..R1cH+hScMDt" + test_new_tenant_name = "1234abcd-1234-5678-9101-abcd1234abcd" + test_new_client_id = "abcd1234-5678-1234-5678-1234abcd5678" + not_yet_installed_error = 'Azure Integration not yet installed.' + + @pytest.fixture(autouse=True) # TODO , scope="class" + def azure_integration(self, dog): + """Prepare Azure Integration.""" + yield + # Should be deleted as part of the test + # but cleanup here if test fails + dog.AzureIntegration.delete( + tenant_name=self.test_new_tenant_name, + client_id=self.test_new_client_id + ) + + def test_azure_crud(self, dog): + # Test Create + create_output = dog.AzureIntegration.create( + tenant_name=self.test_tenant_name, + host_filters="api:test", + client_id=self.test_client_id, + client_secret=self.test_client_secret + ) + assert create_output == {} + # Test List + list_tests_pass = False + for i in dog.AzureIntegration.list(): + if (i['tenant_name'] == self.test_tenant_name and + i['host_filters'] == 'api:test'): + list_tests_pass = True + assert list_tests_pass + # Test Update Host Filters + dog.AzureIntegration.update_host_filters( + tenant_name=self.test_tenant_name, + host_filters='api:test2', + client_id=self.test_client_id + ) + update_host_filters_tests_pass = False + for i in dog.AzureIntegration.list(): + if 
i['host_filters'] == 'api:test2': + update_host_filters_tests_pass = True + assert update_host_filters_tests_pass + # Test Update + dog.AzureIntegration.update( + tenant_name=self.test_tenant_name, + new_tenant_name=self.test_new_tenant_name, + host_filters="api:test3", + client_id=self.test_client_id, + new_client_id=self.test_new_client_id, + client_secret=self.test_client_secret + ) + update_tests_pass = False + for i in dog.AzureIntegration.list(): + if (i['tenant_name'] == self.test_new_tenant_name and + i['host_filters'] == 'api:test3'): + update_tests_pass = True + assert update_tests_pass + # Test Delete + dog.AzureIntegration.delete( + tenant_name=self.test_new_tenant_name, + client_id=self.test_new_client_id + ) + delete_tests_pass = True + list_output = dog.AzureIntegration.list() + if type(list_output) == list: + for i in dog.AzureIntegration.list(): + if i['tenant_name'] == self.test_new_tenant_name: + delete_tests_pass = False + elif self.not_yet_installed_error in list_output['errors'][0]: + pass + assert delete_tests_pass diff --git a/tests/integration/api/test_gcp_integration.py b/tests/integration/api/test_gcp_integration.py new file mode 100644 index 000000000..570d6e6c3 --- /dev/null +++ b/tests/integration/api/test_gcp_integration.py @@ -0,0 +1,67 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc + +import pytest + + +class TestGcpIntegration: + + test_project_id = "datadog-apitest" + test_client_email = "api-dev@datadog-sandbox.iam.gserviceaccount.com" + + @pytest.fixture(autouse=True) # TODO , scope="class" + def gcp_integration(self, dog): + """Prepare GCP Integration.""" + yield + # Should be deleted as part of the test + # but cleanup here if test fails + dog.GcpIntegration.delete( + project_id=self.test_project_id, + client_email=self.test_client_email + ) + + def test_gcp_crud(self, dog): + # Test Create + create_output = dog.GcpIntegration.create( + type="service_account", + project_id=self.test_project_id, + private_key_id="fake_private_key_id", + private_key="fake_key", + client_email=self.test_client_email, + client_id="123456712345671234567", + auth_uri="fake_uri", + token_uri="fake_uri", + auth_provider_x509_cert_url="fake_url", + client_x509_cert_url="fake_url", + host_filters="api:test" + ) + assert create_output == {} + # Test Update + dog.GcpIntegration.update( + project_id=self.test_project_id, + client_email=self.test_client_email, + host_filters="api:test2", + automute=True + ) + update_tests_pass = False + for i in dog.GcpIntegration.list(): + if (i['project_id'] == self.test_project_id and + i['host_filters'] == 'api:test2' and + i['automute'] is True): + update_tests_pass = True + assert update_tests_pass + # Test List + list_tests_pass = False + for i in dog.GcpIntegration.list(): + if (i['project_id'] == self.test_project_id and + i['host_filters'] == 'api:test2' and + i['automute'] is True): + list_tests_pass = True + assert list_tests_pass + # Test Delete + delete_output = dog.GcpIntegration.delete( + project_id=self.test_project_id, + client_email=self.test_client_email + ) + assert delete_output == {} diff --git a/tests/integration/api/test_synthetics.py b/tests/integration/api/test_synthetics.py new file mode 100644 index 000000000..36d94ddaa --- /dev/null +++ 
b/tests/integration/api/test_synthetics.py @@ -0,0 +1,146 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc + +import os + +import pytest + + +class TestSynthetics: + + @pytest.fixture(autouse=True) # TODO , scope="class" + def synthetics(self, dog): + # config and options for an API test + self.options = {"tick_every": 300} + self.config = { + "assertions": [{"operator": "is", "type": "statusCode", "target": 200}], + "request": {"method": "GET", "url": "https://example.com", "timeout": 30}, + } + + # config and option for a Browser test + self.options_browser = {"device_ids": ["laptop_large"], "tick_every": 900} + + # create an API test + self.output = dog.Synthetics.create_test( + config=self.config, + locations=["aws:us-east-2"], + message="Test API", + options=self.options, + tags=["test:synthetics_api"], + type="api", + name="Test with API", + ) + + # create a Browser test + self.output_browser = dog.Synthetics.create_test( + config=self.config, + locations=["aws:us-east-2"], + message="Test Browser", + options=self.options_browser, + tags=["test:synthetics_browser"], + type="browser", + name="Test with Browser", + ) + + self.public_test_id = self.output["public_id"] + self.public_test_id_browser = self.output_browser["public_id"] + + yield + + # delete all tests present in the account if any + self.output_cleanup = dog.Synthetics.get_all_tests() + self.public_ids_test_to_delete = [] + for test in self.output_cleanup["tests"]: + self.public_ids_test_to_delete.append(test["public_id"]) + dog.Synthetics.delete_test(public_ids=self.public_ids_test_to_delete) + + def test_get_update_pause_test(self, dog): + # test that both tests are live + assert len(self.output) > 1 + assert "public_id" in self.output + assert self.output["status"] == "live" + assert len(self.output_browser) > 
1 + assert "public_id" in self.output_browser + assert self.output_browser["status"] == "paused" + + # get this newly created tests + output_api = dog.Synthetics.get_test(id=self.public_test_id) + assert "public_id" in output_api + assert output_api["status"] == "live" + output_browser = dog.Synthetics.get_test(id=self.public_test_id_browser) + assert "public_id" in output_browser + assert output_browser["status"] == "paused" + + # test that we can retrieve results_ids + output_api = dog.Synthetics.get_results(id=self.public_test_id) + assert output_api["results"] is not None + output_browser = dog.Synthetics.get_results(id=self.public_test_id_browser) + assert output_browser["results"] is not None + + # edit the API test + self.options = {"tick_every": 60} + self.config["assertions"] = [ + {"operator": "isNot", "type": "statusCode", "target": 404} + ] + + output = dog.Synthetics.edit_test( + id=self.public_test_id, + config=self.config, + type="api", + locations=["aws:us-west-2"], + message="Test API edited", + name="Test with API edited", + options=self.options, + tags=["test:edited"], + ) + assert "error" not in output + # test that the new name matches + assert output["name"] == "Test with API edited" + + # edit the Browser test + self.config["assertions"] = [ + {"operator": "isNot", "type": "statusCode", "target": 404} + ] + self.options_browser = {"device_ids": ["tablet"], "tick_every": 1800} + + output = dog.Synthetics.edit_test( + id=self.public_test_id, + config=self.config, + type="api", + locations=["aws:us-west-2"], + message="Test Browser edited", + name="Test Browser edited", + options=self.options_browser, + tags=["test:edited"], + ) + assert "error" not in output + # test that the new name matches + assert output["name"] == "Test Browser edited" + + # pause the API test + output = dog.Synthetics.start_or_pause_test(id=self.public_test_id, new_status="paused") + # output is a boolean + assert output == True + + def test_get_all_tests(self, dog): + 
output = dog.Synthetics.get_all_tests() + # 2 tests were created + assert len(output["tests"]) >= 2 + + def test_get_locations(self, dog): + output = dog.Synthetics.get_locations() + assert len(output) == 1 + # 13 regions + assert len(output["locations"]) >= 10 + + def test_get_devices(self, dog): + output = dog.Synthetics.get_devices() + assert len(output) == 1 + # 3 devices + assert len(output["devices"]) >= 3 + + def test_delete_test(self, dog): + # delete the test + output = dog.Synthetics.delete_test(public_ids=[self.public_test_id]) + assert output["deleted_tests"] is not None diff --git a/tests/integration/cassettes/test_freezer.frozen b/tests/integration/cassettes/test_freezer.frozen new file mode 100644 index 000000000..5761f086c --- /dev/null +++ b/tests/integration/cassettes/test_freezer.frozen @@ -0,0 +1 @@ +2020-02-03T17:16:35.424545+01:00 \ No newline at end of file diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py new file mode 100644 index 000000000..9198fd654 --- /dev/null +++ b/tests/integration/conftest.py @@ -0,0 +1,162 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +"""Record HTTP requests to avoid hiting Datadog API from CI.""" + +import logging +import os +import time +from datetime import datetime + +import pytest +from vcr import VCR +from dateutil import parser + +from tests.integration.api.constants import API_KEY, APP_KEY, API_HOST, TEST_USER + +WAIT_TIME = 10 +FAKE_PROXY = {"https": "http://user:pass@10.10.1.10:3128/"} + +logging.basicConfig() +vcr_log = logging.getLogger("vcr") +vcr_log.setLevel(logging.INFO) + + +@pytest.fixture(scope="module") +def api(): + """Initialize Datadog API client.""" + from datadog import api, initialize + from datadog.api.api_client import APIClient + APIClient._sort_keys = True + initialize(api_key=API_KEY, app_key=APP_KEY, api_host=API_HOST) + + http_client = APIClient._get_http_client() + try: + assert http_client._session is None + http_client.request(None, None, None, None, None, None, None, None, max_retries=10) + except Exception as e: + assert http_client._session is not None + + return api + + +@pytest.fixture(scope='module') +def vcr_config(): + return dict( + filter_headers=('DD-API-KEY', 'DD-APPLICATION-KEY'), + filter_query_parameters=('api_key', 'application_key'), + ) + + +@pytest.fixture +def freezer(vcr_cassette_name, vcr_cassette, vcr): + from freezegun import freeze_time + + if vcr_cassette.record_mode == "all": + tzinfo = datetime.now().astimezone().tzinfo + freeze_at = datetime.now().replace(tzinfo=tzinfo).isoformat() + with open( + os.path.join( + vcr.cassette_library_dir, vcr_cassette_name + ".frozen" + ), + "w", + ) as f: + f.write(freeze_at) + else: + with open( + os.path.join( + vcr.cassette_library_dir, vcr_cassette_name + ".frozen" + ), + "r", + ) as f: + freeze_at = f.readline().strip() + + freeze_at = parser.isoparse(freeze_at) + + # dt = parser.isoparse(freeze_at) + # tz_offset = dt.tzinfo.utcoffset(dt).seconds / 60 + # os.environ['TZ'] = "UTC%+03d:%02d" % ( + # int( tz_offset / 60), tz_offset % 60 + # ) + # 
time.tzset() + + return freeze_time(freeze_at) + + +@pytest.fixture +def user_handle(vcr_cassette_name, vcr_cassette, vcr): + if vcr_cassette.record_mode == "all": + assert TEST_USER is not None, "You must set DD_TEST_CLIENT_USER environment variable to run comment tests" + handle = TEST_USER + with open( + os.path.join( + vcr.cassette_library_dir, vcr_cassette_name + ".handle" + ), + "w", + ) as f: + f.write(handle) + else: + with open( + os.path.join( + vcr.cassette_library_dir, vcr_cassette_name + ".handle" + ), + "r", + ) as f: + handle = f.readline().strip() + + return handle + + +@pytest.fixture +def dog(api, vcr_cassette): + """Record communication with Datadog API.""" + from datadog.util.compat import is_p3k + if not is_p3k() and vcr_cassette.record_mode != "all": + pytest.skip("Can not replay responses on Python 2") + + old_host_name = api._host_name + api._host_name = "test.host" + + yield api + + api._host_name = old_host_name + + +@pytest.fixture +def get_with_retry(vcr_cassette, dog): + """Return a retry factory that correctly handles the request recording.""" + + def retry( + resource_type, + resource_id=None, + operation="get", + retry_limit=10, + retry_condition=lambda r: r.get("errors"), + **kwargs + ): + number_of_interactions = len(vcr_cassette.data) if vcr_cassette.record_mode == "all" else -1 + + if resource_id is None: + resource = getattr(getattr(dog, resource_type), operation)(**kwargs) + else: + resource = getattr(getattr(dog, resource_type), operation)(resource_id, **kwargs) + retry_counter = 0 + while retry_condition(resource) and retry_counter < retry_limit: + time.sleep(WAIT_TIME) + + if vcr_cassette.record_mode == "all": + # remove failed interactions + vcr_cassette.data = vcr_cassette.data[:number_of_interactions] + + if resource_id is None: + resource = getattr(getattr(dog, resource_type), operation)(**kwargs) + else: + resource = getattr(getattr(dog, resource_type), operation)(resource_id, **kwargs) + retry_counter += 1 + + if 
retry_condition(resource): + raise Exception( + "Retry limit reached performing `{}` on resource {}, ID {}".format(operation, resource_type, resource_id) + ) + return resource + return retry diff --git a/tests/integration/dogshell/__init__.py b/tests/integration/dogshell/__init__.py new file mode 100644 index 000000000..b3017a1db --- /dev/null +++ b/tests/integration/dogshell/__init__.py @@ -0,0 +1,3 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc diff --git a/tests/integration/dogshell/cassettes/TestDogshell.test_comment.handle b/tests/integration/dogshell/cassettes/TestDogshell.test_comment.handle new file mode 100644 index 000000000..ad48e494d --- /dev/null +++ b/tests/integration/dogshell/cassettes/TestDogshell.test_comment.handle @@ -0,0 +1 @@ +frog@datadoghq.com \ No newline at end of file diff --git a/tests/integration/dogshell/cassettes/TestDogshell.test_comment.yaml b/tests/integration/dogshell/cassettes/TestDogshell.test_comment.yaml new file mode 100644 index 000000000..bc7a225f6 --- /dev/null +++ b/tests/integration/dogshell/cassettes/TestDogshell.test_comment.yaml @@ -0,0 +1,185 @@ +interactions: +- request: + body: '{"handle": "frog@datadoghq.com", "message": "yo dudes"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '55' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/comments + response: + body: + string: !!binary | + H4sIAAAAAAAAA22MQQqDMBREr1L+uhiNjZJAaa8S/L8xYEybGEHEuzehXboZmBne22HwztG8gNoh + hQkUMFpz/+XD4l20tew5b0TbdzfRyVpyuEKg6FMYqAD6bdnasL8psnPCUYzaFGDzF0xIMWtGPeNU + tlfw5ol60ejN+KmyK78WQZ3IjuMLnFp9lbcAAAA= + headers: + Cache-Control: + - no-cache + Connection: + - 
keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:22:49 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:22:48 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - NclXS5F5t+kukUaODU4jY2oSI1KBdPHFdFhJZNfbXLWDOThxbCLlKKmYvikjdDSg + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/events/5309722153764569092 + response: + body: + string: !!binary | + H4sIAAAAAAAAA21QzYqDMBB+lWXOUn9atQqlfY+lyGBmNRCTdJLISum7N1lhT73M4ftnnkAraQ/9 + EwR6Gma0ljQJ6Mv6XLSnpmvOGaAi9oPfLEEPwREPwSY5ZDCjFirBP2ymWwRRmGl+HEazRNZLn0gd + lMogsIq6/K9wv1cpLvWx6NqqKutj25zqpiu6KhnpN46CzXyJIMglBCcH/fc9A0GrHGnQuPxHW5aG + pd+iRRteUKVlxsWIvZrJmcBj2pmjlfla7gNc/rlexgd8YF6vN4+NKWgwAQAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb_events + Date: + - Mon, 03 Feb 2020 16:22:59 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:22:59 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - 
5aYUrRuOc3jD6khpgKSXltN7tduC6zb0gquWpoD5D3WExRBRkTdTrjgZqvI7Huqy + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"handle": "frog@datadoghq.com", "message": "nothing much"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '59' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: PUT + uri: https://api.datadoghq.com/api/v1/comments/5309722153764569092 + response: + body: + string: !!binary | + H4sIAAAAAAAAA22MTQrDIBhEr1K+damJqQkKpb2KqFUhauNPNiF3j5IusxmYGd7bQATnlM/ANihx + BgZIrbWf+bbyRYaOThj3ZJjGJxlpRzHcIaoUShSqAfxn0dqjvymha8KplLhugA/ZWK9vrghTVYZ7 + Obf9G4P+SJ65DNosj+qrr5XALoT7fgCNLd01uwAAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:23:00 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:22:59 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - ucJMu0SEwqvJ36fqkYRsP+glKObktTtdBf6X17lKXJ4+xOn7nFKnx11beu1ycofn + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/dogshell/cassettes/TestDogshell.test_downtime_cancel_by_scope.yaml b/tests/integration/dogshell/cassettes/TestDogshell.test_downtime_cancel_by_scope.yaml new file mode 100644 index 000000000..b5f123cd1 --- /dev/null +++ b/tests/integration/dogshell/cassettes/TestDogshell.test_downtime_cancel_by_scope.yaml @@ -0,0 +1,184 @@ +interactions: +- request: + body: '{"end": 
null, "message": null, "scope": "env:staging", "start": "1580747115"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '77' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/downtime + response: + body: + string: !!binary | + H4sIAAAAAAAAA2WPsW7DMAxE/4VjoaGyk0r12l9opyAwGJkVBNiSIdEJ2qL/HjJBs3TjEce7xx+o + FLZaKQeCIW/zbIDy9DcuJScudWSMDYYDPMHRQEAxz/TPlB6bUuOoqu+st72BKTU83S4+cW5koDFW + hsHu/bPbOWv3kloJtUrvbO9evbUGVhQyvu3ubJwW+i5ZWOHj/Q0MYOB0Fsl109xQVhEH+eE8SElM + OSryQq1hfHw4lUvWpJG/1N4Z0AbXeet2L15gtnVCpjuMFv9eAac0wrQoAQAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:25:15 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:25:15 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - UH1aMdrlnlnaxy/K+HUi5QUN2T0FBtGPSUC8sLrviqCK1XXfgHsSO5DneAd5J+6F + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"scope": "env:staging"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '24' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/downtime/cancel/by_scope + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWSk7MS07NyUlNic9MKVayijY3sjA0NzGzMNWBsYxiawG2gHfHJwAAAA== + headers: + Cache-Control: 
+ - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:25:16 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:25:16 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - xDB9TwFteerR1wCiwj8/TgXRHM8VsESQxiCQvltAxyn4fse47E64CquSvdpyvFXM + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/downtime/728174685 + response: + body: + string: !!binary | + H4sIAAAAAAAAA1WPTU7DMBCF7zJL5EUnaWuTLVeAVYWiwRksS4kd2ZOigrg746JWsPOz3s83X1DY + b6Vw8gxD2ubZAKfp9lxyipLLKBQqDCd4gFcDntQ8s5rw4HZ2bxGPBm7WeA/nEsam+g4d9gamWOnt + mpOysYEqVORPyUGrC1PbazHs7aNDNLCS4sn17xdQ4sKfOSkwvDw/gQHyEs8q32murdjnVdVJLzkP + uhJiCg184Vop3O+c8kdqVaNcmr0z0CZs59Duj05ptnUi4X803z976OAqMQEAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:25:16 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:25:16 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - 
+UwwYRc+A5vkEib2s1YY/+OMx26FxXkDPMnhrpaIz/kTVseyL62lC12FdLJrU3nv + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/dogshell/cassettes/TestDogshell.test_downtime_schedule.frozen b/tests/integration/dogshell/cassettes/TestDogshell.test_downtime_schedule.frozen new file mode 100644 index 000000000..4c0979373 --- /dev/null +++ b/tests/integration/dogshell/cassettes/TestDogshell.test_downtime_schedule.frozen @@ -0,0 +1 @@ +2020-02-03T17:47:17.193510+01:00 \ No newline at end of file diff --git a/tests/integration/dogshell/cassettes/TestDogshell.test_downtime_schedule.yaml b/tests/integration/dogshell/cassettes/TestDogshell.test_downtime_schedule.yaml new file mode 100644 index 000000000..3af12135a --- /dev/null +++ b/tests/integration/dogshell/cassettes/TestDogshell.test_downtime_schedule.yaml @@ -0,0 +1,300 @@ +interactions: +- request: + body: '{"end": null, "message": null, "scope": "env:staging", "start": "1580748437"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '77' + Content-Type: + - application/json + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/downtime + response: + body: + string: !!binary | + H4sIAAAAAAAAA2WPsW7DMAxE/4VjoCGyE1v22l9IpyAwGJkVBNiSIdEukqD/XjFBs3TjEce7xwck + smtKFCxBH9ZpUkBh/BvnGDzHNDC6DP0ZdnBRYLGYJ/pn8u9NTG4QVVfa6FrB6DNenxdfOGVSkBkT + Q6+PZt8ezKFuS2oilCq503XbGa0VLFjI+Ll7sbGf6R5DYYXP0wcoQMt+K5LTKrk2LkWcyw9bX0qc + D06QZ8oZ3fvDMX4HSRr4JvZKgTS0ldGdbrpGwbqMyPSCkeKfX3FoOcgoAQAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:47:17 GMT + Pragma: + - no-cache + Set-Cookie: + - 
DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:47:17 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - 3OCRM/4FZbkllI4iloi1acHDABD1SJi2aj2fysEPLLsOVOk5Ki6mi6IOsVG7JIay + X-DD-VERSION: + - '35.2135180' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/downtime/728191696 + response: + body: + string: !!binary | + H4sIAAAAAAAAA2WPsW7DMAxE/4VjoCGyE1v22l9IpyAwGJkVBNiSIdEukqD/XjFBs3TjEce7xwck + smtKFCxBH9ZpUkBh/BvnGDzHNDC6DP0ZdnBRYLGYJ/pn8u9NTG4QVVfa6FrB6DNenxdfOGVSkBkT + Q6+PZt8ezKFuS2oilCq503XbGa0VLFjI+Ll7sbGf6R5DYYXP0wcoQMt+K5LTKrk2LkWcyw9bX0qc + D06QZ8oZ3fvDMX4HSRr4JvZKgTS0ldGdbrpGwbqMyPSCkeKfX3FoOcgoAQAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:47:18 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:47:18 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - RbevWUvO2oQYYDnX/G1lndTh/kTt+ebFIvajU6/3Ivb5c6aUQf49/uD1ICaXyx52 + X-DD-VERSION: + - '35.2135180' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"end": "1580808437", "message": "Doing some testing on staging.", "scope": + "env:staging", "start": null}' + headers: + Accept: + - '*/*' + 
Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '105' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: PUT + uri: https://api.datadoghq.com/api/v1/downtime/728191696 + response: + body: + string: !!binary | + H4sIAAAAAAAAA1WP3UrEMBCFX6XMpQQx7br9uV0fQa8WKWM6hkCblGS6ouK7O7NVQXJ1hnPOd/IJ + mdyWM0VHMMRtng1QnGCw992dvEPTGlhSDJzyyOgLDGe4gWcDDiUyk1j31K8p/F1S9qOqpradbQxM + oeDLNfGKcyEDhTHzjmoPO8plQkVpzjZt31lrYEXZx9fbzuKw0EeKshieHk9gAB2Hi0jOm/a6tIo4 + y08ug0B8iF4nL1QKek09JDlVJS1UMRVWkWL1Y72Vwim9RaWM/K5VtQGlt3Vne3vsjwa2dUKmf0O/ + vgE4k/bYTQEAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:47:18 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:47:18 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - hABsPq9DIvV7yAEiU7rMxs7UCRuTbRH/kYpwue4a0q9qmwd4SUh9bBZ5SHPkBLc6 + X-DD-VERSION: + - '35.2135180' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '0' + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/downtime/728191696 + response: + body: + string: '' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '0' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - text/html; 
charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:47:18 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:47:18 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - GAK1J4mJd/EBZfEK4rqUw9OeB9GOeKgSyrXGtzNUi5zrv5sHYU56xJgA4bcbtgUA + X-DD-VERSION: + - '35.2135180' + X-Frame-Options: + - SAMEORIGIN + status: + code: 204 + message: No Content +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/downtime/728191696 + response: + body: + string: !!binary | + H4sIAAAAAAAAA1WPwWrDMBBEf8XssYhS2Wks+5p+QnMKxajyVghsyUjrhLb037trB0rRaZaZN6Nv + yOjWnDE6hD6u06QA4wi9fjZP/A5Nq2BOMVDKA1lfoL/AA7wpcJYjE96t7YGt5s8a+L7TUvaDqKbW + RjcKxlDs+5ajvKKCQjbT3rdBuM9ltNInMd20ndFawWJ5JG23HUxhxq8UeTacX0+gwDoKV5YfdioC + dmlhdeH/XHtu8SF6GT5jKdZL7CXxqSppxoqwkIgUq7v1kYljukWpGehTULUCqW9rozt97I4K1mW0 + hP+W/vwCL1jyW1MBAAA= + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:47:18 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:47:18 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - 1/ye/L7/S9djtmh0CDbapYOAoYP2Xz5NE904aTai4cgQw/Kmmv343hpHqBIP3PC5 + X-DD-VERSION: + - '35.2135180' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +version: 1 diff --git 
a/tests/integration/dogshell/cassettes/TestDogshell.test_event.yaml b/tests/integration/dogshell/cassettes/TestDogshell.test_event.yaml new file mode 100644 index 000000000..362d60172 --- /dev/null +++ b/tests/integration/dogshell/cassettes/TestDogshell.test_event.yaml @@ -0,0 +1,230 @@ +interactions: +- request: + body: '{"aggregation_key": null, "alert_type": null, "date_happened": null, "device": + null, "handle": null, "host": "test.host", "priority": "normal", "related_event_id": + null, "source_type_name": null, "tags": ["tag:a", "tag:b"], "text": "%%%\n*Cool!*\n%%%\n", + "title": "Testing events from dogshell"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '295' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/events + response: + body: + string: '{"status":"ok","event":{"id":5309722366481886321,"title":"Testing events + from dogshell","text":"%%%\n*Cool!*\n%%%\n","date_happened":1580746981,"handle":null,"priority":"normal","related_event_id":null,"tags":["tag:a","tag:b"],"url":"https://app.datadoghq.com/event/event?id=5309722366481886321"}}' + headers: + Connection: + - keep-alive + Content-Length: + - '297' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - propjoe + Date: + - Mon, 03 Feb 2020 16:23:01 GMT + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - SAMEORIGIN + status: + code: 202 + message: Accepted +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/events/5309722366481886321 + response: + body: + string: 
!!binary | + H4sIAAAAAAAAA3WQwYrDIBCGXyU7kEuRTU3a1AplD/sKvW2X4NZpIhgVNaGl9N1Xk+vuxXHGn28+ + fALOaCLwJ0gRsRuEc2hQAqd7tj3s2iOjBIRGH7v4cAgclLlZIOAx2Mlf86QSTlUzrRZUqPbN9nio + 66Ztd4wy1jY1Tfmoos7hM4aoTF+s4eLm7VhI24cBtU6xyetMXF7X80PJ0z9IvCdzKMvyYjaf1uq3 + zcUsXd4n+gD8K1cu1p7/wDcBibO6YmfEmHTMpDUB55X1Kj4SzFg/iiwy2JDhMem+L3cCKn3LHyav + 1y+qITycRgEAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb_events + Date: + - Mon, 03 Feb 2020 16:23:32 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:23:32 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - 8dI8aUSv3Fzq9LYH3wEHxmDECF/kuDIyQi68LDtl+z4qh8Ctb4ijNpflOPW/2exL + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/events/5309722366481886321 + response: + body: + string: !!binary | + H4sIAAAAAAAAA3WQwYrDIBCGXyU7kEuRTU3a1AplD/sKvW2X4NZpIhgVNaGl9N1Xk+vuxXHGn28+ + fALOaCLwJ0gRsRuEc2hQAqd7tj3s2iOjBIRGH7v4cAgclLlZIOAx2Mlf86QSTlUzrRZUqPbN9nio + 66Ztd4wy1jY1Tfmoos7hM4aoTF+s4eLm7VhI24cBtU6xyetMXF7X80PJ0z9IvCdzKMvyYjaf1uq3 + zcUsXd4n+gD8K1cu1p7/wDcBibO6YmfEmHTMpDUB55X1Kj4SzFg/iiwy2JDhMem+L3cCKn3LHyav + 1y+qITycRgEAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - 
application/json + DD-POOL: + - dogweb_events + Date: + - Mon, 03 Feb 2020 16:23:32 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:23:32 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - 3HdHD6wWiRfiP8hQjUNTx9zkEc5LUHfck4mpCo8hdcK7VOwtwatSX4gx0lRj+FOI + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/events?end=1580746982&start=1580745181&tags=tag%3Aa%2Ctag%3Ab + response: + body: + string: !!binary | + H4sIAAAAAAAAA21QQW6DMBD8Cl2JS2SVAAkhlqqq6rm33poIubAYS8a2bIOKovy9NlQ95WJ7dmZ3 + Z3wDnFF5B/TrBh3z2AzMGFTYAc2P9f50qM51ToBJtL7xi0GgIFSvgYBwDePcIg9tQHsmHRLwwsuo + +UTnheLJNj7prR6TTnM3oJShd7IyiLKV3c5X0b0cy/35VBRlVR3qvK6rssiD1uOPD+I0TS9q9661 + fNpd1IoiyXg0H2/KNky/4Uqg1eP4lyygDmfRYqPYGMypSUoCxgpthV/CaKXtyKItpyfbRvsfS/Jm + jAulQbu43Yc8z+ubgMV/XcaMyOZ8i+CyxwFE+MwHzP16/wWgqTgpfwEAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb_events + Date: + - Mon, 03 Feb 2020 16:23:33 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:23:32 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - HuDVQmribOZdwaj1ZjjmjW+fehYQ6D30fA0nzlwp9ktD8umEDGksNeEpk76WUpJ4 + X-DD-VERSION: + - 
'35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/dogshell/cassettes/TestDogshell.test_host_muting.frozen b/tests/integration/dogshell/cassettes/TestDogshell.test_host_muting.frozen new file mode 100644 index 000000000..679a74231 --- /dev/null +++ b/tests/integration/dogshell/cassettes/TestDogshell.test_host_muting.frozen @@ -0,0 +1 @@ +2020-02-03T17:24:51.056061+01:00 \ No newline at end of file diff --git a/tests/integration/dogshell/cassettes/TestDogshell.test_host_muting.seed b/tests/integration/dogshell/cassettes/TestDogshell.test_host_muting.seed new file mode 100644 index 000000000..f946b5683 --- /dev/null +++ b/tests/integration/dogshell/cassettes/TestDogshell.test_host_muting.seed @@ -0,0 +1 @@ +80355 \ No newline at end of file diff --git a/tests/integration/dogshell/cassettes/TestDogshell.test_host_muting.yaml b/tests/integration/dogshell/cassettes/TestDogshell.test_host_muting.yaml new file mode 100644 index 000000000..ecb853428 --- /dev/null +++ b/tests/integration/dogshell/cassettes/TestDogshell.test_host_muting.yaml @@ -0,0 +1,337 @@ +interactions: +- request: + body: '{"series": [{"device": null, "host": "my.test.host390d2fee9574d984e77ead0d51836837", + "metric": "metric", "points": [[1580747091.0976782, 1.0]], "tags": null, "type": + null}]}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '173' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/series + response: + body: + string: '{"status": "ok"}' + headers: + Connection: + - keep-alive + Content-Length: + - '16' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - text/json + DD-POOL: + - propjoe + Date: + - Mon, 03 Feb 2020 16:24:51 GMT + Strict-Transport-Security: + - 
max-age=15724800; + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - SAMEORIGIN + status: + code: 202 + message: Accepted +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/tags/hosts/my.test.host390d2fee9574d984e77ead0d51836837 + response: + body: + string: '{"tags":[]}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '11' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:25:12 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:25:12 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - ADT0ms9dQnbDHbbduv4c09ChngZrYY7A/Pgms/qacMOruS4mPwZ1GJWq74I7G11W + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"end": "1580750691", "message": "Muting this host for a test.", "override": + false}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '83' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/host/my.test.host390d2fee9574d984e77ead0d51836837/mute + response: + body: + string: !!binary | + H4sIAAAAAAAAAx3NQQrDIBCF4avIrEPQRDOaO/QMRTqTxIUKdUIppXev6fbx870PxIekWmCF2ylM + MADVV5GU+Z4IVpy8Qbu4MMBRm5SYuaf5PQo3Ga9pDpqmjTk4tBS8ZUSOpMkZPy9+xi5y6ZJxXqPT + SzADZG4t7pfUT1PZlRypqUtTW32qqP46fH93VMjHngAAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + 
Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:25:13 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:25:12 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - Wn01ZjXucAfzJfwvKAkpy0yFfNtHyWu4ZB2aA4ZDwwhXkyLHirYeUNsx208dZz9p + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"end": "1580751591", "message": null, "override": false}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '57' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/host/my.test.host390d2fee9574d984e77ead0d51836837/mute + response: + body: + string: '{"errors": ["host:my.test.host390d2fee9574d984e77ead0d51836837 is already + muted. 
To mute this host with a different end timestamp, add + ?override=true to your request."]}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '197' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:25:13 GMT + Pragma: + - no-cache + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 400 + message: Bad Request +- request: + body: '{"end": "1580751591", "message": null, "override": true}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '56' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/host/my.test.host390d2fee9574d984e77ead0d51836837/mute + response: + body: + string: !!binary | + H4sIAAAAAAAAAxXMMQ7DIAxA0bt4jiIIGJvcoWeoUOyqDMAQV1VV9e4l69fX+0I5rI4OO9xepgIL + yHh3q03vVWCnjT3FlPICz3FaL03n2j6r6WnrlUJ2sj1UM1KUzFGJtIgT9BwSB5qi9il5ZEfoMfvf + H/NSxhF1AAAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:25:14 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:25:13 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - FGm8mbL/ixNS/zyX94m5xaWAxszhu9w68KL0QwTbLNqYgp2ZyX2W4rsoYLDoadr+ + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + 
status: + code: 200 + message: OK +- request: + body: '{}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '2' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/host/my.test.host390d2fee9574d984e77ead0d51836837/unmute + response: + body: + string: !!binary | + H4sIAAAAAAAAAxXKOw6AIAwA0Lt0NgblU+AezobYGhmAwRpjjHdX15d3Q1oktwoRploOYYIOqJ1V + cuE5E0Qc/YDGudDB1napqfCXy9UL79L/pIOicWUOFg0FbxiREymyg9fOa4TnBQICZ+dmAAAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:25:14 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:25:14 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - fFk0sZgwwse+ZeEmqVGZPgcNG+SDXdM7Y74n6iOGuvoZenvaYEqZOvpOSMu1XDXx + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/dogshell/cassettes/TestDogshell.test_metrics.frozen b/tests/integration/dogshell/cassettes/TestDogshell.test_metrics.frozen new file mode 100644 index 000000000..260bb5d6d --- /dev/null +++ b/tests/integration/dogshell/cassettes/TestDogshell.test_metrics.frozen @@ -0,0 +1 @@ +2020-02-03T17:23:33.130142+01:00 \ No newline at end of file diff --git a/tests/integration/dogshell/cassettes/TestDogshell.test_metrics.seed b/tests/integration/dogshell/cassettes/TestDogshell.test_metrics.seed new file mode 100644 index 000000000..aef108cb6 --- /dev/null +++ 
b/tests/integration/dogshell/cassettes/TestDogshell.test_metrics.seed @@ -0,0 +1 @@ +89424 \ No newline at end of file diff --git a/tests/integration/dogshell/cassettes/TestDogshell.test_metrics.yaml b/tests/integration/dogshell/cassettes/TestDogshell.test_metrics.yaml new file mode 100644 index 000000000..d53d7d4e7 --- /dev/null +++ b/tests/integration/dogshell/cassettes/TestDogshell.test_metrics.yaml @@ -0,0 +1,324 @@ +interactions: +- request: + body: '{"series": [{"device": null, "host": "test.host.dogshell5088c10ad5aa256a67363a256ff98cfe5", + "metric": "test.dogshell.test_metric_088c10ad5aa256a67363a256ff98cfe5", "points": + [[1580747013.1737971, 1.0]], "tags": null, "type": null}]}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '232' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/series + response: + body: + string: '{"status": "ok"}' + headers: + Connection: + - keep-alive + Content-Length: + - '16' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - text/json + DD-POOL: + - propjoe + Date: + - Mon, 03 Feb 2020 16:23:33 GMT + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - SAMEORIGIN + status: + code: 202 + message: Accepted +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/search?q=088c10ad5aa256a67363a256ff98cfe5 + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWKkotLs0pKVayqlbKTS0pykwGMqOVSlKLS/RS8tOLM1JzcvRAvHiIbLyBhUWy + oUFiimliopGpWaKZubGZMYiVlmZpkZyWaqoUq6OUkV8MMhFqDIgHN8uUsP7aWgAmJ+txlgAAAA== + headers: + Cache-Control: + - no-cache + 
Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb_query + Date: + - Mon, 03 Feb 2020 16:24:26 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:24:26 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - irP5mbVDZgtaHShJp3dTrMqrmece84T1g9qqxQFFRZW79/d/ivT4r3B9hVIAOkpA + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"tags": ["t0", "t1"]}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '22' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/tags/hosts/test.host.dogshell5088c10ad5aa256a67363a256ff98cfe5 + response: + body: + string: '{"host":"test.host.dogshell5088c10ad5aa256a67363a256ff98cfe5","tags":["t0","t1"]}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '81' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:24:26 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:24:26 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - hvGKayUGXeVy/DmHDcIjD3+gP6x9d+NwveU9CYPD06LgIrg7NUxobVuhZiOcmptK + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 201 + message: Created +- request: + body: null + 
headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/tags/hosts/test.host.dogshell5088c10ad5aa256a67363a256ff98cfe5 + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWKklML1ayilYqMVDSUSoxVIqtBQB33G6xFAAAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:24:26 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:24:26 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - wNaVyRyNliLxKeX4pqFHOJTBG1dRCwo1/ihrnAf0GXtGNGahc1XK8Xzj/ssA3R20 + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"tags": ["t2", "t3"]}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '22' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: PUT + uri: https://api.datadoghq.com/api/v1/tags/hosts/test.host.dogshell5088c10ad5aa256a67363a256ff98cfe5 + response: + body: + string: '{"host":"test.host.dogshell5088c10ad5aa256a67363a256ff98cfe5","tags":["t2","t3"]}' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '81' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:24:27 GMT + Pragma: + - no-cache + 
Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:24:27 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - FB5oGxuL9E/cplxahdQnU5Nw5E7KX0Smq18it9qYKIt8BXsSloE0IpDRA39tfQwn + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 201 + message: Created +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '0' + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/tags/hosts/test.host.dogshell5088c10ad5aa256a67363a256ff98cfe5 + response: + body: + string: '' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '4' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:24:27 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:24:27 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - rK52fEhHKxisnKo2rembr3YValr4L+BG6/8tD9iNDc8Prg2mnV6DynBkS75++wh/ + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 204 + message: No Content +version: 1 diff --git a/tests/integration/dogshell/cassettes/TestDogshell.test_monitors.yaml b/tests/integration/dogshell/cassettes/TestDogshell.test_monitors.yaml new file mode 100644 index 000000000..ece82f804 --- /dev/null +++ b/tests/integration/dogshell/cassettes/TestDogshell.test_monitors.yaml @@ -0,0 +1,1174 @@ +interactions: +- request: + body: '{"message": null, "name": null, "options": null, "priority": "5", "query": + "avg(last_1h):sum:system.net.bytes_rcvd{*} by {host} > 100", "tags": ["main", + 
"test"], "type": "metric alert"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '185' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.45.1.dev (python 3.8.16; os darwin; arch arm64) + method: POST + uri: https://api.datadoghq.com/api/v1/monitor + response: + body: + string: !!binary | + H4sIAAAAAAAEA4xSwY7cIAy99ysQpzbNRpCZTHY5VD303FNvVYXY4EmQCMyCM6tolH+vmay0qtRK + 5QR+9vPzMzfuLFeyPfZdd5Ky5jGNuoQOrXyUh5rjegGu+AyY3MCMh4S85sHMJVpVec0IcxMAm+cV + Ies0XG1VsXiFxKpy2KvJdPvCpBCNKFBghlAzArNLcmFkOAFleJORyamqGmowQ86UQT3ogWbMXP3k + s3GhPCEj/1XzlwXSShnmOn4sxVpOn1ReZvVXUbdqY88ru00x48bucogrXtDFQOw3HiK686pD1Nag + 4epsfAYadQ+bxTp8D8KrLkTagjek4SBEzV0Y/GJB73IxLVSdnYcwABl62zYaa/HouNqxIYFBsNoQ + rzw9PgkpW9mLQvUG0XCtaA8Pon+Q4ofs1fGkur4R/VMn289CKCGKV9G6sys9/iebFAN15Sos3tc8 + kZm02aIjRQ/kxB6/JBeTQ5qtI5fKvrzXGUmwfu+3p/6BkojvkX0rDr6NEdPd3fuP2QsmE6wvuz2n + OH4tbts4Ti/NEGcqAlqz/xd4/63HY3eUp2378BsAAP//AwCzFjt6vgIAAA== + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Mon, 10 Jul 2023 17:46:57 GMT + Transfer-Encoding: + - chunked + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://logs.browser-intake-datadoghq.com/api/v2/logs?dd-api-key=pube4f163c23bbf91c16b8f57f56af9fc58&dd-evp-origin=content-security-policy&ddsource=csp-report&ddtags=site%3Adatadoghq.com + strict-transport-security: + - max-age=31536000; includeSubDomains; preload + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '500' + x-ratelimit-name: + - post_monitors + x-ratelimit-period: + - '10' + x-ratelimit-remaining: + - '497' + x-ratelimit-reset: + - '3' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - datadogpy/0.45.1.dev (python 3.8.16; os darwin; arch 
arm64) + method: GET + uri: https://api.datadoghq.com/api/v1/monitor/124755611 + response: + body: + string: !!binary | + H4sIAAAAAAAEA4xSwY7cIAy99ysQpzbNRpCZTHY5VD303FNvVYXY4EmQCMyCM6tolH+vmay0qtRK + 5QR+9vPzMzfuLFeyPfZdd5Ky5jGNuoQOrXyUh5rjegGu+AyY3MCMh4S85sHMJVpVec0IcxMAm+cV + Ies0XG1VsXiFxKpy2KvJdPvCpBCNKFBghlAzArNLcmFkOAFleJORyamqGmowQ86UQT3ogWbMXP3k + s3GhPCEj/1XzlwXSShnmOn4sxVpOn1ReZvVXUbdqY88ru00x48bucogrXtDFQOw3HiK686pD1Nag + 4epsfAYadQ+bxTp8D8KrLkTagjek4SBEzV0Y/GJB73IxLVSdnYcwABl62zYaa/HouNqxIYFBsNoQ + rzw9PgkpW9mLQvUG0XCtaA8Pon+Q4ofs1fGkur4R/VMn289CKCGKV9G6sys9/iebFAN15Sos3tc8 + kZm02aIjRQ/kxB6/JBeTQ5qtI5fKvrzXGUmwfu+3p/6BkojvkX0rDr6NEdPd3fuP2QsmE6wvuz2n + OH4tbts4Ti/NEGcqAlqz/xd4/63HY3eUp2378BsAAP//AwCzFjt6vgIAAA== + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Mon, 10 Jul 2023 17:46:57 GMT + Transfer-Encoding: + - chunked + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://logs.browser-intake-datadoghq.com/api/v2/logs?dd-api-key=pube4f163c23bbf91c16b8f57f56af9fc58&dd-evp-origin=content-security-policy&ddsource=csp-report&ddtags=site%3Adatadoghq.com + strict-transport-security: + - max-age=31536000; includeSubDomains; preload + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '3000' + x-ratelimit-name: + - get_monitors + x-ratelimit-period: + - '10' + x-ratelimit-remaining: + - '2998' + x-ratelimit-reset: + - '3' + status: + code: 200 + message: OK +- request: + body: '{"options": {"no_data_timeframe": 20, "notify_no_data": true}, "query": + "avg(last_1h):sum:system.net.bytes_rcvd{*} by {host} > 100", "type": "metric + alert"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '156' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.45.1.dev (python 3.8.16; os darwin; arch arm64) + method: PUT + uri: 
https://api.datadoghq.com/api/v1/monitor/124755611 + response: + body: + string: !!binary | + H4sIAAAAAAAEA2xRwW7cIBC99ysQp9R1LPCu1xsOOfXcU29VhIiZ9SJh2MB4I8vyv3fIJqkqhRPM + vHnz3mPlznIl233fdQcpax7TqEtp18qj3NUclwtwxSfA5AZmPCTkNQ9mKtWqyktGmJoA2DwvCFmn + 4WqrisUrJFaVw15Nptsjk0I0orQCM9Q1IzA7JxdGhmcghDcZmTxXVUMLJsiZELSDHmjGzNUfPhkX + yhMy8qeav8yQFkKY63hXhrU8f1d5ntSXotZqY88LW88x48be5BBXvKCLgdhXHqK2Bo1GN8EpvRls + BVmN6E6Lfu9yhWmGz6qZrUOuTsbnUoRXXei1BW9I2U7QvAuDny3om4nbdHYewgAU87ptZHb26D6Y + hwQGwWpDvPJwfBBStrIXheq9RZZb0e7uRX8vxW/Zq/1BdX0j+odOtj+EUEKUBKN1J1d2fInuxL4/ + HD/RpBhoK1dh9r7miSKm/y46UvRA+dzql+RickjeOsqu/KL3OiMJ1v/23aD/dUnEr8h+Urx8+/YX + AAD//wMAgj4UhnQCAAA= + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Mon, 10 Jul 2023 17:46:57 GMT + Transfer-Encoding: + - chunked + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://logs.browser-intake-datadoghq.com/api/v2/logs?dd-api-key=pube4f163c23bbf91c16b8f57f56af9fc58&dd-evp-origin=content-security-policy&ddsource=csp-report&ddtags=site%3Adatadoghq.com + strict-transport-security: + - max-age=31536000; includeSubDomains; preload + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '500' + x-ratelimit-name: + - put_monitors + x-ratelimit-period: + - '10' + x-ratelimit-remaining: + - '499' + x-ratelimit-reset: + - '3' + status: + code: 200 + message: OK +- request: + body: '{"message": "monitor updated"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '30' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.45.1.dev (python 3.8.16; os darwin; arch arm64) + method: PUT + uri: https://api.datadoghq.com/api/v1/monitor/124755611 + response: + body: + string: !!binary | + H4sIAAAAAAAEA2xRwW7cIBC99ysQp8R1LPDG6yyHnHLOKbeqQsTMepEwbGC8kWX53ztk01SVwgnm + 
zbx577FyZ7mS7X3fdXspax7TqEtp18oHuas5Lmfgik+AyQ3MeEjIax7MVKpVlZeMMDUBsHldELJO + w8VWFYsXSKwqh72bTLdHJoVoRIECM4SaEZidkwsjwxNQhzcZmTxVVUMLJsiZOsrmGBzGxOazNQiW + MDRj5uoXn4wL5QkZ+e+av82QFhowl/GmcGl5ulV5ntS3GtdqY68LW08x48Y+1BFXPKOLgdhXHqKm + hUajm+CYPvy2gpxHdMdFf6JcYZrhq2pm65Cro/G5FOFdF3ptwRtSthM078LgZwv6auI6nZ2HMJA3 + tW4beZ89ur/MQ4JiWxvilfuHg5Cylb0oVJ8QWW5Fu7sT/Z0UL7JX93vV9Y3oD51sfwqhhCiBRuuO + ruz4tnt/kG0nvrpJMZSwVZi9r3miiOn7i44UPVA+1/o5uZgckreOsiuf6r3OSIL1v33X1v9QEvEc + 2RPFy7cffwAAAP//AwCfdPdNgwIAAA== + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Mon, 10 Jul 2023 17:46:57 GMT + Transfer-Encoding: + - chunked + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://logs.browser-intake-datadoghq.com/api/v2/logs?dd-api-key=pube4f163c23bbf91c16b8f57f56af9fc58&dd-evp-origin=content-security-policy&ddsource=csp-report&ddtags=site%3Adatadoghq.com + strict-transport-security: + - max-age=31536000; includeSubDomains; preload + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '500' + x-ratelimit-name: + - put_monitors + x-ratelimit-period: + - '10' + x-ratelimit-remaining: + - '498' + x-ratelimit-reset: + - '3' + status: + code: 200 + message: OK +- request: + body: '{"priority": "4", "query": "avg(last_15m):sum:system.net.bytes_rcvd{*} + by {env} > 222", "tags": ["main"], "type": "query alert"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '128' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.45.1.dev (python 3.8.16; os darwin; arch arm64) + method: PUT + uri: https://api.datadoghq.com/api/v1/monitor/124755611 + response: + body: + string: !!binary | + H4sIAAAAAAAEA2xRwW7cIBS89ysQp9R1LGDX6yyHnHruqbeqQsR+60XCsIXnjSzL/95HnaaqFE7o + zTBvZli5G7iW6ti17UnKmsc0mjI6KPkkDzXH5QZc818zpIVZDwl5zYOdyrCq8pIRpiYANi8LQjap + 
vw9VxeIdEqvKYa820+2ZSSEaUaDALKF2BDbMyYWR4RWI4W1GJq9V1dCCCXImBu2YYnAYE5tvg0UY + CEM7Zq5/8Mm6wH/WuzVi2vv4UESMbKfPOs+T/tDdWm3sZWErhPvGnplSijTjDV0MJLvyEA1tsgbd + BJf0J6gSFDmiuyzmDeUa0wzvUzsPDrm+WJ/LEF7NNZKRAbxdqEpB713o/TyA2d3vr7PzEHoKpddt + o9CzR/dXuU9Q8hpLuvL0dBZSKtmJIvUGUWQl1OFRdI9SfJedPp502zWiO7dSfRFCC1GajIO7uLLj + Q/ZZqla172xyDKVlHWbva54gY3J98ZGiB+pnn9+Si8khZTtSd+U3vTcZybD5t2+n/oeSiW+RfaV6 + +fbpNwAAAP//AwB6HTh/ewIAAA== + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Mon, 10 Jul 2023 17:46:57 GMT + Transfer-Encoding: + - chunked + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://logs.browser-intake-datadoghq.com/api/v2/logs?dd-api-key=pube4f163c23bbf91c16b8f57f56af9fc58&dd-evp-origin=content-security-policy&ddsource=csp-report&ddtags=site%3Adatadoghq.com + strict-transport-security: + - max-age=31536000; includeSubDomains; preload + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '500' + x-ratelimit-name: + - put_monitors + x-ratelimit-period: + - '10' + x-ratelimit-remaining: + - '497' + x-ratelimit-reset: + - '3' + status: + code: 200 + message: OK +- request: + body: '{"end": null, "scope": []}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '26' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.45.1.dev (python 3.8.16; os darwin; arch arm64) + method: POST + uri: https://api.datadoghq.com/api/v1/monitor/124755611/mute + response: + body: + string: !!binary | + H4sIAAAAAAAEA2xRTY/bIBC991cgTq3rtYDE8YbDnnruqbeqQqyZOEgYUhhnZUX+7x02260qLSc0 + 7zHvgxv3jmup9kPfH6RsecqTqaOdko9y13JcL8A1/71AXpkNkJG3PNq5DpumrAVh7iJg97wiFJPH + q2salq6QWVMPe7GFbk9MCtGJCkVmCbUTMLdkHyeGZyBGsAWZPDdNRwIzlEIM0phT9JgyWy7OIjjC + 0E6F6598tj7yX+3dGjHtdfpclxjZz190WWb9obtbs7Hnld0gXjf2xJRStDNd0KdIa288JkNK1qCf + 
4ZRfgypBkRP602reUK4xL/A+tYvzyPXJhlKH8GLOiYw4CHalKgW993EMiwNzd39/XXyAOFIokm24 + jksI20bhl4D+r8KYoeY2lvbLw+NRSKnkIOrKN4iiK6F2D2J4kOKHHPT+oPuhE8Oxl+qrEFqI2mhy + /uSr1ofso1S96t/Z5Bxq26+eWp6hYPZj9ZFTAOqpem35JfuUPVLGPXVYfzUEU5AMm396d+p/KJn4 + ntg3qplvn/4AAAD//wMA3GGwo4MCAAA= + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Mon, 10 Jul 2023 17:46:58 GMT + Transfer-Encoding: + - chunked + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://logs.browser-intake-datadoghq.com/api/v2/logs?dd-api-key=pube4f163c23bbf91c16b8f57f56af9fc58&dd-evp-origin=content-security-policy&ddsource=csp-report&ddtags=site%3Adatadoghq.com + strict-transport-security: + - max-age=31536000; includeSubDomains; preload + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"all_scopes": true, "scope": []}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '33' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.45.1.dev (python 3.8.16; os darwin; arch arm64) + method: POST + uri: https://api.datadoghq.com/api/v1/monitor/124755611/unmute + response: + body: + string: !!binary | + H4sIAAAAAAAEA2xRwW7cIBS89ysQp9R1LGDX6yyHnHruqbeqQsR+60XCsIXnjSzL/95HnaaqFE7o + zTBvZli5G7iW6ti17UnKmsc0mjI6KPkkDzXH5QZc818zpIVZDwl5zYOdyrCq8pIRpiYANi8LQjap + vw9VxeIdEqvKYa820+2ZSSEaUaDALKF2BDbMyYWR4RWI4W1GJq9V1dCCCXImBu2YYnAYE5tvg0UY + CEM7Zq5/8Mm6wH/WuzVi2vv4UESMbKfPOs+T/tDdWm3sZWErhPvGnplSijTjDV0MJLvyEA1tsgbd + BJf0J6gSFDmiuyzmDeUa0wzvUzsPDrm+WJ/LEF7NNZKRAbxdqEpB713o/TyA2d3vr7PzEHoKpddt + o9CzR/dXuU9Q8hpLuvL0dBZSKtmJIvUGUWQl1OFRdI9SfJedPp502zWiO7dSfRFCC1GajIO7uLLj + Q/ZZqla172xyDKVlHWbva54gY3J98ZGiB+pnn9+Si8khZTtSd+U3vTcZybD5t2+n/oeSiW+RfaV6 + +fbpNwAAAP//AwB6HTh/ewIAAA== + headers: + Connection: + - keep-alive + Content-Type: + - application/json + 
Date: + - Mon, 10 Jul 2023 17:46:58 GMT + Transfer-Encoding: + - chunked + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://logs.browser-intake-datadoghq.com/api/v2/logs?dd-api-key=pube4f163c23bbf91c16b8f57f56af9fc58&dd-evp-origin=content-security-policy&ddsource=csp-report&ddtags=site%3Adatadoghq.com + strict-transport-security: + - max-age=31536000; includeSubDomains; preload + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"options": {"silenced": {"host:abcd1234": null, "host:abcd1235": null}}, + "query": "avg(last_1h):sum:system.net.bytes_rcvd{*} by {host} > 100", "type": + "metric alert"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '167' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.45.1.dev (python 3.8.16; os darwin; arch arm64) + method: PUT + uri: https://api.datadoghq.com/api/v1/monitor/124755611 + response: + body: + string: !!binary | + H4sIAAAAAAAEA2yRMW/bMBCF9/wKglOrKAIpW5bDoVPnTt2CgKDFs0yAIl3y5EAw9N97rIIGAaKJ + ePd47+PTnTvLlWz3fdcdpKx5TKMu0q6VR7mrOS5X4Ir/mSEtzHhIyGsezFTEqspLRpiaANicFoSs + 03CzVcXiDRKrysfeTKbTDyaFaEQZBWZoakZgdk4ujAwvQA5vMjJ5qaqGAibImRyUMcXgMCY2X61B + sDRDM2auXvhkXOCv9YZGTnMbv5UlWl6+qzxP6ku4e7Wy08Lul5hxZf+waGe8oouB1t55iOjOiw5R + U6Dh6mx8BnryJpvZOvwQ4U2XRdqCNwuVJkTNXRj8bEFvnJhmup2dhzAQPgWUC8qcBivb3Z6rMHtf + fxK7TVxX6mH26LjalgwJSgXaEIA8HJ+FlK3sRcl8H1ELrWh3T6J/kuK37NX+oLq+Ef1zJ9tHIZQQ + pdxo3dkVmC/cx6aTh052/930NCjFv4MmyJjcUDhS9ECVbQ+4JheTQyphT3WWH+y9zkjA+iNvs36a + EsSvyH6WqteHvwAAAP//AwD/pNRZjgIAAA== + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Mon, 10 Jul 2023 17:46:58 GMT + Transfer-Encoding: + - chunked + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri 
https://logs.browser-intake-datadoghq.com/api/v2/logs?dd-api-key=pube4f163c23bbf91c16b8f57f56af9fc58&dd-evp-origin=content-security-policy&ddsource=csp-report&ddtags=site%3Adatadoghq.com + strict-transport-security: + - max-age=31536000; includeSubDomains; preload + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '500' + x-ratelimit-name: + - put_monitors + x-ratelimit-period: + - '10' + x-ratelimit-remaining: + - '496' + x-ratelimit-reset: + - '2' + status: + code: 200 + message: OK +- request: + body: '{"all_scopes": true, "scope": []}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '33' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.45.1.dev (python 3.8.16; os darwin; arch arm64) + method: POST + uri: https://api.datadoghq.com/api/v1/monitor/124755611/unmute + response: + body: + string: !!binary | + H4sIAAAAAAAEA2xRTW+cMBC991dYPrWUIJtdlo0PPfXcU29RZDl4lrVk7K09bIQQ/73jUiWKFE5o + 3uN9sXJnuZLtse+6k5Q1j2nU5XRo5Vkeao7LDbjif2ZICzMeEvKaBzOVY1XlJSNMTQBsXhaErNNw + t1XF4h0Sq8rDXk2mtx9MCtGIAgVmCDUjMDsnF0aGVyCGNxmZvFZVQwYT5EwM8phicBgTm2/WIFjC + 0IyZqyc+GRf4c71HI6a5j1+LiJbXbyrPk/o03Fpt7GVh6zVm3Ni/WKQZb+hiINmVh4jusugQNRka + ri7GZ6DK+9nM1uH7EV51EdIWvFloNCFq7sLgZwt6z4lppq+z8xAGiq/WbaN6s0fH1Y4NCUozbUhX + ns6PQspW9qJI/YeoXCvaw4PoH6T4LXt1PKmub0T/2Mn2uxBKiLJZtO7iiscn7HPTyVMnuzc2JYay + pwqz9zVPkDG5oeRI0QMtsd9vycXkkLodaaXy37zXGSmwfvfbqR9QCvErsp9lwe3LXwAAAP//AwCp + euDvZQIAAA== + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Mon, 10 Jul 2023 17:46:58 GMT + Transfer-Encoding: + - chunked + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://logs.browser-intake-datadoghq.com/api/v2/logs?dd-api-key=pube4f163c23bbf91c16b8f57f56af9fc58&dd-evp-origin=content-security-policy&ddsource=csp-report&ddtags=site%3Adatadoghq.com + 
strict-transport-security: + - max-age=31536000; includeSubDomains; preload + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - datadogpy/0.45.1.dev (python 3.8.16; os darwin; arch arm64) + method: GET + uri: https://api.datadoghq.com/api/v1/monitor/can_delete?monitor_ids=124755611 + response: + body: + string: !!binary | + H4sIAAAAAAAEA6pWSkksSVSyqlbKz1ayijY0MjE3NTUzNIyt1VFKLSrKLypWssorzcmp5QIAAAD/ + /wMANNjf3ioAAAA= + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Mon, 10 Jul 2023 17:46:59 GMT + Transfer-Encoding: + - chunked + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://logs.browser-intake-datadoghq.com/api/v2/logs?dd-api-key=pube4f163c23bbf91c16b8f57f56af9fc58&dd-evp-origin=content-security-policy&ddsource=csp-report&ddtags=site%3Adatadoghq.com + strict-transport-security: + - max-age=31536000; includeSubDomains; preload + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '12000' + x-ratelimit-name: + - can_delete + x-ratelimit-period: + - '60' + x-ratelimit-remaining: + - '11999' + x-ratelimit-reset: + - '1' + status: + code: 200 + message: OK +- request: + body: '{"monitor_ids": [124755611], "name": "test_slo", "thresholds": [{"target": + 90.0, "timeframe": "7d"}], "type": "monitor"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '120' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.45.1.dev (python 3.8.16; os darwin; arch arm64) + method: POST + uri: https://api.datadoghq.com/api/v1/slo + response: + body: + string: !!binary | + 
H4sIAAAAAAAEA2yQzW7EIAyE730Mn6NVoJvfU9+jqpATIEEiIQV6WEV599rbbHvoHhnb882wg8aM + 0L/v4DT0MJbWIjYtylZUdpBd3dXVIFt9fW2sGCQUsOJiaDOblFXygZSMUyKLjwKWsLocovpT8hxN + moPXvLFDdoux8ceh0ffbOJkMfVdeSnbil9IubR5vRCEZDjLOt42hpz/fkaA4Ml1pk8botuzCSjs8 + fE5Rv2EevEded48n5LWpqloIAo7RIDWBfj8Lr1/eFzDjqj0nsTFMb/x3Okzz52UMC3HNgs4/Hx6n + pdEKqa6o264UQoqOP0076/4NuLaJkTMw+3j5BgAA//8DAOxHagKsAQAA + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Mon, 10 Jul 2023 17:46:59 GMT + Transfer-Encoding: + - chunked + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://logs.browser-intake-datadoghq.com/api/v2/logs?dd-api-key=pube4f163c23bbf91c16b8f57f56af9fc58&dd-evp-origin=content-security-policy&ddsource=csp-report&ddtags=site%3Adatadoghq.com + strict-transport-security: + - max-age=31536000; includeSubDomains; preload + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '200' + x-ratelimit-name: + - slo_create + x-ratelimit-period: + - '60' + x-ratelimit-remaining: + - '199' + x-ratelimit-reset: + - '1' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - datadogpy/0.45.1.dev (python 3.8.16; os darwin; arch arm64) + method: GET + uri: https://api.datadoghq.com/api/v1/monitor/can_delete?monitor_ids=124755611 + response: + body: + string: !!binary | + H4sIAAAAAAAEAzyOzQrCMBCE7z7FkmMopYn2xx58kVJk22y0qAnsRkWk7256cU7DfN9hvsphQtV/ + VbypfhjXQhFzZNkmYw9tXTfGZKIeMSwpMgz/tdBaPpLoUQZK5fRJJGeeX05riC9i0FvgjZLbCUxV + ldWGAmCmeCFwT17CBdKVsnFHSWCuWpcjLAJMnpjCTA6WAHKP0sMwV94jth3aztR+ssfm2NST7dxh + 33oz2SJfSOcsj2pc190PAAD//wMA2CEI7t0AAAA= + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Mon, 10 Jul 2023 17:46:59 GMT + Transfer-Encoding: + - chunked + content-encoding: + - gzip + 
content-security-policy: + - frame-ancestors 'self'; report-uri https://logs.browser-intake-datadoghq.com/api/v2/logs?dd-api-key=pube4f163c23bbf91c16b8f57f56af9fc58&dd-evp-origin=content-security-policy&ddsource=csp-report&ddtags=site%3Adatadoghq.com + strict-transport-security: + - max-age=31536000; includeSubDomains; preload + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '12000' + x-ratelimit-name: + - can_delete + x-ratelimit-period: + - '60' + x-ratelimit-remaining: + - '11998' + x-ratelimit-reset: + - '1' + status: + code: 409 + message: Conflict +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '0' + User-Agent: + - datadogpy/0.45.1.dev (python 3.8.16; os darwin; arch arm64) + method: DELETE + uri: https://api.datadoghq.com/api/v1/slo/c0ffaa78a2815fb296965b28d437f1b2?return_raw=True + response: + body: + string: !!binary | + H4sIAAAAAAAEA6pWSkksSVSyilZKNkhLS0w0t0g0sjA0TUsysjSzNDNNMrJIMTE2TzNMMlKK1VFK + LSrKL1KyyivNyanlAgAAAP//AwAVF3edOwAAAA== + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Mon, 10 Jul 2023 17:46:59 GMT + Transfer-Encoding: + - chunked + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://logs.browser-intake-datadoghq.com/api/v2/logs?dd-api-key=pube4f163c23bbf91c16b8f57f56af9fc58&dd-evp-origin=content-security-policy&ddsource=csp-report&ddtags=site%3Adatadoghq.com + strict-transport-security: + - max-age=31536000; includeSubDomains; preload + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '100' + x-ratelimit-name: + - slo_delete + x-ratelimit-period: + - '10' + x-ratelimit-remaining: + - '99' + x-ratelimit-reset: + - '1' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' 
+ Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - datadogpy/0.45.1.dev (python 3.8.16; os darwin; arch arm64) + method: GET + uri: https://api.datadoghq.com/api/v1/monitor/can_delete?monitor_ids=124755611 + response: + body: + string: !!binary | + H4sIAAAAAAAEA6pWSkksSVSyqlbKz1ayijY0MjE3NTUzNIyt1VFKLSrKLypWssorzcmp5QIAAAD/ + /wMANNjf3ioAAAA= + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Mon, 10 Jul 2023 17:47:00 GMT + Transfer-Encoding: + - chunked + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://logs.browser-intake-datadoghq.com/api/v2/logs?dd-api-key=pube4f163c23bbf91c16b8f57f56af9fc58&dd-evp-origin=content-security-policy&ddsource=csp-report&ddtags=site%3Adatadoghq.com + strict-transport-security: + - max-age=31536000; includeSubDomains; preload + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '12000' + x-ratelimit-name: + - can_delete + x-ratelimit-period: + - '60' + x-ratelimit-remaining: + - '11997' + x-ratelimit-reset: + - '1' + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '0' + User-Agent: + - datadogpy/0.45.1.dev (python 3.8.16; os darwin; arch arm64) + method: DELETE + uri: https://api.datadoghq.com/api/v1/monitor/124755611 + response: + body: + string: !!binary | + H4sIAAAAAAAEA6pWSknNSS1JTYnPzc/LLMkvis9MUbIyNDIxNzU1MzSs5QIAAAD//wMAojMKsSEA + AAA= + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Mon, 10 Jul 2023 17:47:00 GMT + Transfer-Encoding: + - chunked + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri 
https://logs.browser-intake-datadoghq.com/api/v2/logs?dd-api-key=pube4f163c23bbf91c16b8f57f56af9fc58&dd-evp-origin=content-security-policy&ddsource=csp-report&ddtags=site%3Adatadoghq.com + strict-transport-security: + - max-age=31536000; includeSubDomains; preload + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - datadogpy/0.45.1.dev (python 3.8.16; os darwin; arch arm64) + method: GET + uri: https://api.datadoghq.com/api/v1/monitor/124755611 + response: + body: + string: !!binary | + H4sIAAAAAAAEA6pWSi0qyi8qVrKKVvLNz8ssyS9SyMsvUUjLL81LUYqtBQAAAP//AwC8H0oMIAAA + AA== + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Mon, 10 Jul 2023 17:47:00 GMT + Transfer-Encoding: + - chunked + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://logs.browser-intake-datadoghq.com/api/v2/logs?dd-api-key=pube4f163c23bbf91c16b8f57f56af9fc58&dd-evp-origin=content-security-policy&ddsource=csp-report&ddtags=site%3Adatadoghq.com + strict-transport-security: + - max-age=31536000; includeSubDomains; preload + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-ratelimit-limit: + - '3000' + x-ratelimit-name: + - get_monitors + x-ratelimit-period: + - '10' + x-ratelimit-remaining: + - '2999' + x-ratelimit-reset: + - '10' + status: + code: 404 + message: Not Found +- request: + body: '{}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '2' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.45.1.dev (python 3.8.16; os darwin; arch arm64) + method: POST + uri: https://api.datadoghq.com/api/v1/monitor/mute_all + response: + body: + string: !!binary | + 
H4sIAAAAAAAEA2RRwU7DMAy98xXIR9RKTduVbld+AU4IVV7ijkhdUiXOxkD8O061QSVyynt2nt9z + vsAa2DVVv2m6uu5VAUfvLPswZN6laSrAh8OCmlr1qikgMgaGner6baVUXVcFkPvt1ug0TfSHAyFn + uO4/emNH+4+lGPFAt7mo2Z4EcUhUgLER94vuiFMUIpBOIZBMuz1ge6RP7wTDy/MTFDCj1HkVRb/b + yaxxNncNq9p206qugDQbcbzegPFnl8UHvsyiLoFlB5yiDLqa/Nsb40H4V3iANyET0zDaEHkQu/5E + 4TI4z5JdI1vvYHcNs5CXQfa4zFgU6GO2QXYkOqtynkxLHSeSf8jFezGMcjtjcLk9pfx7oDe6NU2n + SzW221IponLfjlQafERTbSs5tbyK2udUi+Xvux8AAAD//wMADCzjmBICAAA= + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Mon, 10 Jul 2023 17:47:00 GMT + Transfer-Encoding: + - chunked + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://logs.browser-intake-datadoghq.com/api/v2/logs?dd-api-key=pube4f163c23bbf91c16b8f57f56af9fc58&dd-evp-origin=content-security-policy&ddsource=csp-report&ddtags=site%3Adatadoghq.com + strict-transport-security: + - max-age=31536000; includeSubDomains; preload + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '2' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.45.1.dev (python 3.8.16; os darwin; arch arm64) + method: POST + uri: https://api.datadoghq.com/api/v1/monitor/unmute_all + response: + body: + string: '' + headers: + Connection: + - keep-alive + Date: + - Mon, 10 Jul 2023 17:47:00 GMT + content-security-policy: + - frame-ancestors 'self'; report-uri https://logs.browser-intake-datadoghq.com/api/v2/logs?dd-api-key=pube4f163c23bbf91c16b8f57f56af9fc58&dd-evp-origin=content-security-policy&ddsource=csp-report&ddtags=site%3Adatadoghq.com + strict-transport-security: + - max-age=31536000; includeSubDomains; preload + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + status: + 
code: 204 + message: No Content +- request: + body: '{}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '2' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.45.1.dev (python 3.8.16; os darwin; arch arm64) + method: POST + uri: https://api.datadoghq.com/api/v1/monitor/unmute_all + response: + body: + string: !!binary | + H4sIAAAAAAAEA6pWSi0qyi8qVrKKVvLNz8ssyS/KzEtXyCxWyMsvUUguLSpKzSvJqVRIz8lPSswB + MnJLS1JTlGJrAQAAAP//AwAgKisAOQAAAA== + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Mon, 10 Jul 2023 17:47:00 GMT + Transfer-Encoding: + - chunked + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://logs.browser-intake-datadoghq.com/api/v2/logs?dd-api-key=pube4f163c23bbf91c16b8f57f56af9fc58&dd-evp-origin=content-security-policy&ddsource=csp-report&ddtags=site%3Adatadoghq.com + strict-transport-security: + - max-age=31536000; includeSubDomains; preload + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + status: + code: 400 + message: Bad Request +- request: + body: '{"message": null, "name": null, "options": {"thresholds": {"critical": + 200.0}}, "query": "THIS IS A BAD QUERY", "tags": null, "type": "metric alert"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '149' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.45.1.dev (python 3.8.16; os darwin; arch arm64) + method: POST + uri: https://api.datadoghq.com/api/v1/monitor/validate + response: + body: + string: !!binary | + H4sIAAAAAAAEA6pWSi0qyi8qVrKKVgrJSFUoS8wpTVUoKMovy0xJTVFIyy9SKEgsSsxNLUktUlAv + LE0tqlRXyCxWyMwDqsxMUYqtBQAAAP//AwDyEmwTQgAAAA== + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Mon, 10 Jul 2023 17:47:01 GMT + Transfer-Encoding: + - chunked + 
content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://logs.browser-intake-datadoghq.com/api/v2/logs?dd-api-key=pube4f163c23bbf91c16b8f57f56af9fc58&dd-evp-origin=content-security-policy&ddsource=csp-report&ddtags=site%3Adatadoghq.com + strict-transport-security: + - max-age=31536000; includeSubDomains; preload + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + status: + code: 400 + message: Bad Request +- request: + body: '{"message": null, "name": null, "options": {"thresholds": {"critical": + 90.0}}, "query": "avg(last_1h):sum:system.net.bytes_rcvd{host:host0} > 200", + "tags": null, "type": "metric alert"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '185' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.45.1.dev (python 3.8.16; os darwin; arch arm64) + method: POST + uri: https://api.datadoghq.com/api/v1/monitor/validate + response: + body: + string: !!binary | + H4sIAAAAAAAEA6pWSi0qyi8qVrKKVnLMSS0qUSjJKEotzsjPSVHQsDTQM9BUSMlPLVbIyy9RyE0s + Sc4AyieWKJQWp6YoZOYBOakKhaWpRZUKGkYGINV6SrG1AAAAAP//AwARblSWVAAAAA== + headers: + Connection: + - keep-alive + Content-Type: + - application/json + Date: + - Mon, 10 Jul 2023 17:47:01 GMT + Transfer-Encoding: + - chunked + content-encoding: + - gzip + content-security-policy: + - frame-ancestors 'self'; report-uri https://logs.browser-intake-datadoghq.com/api/v2/logs?dd-api-key=pube4f163c23bbf91c16b8f57f56af9fc58&dd-evp-origin=content-security-policy&ddsource=csp-report&ddtags=site%3Adatadoghq.com + strict-transport-security: + - max-age=31536000; includeSubDomains; preload + vary: + - Accept-Encoding + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + status: + code: 400 + message: Bad Request +- request: + body: '{"message": null, "name": null, "options": {"thresholds": {"critical": + 200.0}}, "query": 
"avg(last_1h):sum:system.net.bytes_rcvd{host:host0} > 200", + "tags": null, "type": "metric alert"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '186' + Content-Type: + - application/json + User-Agent: + - datadogpy/0.45.1.dev (python 3.8.16; os darwin; arch arm64) + method: POST + uri: https://api.datadoghq.com/api/v1/monitor/validate + response: + body: + string: '{} + + ' + headers: + Connection: + - keep-alive + Content-Length: + - '3' + Content-Type: + - application/json + Date: + - Mon, 10 Jul 2023 17:47:01 GMT + content-security-policy: + - frame-ancestors 'self'; report-uri https://logs.browser-intake-datadoghq.com/api/v2/logs?dd-api-key=pube4f163c23bbf91c16b8f57f56af9fc58&dd-evp-origin=content-security-policy&ddsource=csp-report&ddtags=site%3Adatadoghq.com + strict-transport-security: + - max-age=31536000; includeSubDomains; preload + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + status: + code: 200 + message: OK +version: 1 diff --git a/tests/integration/dogshell/cassettes/TestDogshell.test_screenboards.frozen b/tests/integration/dogshell/cassettes/TestDogshell.test_screenboards.frozen new file mode 100644 index 000000000..7cf3c4f34 --- /dev/null +++ b/tests/integration/dogshell/cassettes/TestDogshell.test_screenboards.frozen @@ -0,0 +1 @@ +2020-02-03T17:24:34.386383+01:00 \ No newline at end of file diff --git a/tests/integration/dogshell/cassettes/TestDogshell.test_screenboards.seed b/tests/integration/dogshell/cassettes/TestDogshell.test_screenboards.seed new file mode 100644 index 000000000..35e707521 --- /dev/null +++ b/tests/integration/dogshell/cassettes/TestDogshell.test_screenboards.seed @@ -0,0 +1 @@ +44244 \ No newline at end of file diff --git a/tests/integration/dogshell/cassettes/TestDogshell.test_screenboards.yaml b/tests/integration/dogshell/cassettes/TestDogshell.test_screenboards.yaml new file mode 100644 index 000000000..37b56c94b --- 
/dev/null +++ b/tests/integration/dogshell/cassettes/TestDogshell.test_screenboards.yaml @@ -0,0 +1,1158 @@ +interactions: +- request: + body: '{"board_title": "/tmp/1580747074.387375-0.8449636613109909", "description": + "Description for /tmp/1580747074.387375-0.8449636613109909", "widgets": [{"definition": + {"requests": [{"q": "testing.metric.1{host:blah.host.1}"}], "viz": "timeseries"}, + "title": "test metric graph"}]}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '277' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/screen + response: + body: + string: !!binary | + H4sIAAAAAAAAA5WQwU6EMBCGX4X0KnSntFtozz6CN2MISws7CVC2rW6U8O4WMXoz8TDJzPx/vsw/ + K7m41psmYhwt0eQUp+XEzjVUokpFeV3x6lwArYVQkkvJOAOlQJGceNuaxs3jO9F9OwabE2ND53GJ + 6ObEevydst757D/sLsGjNYlSQgkFlAXwJyZ1KTQXVHFWc/EAoAHSIbO9N7h7RRkKVfaFUG1aT85g + j39CBPxA7mgGGwPRz2vK0eOMR4yVvOFHYkecbLAebfiKfnu14dt929U04TzQyUaPHWXr1YWoL2N7 + pXtH2Ua2ly1PlOPPuz87zNng2+Wa5JzsIZSUQtTbJ4pHCeuZAQAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:24:34 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:24:34 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - nL/U8Nu7782wU68M7elx8MY/T+2opB0U5/flvjGsH/qXfYEORYWxwdDpQFq78Mxt + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, 
deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/screen/966448 + response: + body: + string: !!binary | + H4sIAAAAAAAAA5VSTY/bIBD9KxaHXhpjbOMPLK3aQ6/bU29VZWHA8UjGJECy2kb57x3Hbntb7R6Q + GObNmzdvuJHBSa/7CHE2pCNZtKcsr1rW8AYPLdumbKqU0ZZzUZd1nZc5E4IJciDeSN27ZX4l3Sjn + YA5Em6A8nCK4Bbm+/Y+S0fnkI9wKyaPRyFKwgqWsSFn5I6+7gnclp6LM25J/ZqxjDIUs5qWHFcuL + kIpiTLmQ+GydhhHeJOHsH8nesR9wnBvREOQwr7X7aJNc9MOhBdTkZhmoveB801cto9TuOJ2pcnYV + I+0K+77DkucHDBO7xctlng8EQi+1BbQp+gs6591KvuWkUiaEfnsiCMPiq/H7LBveWAnzO9SAeqxi + ivEUuiwLRl28oUcvryjbr5Kz7YpLV3wYJJN13QxVKRomikqYsaj0KEXFvoQn3n7ST95E78j9QF5A + H00MpPuJdpkRFtj2fiNX+I09I1gTULYJj79yRh929HnNYgTLkVqkA0Xz2+RC7IZZTnS90fxO7r+w + y9+PueKTDZyg/NOEabQRFyTqmvP2/ge12dYqygIAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:24:35 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:24:35 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - 6TICFxDFBNq65Lw6aA0hO1z7nxUSiTzUAT0k7ln4UasEU6/emXomwtYWMJdIuxUV + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"board_title": "/tmp/1580747074.387375-0.8449636613109909", "created": + "2020-02-03T16:24:34.931834+00:00", "created_by": {"access_role": "adm", "disabled": + false, "email": "nicholas.muesch@datadoghq.com", "handle": "nicholas.muesch@datadoghq.com", + "icon": "https://secure.gravatar.com/avatar/15c4bba0a667b539709259ef25dfa950?s=48&d=retro", + "is_admin": true, "name": "Nicholas 
Muesch", "role": null, "title": null, "verified": + true}, "description": "Description for /tmp/1580747074.387375-0.8449636613109909", + "modified": "2020-02-03T16:24:34.931840+00:00", "new_id": "42s-92f-49a", "read_only": + false, "title": "screenboard title 793bb5a385c51b206bd69d4f9a604508", "widgets": + [{"definition": {"requests": [{"q": "testing.metric.1{host:blah.host.1}"}], + "viz": "timeseries"}, "title": "test metric graph"}]}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '806' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: PUT + uri: https://api.datadoghq.com/api/v1/screen/966448 + response: + body: + string: !!binary | + H4sIAAAAAAAAA5WSTY/bIBCG/4rFoZcmBPNlY2m1e+i1PfVWVRYYHCPZJgGyqzbKf+/4Y9tTpfaA + BMzLM8M7c0cm6Gjb7PPoUINOebqcSlGTilewMKsrVokjwTXnSjIpS1YSpYhCBxSdtm2Yxx+o6fWY + 3AFZl7roL9mHGVif/pyKPsTif9jv9QDPuXmtsVjvikoxY4RmtehEaSiRxkplea+0JFyQGgqDNzo7 + CyVQQsmR0CNhX0vZUN4wjhUra8Y/EtIQAuLZvbV+0XKajor2R640XE/B+t7/BSIwlVzQ6jdkz9ga + 8OKOXl3c3+Z4A1tmPS3efvHdEEadis83cGaAJPsv59s4HpDuOpdSG8PaCG0nEFiftBmXKnaHfWoh + 4sHeDT3o2a76eYfjaYW/WJ21DefhiruwkDbslslN2o//8MZ3ax+HnC+pOZ2S627R4XPUrwCPC/i0 + bWFiOm6MJlrKygimKqKoUK6nwvZaCfKcnnj9wT5Fl2NAjwN68/bsckLNtztMTe9nvw0NmOd/Qs7s + J5fARpfWQbvCp3b1dYnCyc9nPAHOd7i8DyHlxox6wMsOlw/0+A5Z3qdo0RebuIDyLwOED2jpupKS + 8/rxC8qUd9UHAwAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:24:35 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:24:35 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + 
X-Content-Type-Options: + - nosniff + X-DD-Debug: + - J5PL0LnJukdy69mckjXi3cjye/YJX2hkoCBkqKQi+tYjrsXYELx6DfDD11fhyjYF + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/screen/966448 + response: + body: + string: !!binary | + H4sIAAAAAAAAA5VSy67bIBD9FYtFN00I5mVj6apddNuuuqsqCwyOkWyTAMlVG+XfO37cdlWpXSAN + M2fOHA7zQCboaNvs8+hQg055upxKUZOKV3AwqytWiSPBNedKMilLVhKliEIHFJ22bZjHH6jp9Zjc + AVmXuugv2YcZuD79uRV9iMX/cL/pAT7n5lVjseaKSjFjhGa16ERpKJHGSmV5r7QkXJAahEGPzs6C + BEooORJ6JOxrKRvKG8axYmXN+HtCGkIAPLvX1i9YTtNR0f7IlYb0FKzv/V9IBKaSC1r9Jtkntga8 + eCDrkzbj0rv7MujZrvbOvhvCqBOebmDO8NHqrG04D1fchWkRo6cF9mWHFZ9XGBR2P+bbOB6QT622 + kwePc7yB7TEs5FtNd51Lqd1SCGDQfHdxf8uGd5P24z+o8d36j0POl9ScTsl1t+jwOeo7yI6L5NMW + wsZ03BhNtJSVEUxVRFGhXE+F7bUS5EN64fU7+xJdjgE9D+jV27PLCTXfwC7X+9lvS/NAd/8TZmY/ + uQSyXVoX7Qo+7OjrUoWbn894Ajrf4fIxhJQbM+oBLxEun+j5Haa8bdGCLzZwAfIvA5TBRvggJSXn + 9fMXFADoAQcDAAA= + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:24:35 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:24:35 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - 562ySu37xnxKxbTr0NFd7oH3+L3JO3D7GcG/Lb1Dr0vgKuyocJBk1SrO7ogLRZuZ + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"board_title": "new_title", 
"description": "new_desc", "height": null, + "template_variables": [], "widgets": [{"definition": {"requests": [{"q": "avg:system.load.15{web,env:prod}"}]}, + "title": "blerg"}], "width": null}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '218' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: PUT + uri: https://api.datadoghq.com/api/v1/screen/966448 + response: + body: + string: !!binary | + H4sIAAAAAAAAA3WQ0W6DMAxF/8WvCygNGSv5jr1NFQqNAUtpQpMUVCH+fWGje9ubfX107esVOq+D + aRMli6DA4XLUDAJq03pnn6B6bSMyMBivgaZE3h3srgCDa2YTmiwKLnjBRcGrz1OthFSVLJvqdK7k + G+eK8wzvO2hnpYhFI/pCNjrLN2+op39MPspG1FxWfyYj0jAmUO5hLYOFTBpfTcLbZPM57awD6c5i + BPV1+YEGTHuz5iQ9OfoNsuak9wfGY3TPl+l5UPEZs1NpvTbl6X1dsGPoZjUFbzbYLhuD19PyijBk + icEeq6lrKc/bN7O/h4FaAQAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:24:37 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:24:35 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - ztq+F8HwxRthTKNo0l2MCEDK5uwvgQzF00nWu49lHsBM51hGZBm/pPILDqupy+Xd + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/screen/966448 + response: + body: + string: !!binary | + 
H4sIAAAAAAAAA4WSwY6bMBCGX6XyoZclxDGGBEur9gV2T71VKzTgASwZnNgO0SrKu3e8kN6q3uyZ + 3zPf/OM7ax143UQTLTLFZrxt54x5BN242X4y1YMNmDGNofPmHI2bN22KsIx1pI2oKSi44Dsudrz4 + daiUkKqQeV0cToV84VxxTuLUwyStFGFXi34na6Dw5LTpzT+KHPNaVFwWf4tsHZuW6O5MmwCtTW83 + 0hFmvQ5kutFZCPl0JdTxp4YI2g3jJe/clGBgSnO/b7Jvb18ySmyOzFdrM2ZCA3oyNHX0VzLCu1R8 + zUHXYQjNGmIko8cL+m2WVY8TGJss+w+N6b6cHWM8B7XfB+yuHvPBw0LYPiHv1+P+UHaybYFDVR3b + sqiPvBZljb0odQ91yX+EV3n6rl89Ru/YI2MjmmGMT+ib0XF8XiJOZ0v7axbwJvkYmPr9kTESDRjT + hRzG3sxm3fydvsaFfNpSFyKGZVDhM1Cl3DrQ+aG837DNcF7U2Tv9YI8PYnj+MmrhBwqRsbSyuqqk + PD3+AClTtj+LAgAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:24:38 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:24:38 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - NclXS5F5t+kukUaODU4jY2oSI1KBdPHFdFhJZNfbXLWDOThxbCLlKKmYvikjdDSg + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/screen/966448 + response: + body: + string: !!binary | + H4sIAAAAAAAAA4WSwY6bMBCGX6XyoZclxDGGBEur9gV2T71VKzTgASwZnNgO0SrKu3e8kN6q3uyZ + 3zPf/OM7ax143UQTLTLFZrxt54x5BN242X4y1YMNmDGNofPmHI2bN22KsIx1pI2oKSi44Dsudrz4 + daiUkKqQeV0cToV84VxxTuLUwyStFGFXi34na6Dw5LTpzT+KHPNaVFwWf4tsHZuW6O5MmwCtTW83 + 0hFmvQ5kutFZCPl0JdTxp4YI2g3jJe/clGBgSnO/b7Jvb18ySmyOzFdrM2ZCA3oyNHX0VzLCu1R8 + 
zUHXYQjNGmIko8cL+m2WVY8TGJss+w+N6b6cHWM8B7XfB+yuHvPBw0LYPiHv1+P+UHaybYFDVR3b + sqiPvBZljb0odQ91yX+EV3n6rl89Ru/YI2MjmmGMT+ib0XF8XiJOZ0v7axbwJvkYmPr9kTESDRjT + hRzG3sxm3fydvsaFfNpSFyKGZVDhM1Cl3DrQ+aG837DNcF7U2Tv9YI8PYnj+MmrhBwqRsbSyuqqk + PD3+AClTtj+LAgAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:24:38 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:24:38 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - +UwwYRc+A5vkEib2s1YY/+OMx26FxXkDPMnhrpaIz/kTVseyL62lC12FdLJrU3nv + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '2' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/screen/share/966448 + response: + body: + string: !!binary | + H4sIAAAAAAAAAwXBWQ6DIBQAwLu871awLCKXMW+xamMDFYgxTe/emS9QwkOmTSCO3lsbbpAb7RtP + 7dghwlprLlGp3AlWlLSsn47TWxVSTyyv6xLi5Tz50fjunTHa+HF2YZBBW0LukUc9Sx+cJQe/P1E3 + w31vAAAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:24:38 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:24:38 GMT; + 
secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - Wpac2a5DsHa/eqG3DjQhOxPXeBQRcLxZ18fT3wn3gFeruJMdJwvfZxTA9hAiHLHZ + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://p.datadoghq.com/sb/fasjyydbcgwwc2uc-65330369e587d704bac1ac90ed1854b5 + response: + body: + string: !!binary | + H4sIAAAAAAAAA618aXPjyrHl9/4Vmp6ICb+RKe5bu/vGaF9au1qr5wUDKwkRm7CQIu373+dkVQGq + BKHrG/Y4wldNEihUZeVy8mQWvnz58v1/HFzt/3q+PtyaZYH/Gz43Gl8m4n9bW/Lv5JO/W+qC4u/k + yz/xz62t/7v1T3Gj+CT/oz6Xv4vr/kk/0i9f5PVNebG6pbyZrhP34/diHPn3y5cv/dPjXna4fdSZ + Npdxcz06mJ6u0mx4Fh/sXQ3tp+nz23BqXgzuosuZOX33evdnu8vW1fv+hf92vn78demYT7PZ6NRz + 05XbGZz1r4b3Zwfng/3xqnXZ81eLd2d0jcuG28bJreUn8TD7dT29uTQu9k8fg9PW6rJ3Y/ev5vPn + qwvn5H0ULFbT0WVz9/A0eE4u3NDp3Vg321H+ftz6dXVxslxeN3tLe32++9zP1vOluT59eFw0H09n + zw/r0XgwbC77J8cHo5v0drVtHsSXfWtvun9mW5e9zuX57vrnnv1gZW+rWfgev7fu093ucR7YibFw + ny63D15+evH53l2UJP5Td+/aiG4ejp+Hy0e/Ex52B7tD/+Ds6W7PfXiZ3txEv7zgV3Z78DZqnV8H + l3vH2wfPxoE/v3Iv28Hw9f7NMsz77nQU3Y/2H7L7eN4Nk4OrfPt82lueL72Od7a4e/HcvJdFz+3w + 6s1szrYPh2+5ebG+7OX3xsvoct5qrRbHB55v3PivZ/Gh1Vrmd49Xz7Ndd+o/XL3NLy7zbds/XXZ7 + p2bn+OaqE63XL8Pr/mNy6zycPzjjq+Ofl+/5yf7dQfP0KA+S88w6bdrb+OXl4W191w4PLt4vTpJw + 2r06egqHT/vto5OnxBz544fr/DFdZcfx0eV+pxUYd0mar73Xvef3+Wq8P2r+bHc6o2nz7vTn0037 + xnu6eh1e7B6cvMyCxfPl6m2+nHuHs5Pcukj9t7Z/dfTTCdzz9ez6pW0Pbxb265WRLvzT+6h5e3x2 + 3HtZHGT3N9ZelroPuR0/v7XO3x6Or29/2un++GD37rK9229fHZ38GphH69aqMzi+n/0Mmnm2e7Z3 + cH36dLo8eX60V/3YfEvydTuM1z/D3l5grbr5zXl7vvf28273bN9p/zSbJ72r8Gf6dr5a3B4tg/ub + +VO/E4fJ4bV3Oo3G21fNVbt/fdZ9t9KH3mt3+/n8MT64uj/xbueDrH3v/cpvzWjpNTtX7vrqMb12 + z29yb743Xpvn48fkfXgVjHo/Bwdns9eD6cWD1x7tnbdebX+WHk1PW+bl9jQ8W+8/d8L44i39tX6a + 
32aLWd65OZq9xWfPHetwdLvfOTqZ9XrnD79cKzJuo6AfXdwu2ndvQbu5e9q/WQVenqfW9u5s7/bn + 6flp1p+Gv4yr0+PlQzfaj557zfvg1H14+GUfRrZjtttHp4+d+/D0sP/YvbMPehfbyfZTdHg5+3k1 + cKKec3M/c1+Hw5fe+PbxzV2fvw3j25vz9Y3/dNV5HOQ33q9mvJgd9s+S3WHn6WU8PJ7Om0frh+br + 3r773Dvd89ot9/bR3o+azSh1txf3y/PgxguTzHo8eP3VfbLe3fwqeeh6t+Ht1IiW52vzeP9xf7oY + ZfuHR9fz4VG65w+85+Tn3TQZ9JdWf/2+OL44SrP28nRwfXh9drbyDt2ni86BZS5PBsfXs64VL/eH + xvs4vRqs1nvu9nbrzdnN7fS883Kfj/rP56uH2/N1lhnn/tvuKnk8Mo6PR8HP1lturea/Lta77k9r + 9/3FPW7Pz56ut58GmXmQttq7g/Zpb3gXv5/G47HRWx3Mfw2z1/ne6806fjzYG3q3Nzfnt9OHG+/w + ueOcvvxaDnzbHo+3LfNiPDqyT99ur7bbWRoFv977fnp8cbu6OTLuVhdPg+ZRs3U+v+o/OAPv8fzn + 5XD58DDbvfv1Yu0/vbx5Z/34ZZ4dvj/ne71uYLwszfxm9pS1nenJxfbdvek+zbony8Fs0IpezD3v + 8Gz3fRcWmnSOzOfmzcVzK7gZ9N7c5f7Vbeuy+/j2cuX07y9ezt3YtFrm7uLYnx8enR+6r1b0du0e + 7h8565ve6vH25PnkeHV77fr+3p61v7SvDw5vu7fXj/uzhX/+mh+fXT4dvXX2r+KHk8frw927+OEs + e0iv+ndv5rtlNC/Ob8Z7x/u7U+P44XU8XyxuWs2gtTvt/UBoaTQQDWVQpFj0feYY9m9fROSij5mX + +c7H562t0FlOxJdb/yyvOjAyw46mH3c1K7d9971wvjVLHPfH11mWxem3ZjPNjMyzdmx57+xtx4oC + 9WVz0ez2dzrtbm/c6jZf02biGFbWsKLE2Qm8cMdK069bgWN7xo+vqZU4Tvh1K3F8fMhWvpPOHCf7 + upWtYufH18x5z5ri+qa2qsDJjK3QCHBBEplRhp+tKMycMPvxNYy80Hbe2Q3lyj7+AdGVQvoPV4eF + B1H4H6/sPxWybaQzMzIS+z+aSY2ohCI1PxTruxnZqy3LN9L0x1dxfRhNAifMv3zVNQ032d5iS+zp + j6+2l8a+sfqGa0Pnb19/++6FcZ5tefaPrxMjz2bYO8+CRkXhJIvmpBFyfz/5UWrHzLNtunRh+Dme + QXv+vYmHfkwD2/wdKubFmfxuYSRbtoOJbP3Yane2/vfWoIX/tFut1t/ESlInA+Zwojz7i5uHFs3n + L/+19Y9SKksoV7Tc8SM51x2obWTYf/kveffvf5WD4+P3ZvFYzIDkoOQVG1MH/3aMxPXeIa8v3wPD + C4UcxOY1jDiGdJr0Lf0ql4NlqGH+tTilRYnBJlFMK4B5SGHW/lQryn/8r//Z7f1N2O2dk2VeOE3F + N9+25C9OuCi++Lv8hp6gLhF/cMU32mvx4a9b4k/q51P9mo3fnWThWc7EivIwUxeGue///t9qANtx + jdzPJhh88ulg2AVxr+tk1uzaSDCxzEn4/HVJCH3Tp+Ua6etqZZvWdLm0OrnVGPS73VZ3MHb6o6E9 + bPVMw2ob1rjl2O1Rv2f2xc3FY2dGaPuOaSS1gsucAHaQOZOYJsYn9f9pImr5QgPUsuQWSZ0Qvp3t + QhESxJfqbmw91Cf0V+pK1/BTp9wFqdtQreo4tpNa+jDw7lhsMQ3xS6fVaTVanUar+6s9+Nbpfev2 + dsbd9qjb2261vrVa+u1BZHuu96/uH+6MO4NWr7t5/8zxpjNdk9QKlp6dzdTcScHU1+XmwE14holY + 
pK75e6GAuHHqZBXVh1Z6oUeWpi4vrOctd9LqxW+6yIzF9FuKVNAJ4FIMe6fd/8fSMf9KthMnkf27 + uPb3/y5Ua2PrMMVEWlRpIl4h7PFg0OuNfi/utYvwAM1b0cPYXMMo89zVxPfSGmnRppIL/RP7Ta48 + SiZVZ3DpWbMIAWPrAhKxpOT/tMy9dPJH2lguWCyo10kb447b6I0N8bl4yp9S+jzx9d1pljJrasM2 + AaE+NkKNr/R8YhTiE+P8O6r+L4f4XNuV8KUD0hcSKvHvBEL8/4eBNl1MFfUutB7xEhFxQpFCH9YF + dBOfCx3LUyeZxICJTuKEVmk9/yh+lw7IC91IDSMNZepHpuEDlpYRRLNJ5bQqj05nRqLcgtoBaIk2 + ziR1fMfKyIYRLOhPoe/Ckf3+++8aWNCgoBapF4gZBEfCPDCdZKv4iLDt2F9/24/iVULOZUuh579u + nYbWzhZt+VZj6wP/bjVK8CCAlFHB0cvlsgKifWdq+M048RaGtWp+/e1a/mvrOvI9a/W9afz2745p + RdHcc1KMuS/+pQ35gYYV4qDJignr8Kn4UsGgg4OJEsuPrx8rlphQ3FuCoOLGD1ELDm/r4GDr7G6L + khiJkv41vAHQd71pgxxuAWvYV38AZxC255HrMt0DwPEWhVZL3VAKtfDS3PC9tQCl6Y6L7MUy0gya + lUCGE/j2hBxm4BUm3wWKLJ2a7wB1JKudzJhOzNWEPMmk9FQsmEZLQN/ASXcCIwQ4nABfyi8mceT7 + Ew95TQKAq2Y9KB6RepmDoDFNMYeFM8kMz5+kuWlHhBx1K6WfxYW6oVMW18ET30m94V5ZpAIcVuug + ae8ADtGqjcSaTQR2Zqvu9IuLgVwnFQ9aJIv4qaLk+mTEU2gusWfN4UGQTWWzdGJMCzfR7tfvCRI3 + 5/P9GHzsh4R7O14ottvLVsLZkE8rIdgYV/N1yx1OnDSeuEFWaLoSbad+RvBKxucz6n88gYTqr3Y+ + kVdFWrqwSAfh1EjYPPJ1R+NxuzMcDTrDwbg90u8xk2hJrhnJtBOSU9vJwxjG4NiVMebGNLaR3Eyj + wJqCErDj8DW0DMuxIzMMHAbtKvaB/DALjPjzxbc7moA3Z2TlCYJGVp3Q3AwtN7btmR3M3cCAU5+G + 8Qwe0po6rxGL8YgrCYST5FaWg2nIY4Q54GyYa2RjWnLy35Dx1W8dEHv6B7OnHLG4kzQDPmDHhKhI + iGTqnlWgQiF5XfwVSaXIHWHTsR/9gTOR0mJ2WCQ/iA9OaCOs4aFljP2wWeEYNt3ApzFfWp8XCneD + RK/qcfRtE9ci9NpwGYQhJ4aLlSiT6JaeIEPwle4vNZDuOJMEO6Guau206+UPhgALMnPBAXzqZJkW + wVF4WZSkOw4xAJI9mCZGPJssOhMrz+Ds4XlXxcYM1YOhEoGXkh3w/KsYTsBNNV3xp9frtQe9kdWw + R2a70W4744YxHBqNtgmbM5APdls9fcNJTAK3SOSqD2WPW+5g1G017K490oYyu71Wz+k7Y8fp6EMJ + D79MsKfwjrFDHo/p2aiNTLRnGQ3bbndovFEDaeqwMRgOjKHbMh3HHX82XhIBo6WQnz7BUc8wuobT + 4wOOGkZ32DX67Z5l2SxFM2xQevoAY4zQMW0M0DV7ckZG23UhMXcwaNm9bquzOaM4NwFukPhTcIdz + 8/QR28a449hOpzGwTLHGcWM8tgaNjtk1ekbLst0WcwRCZrARh9SuzjjHw7HtDpGGGo491AY0ho45 + wjrbgz4bsFSMIjbrs+vZIAX67qiqHI45tkawCbvVk5BDaR+I09D+SM3FUJCZ3XFbpBWlzGB3DdPs + DdutsTMej4YbuyiSIkF2QskzOeFvkvroO4NWf9Dv8E3sNrrDPr6G1g5bjJixPxRW6Jq+PqWx7arG + 
9ly7023ZoEkGTFjaWGpXBVivGRLTqxiBOzKQyQ/d0YDZk2YEzrvl5wKXu56v0TpieJAz1nhkwE51 + Yxg0xt1hq9UfIzR2pF2rrdDGJXQDoMRNYWi2WjBtPty4YZmO2XeHbagQ29lSTTZk2Bs5uN6BlXIH + Mu66vdaoN2wZ3KjExAT5sZI7zCc26FgW+KjKOjFPczwe9wcwsfGmEylhor4TA9gkcJLDl9hvDN1u + y4X29Z1hV1z/QaoZFF0nrm+UlODfxRWB9x4boSOBqhIwiGFSTaxhAt8+nSIvtD9IBnWR8x5HCZCY + dOZiLPUL3EGaswGVRhFjj2ANvULA0u8wAmMN3QgM7CawIfGsYfWSNc2f6gUAygBFXkgWWWSr6slL + A9whABCeES0naXeSIs4U0ZbNrvBZUEdmUuBvyLvS8DEy0UkVgRtiGumbPwGkTkF+I/5GbK3lHASw + mVCCywxtGkVTxFbLj3J7AsEA/euiAIcVeinjWmA8IAeQYyC8InHTrzZz25ZcX60EAieIKN1xjHmd + KBC6PQnW1d1ydWLvXWBu3Ks/LF0B4DtYc5mzl4TbJPAQwUuqSQ2nhNlAfhRmDSo95WljQ9sIQSB9 + moJC059WZm5ugk3Xf7EpKwMohnTFVNMoTyqKYIRIqHxK+KhyhqGCSe0wiqYQhINkUdiC30A6VEU+ + i1KBmIFVMigPy8+k/KwoDaLUZquRvyBV9FcpVIKGBWXObhbeo3BolH0mUWV4GI4Zsf13FgS+KSsi + iKHPHVOE1eYW+5J8pX6RnFXoZMsomUsEiX1nl5TqXEbm5QyhpqQcmeYgvQEeUIgOUXxpcAUi8Mu/ + ETpBsvSmIVL1ieFPIwC5BElexQEAqs8mEibGKBYWAFY9Pw+RIK4nNiQhUiWIOGZPKrZNLyFAvOlH + EFYjzWyXbYtnBpOlwb6StltUmrCZIslGJqLLNskDrrSZQTNceEkmCAq4VGXWm4oH1UY9ZRYF8BMz + x5pXpS1cC30JVniK9BhcPRILW8hFfyjNPZDUtVqdneQq/1RfzFdRFmUrcEwSDhb7KT2yWOmG5Sgn + KJbN1EneBEIzmiBdorRE/1luXmAkxBbkEisWj0NeO2N6l6UKKoDq8JmDVcFiigKR77lsvaYncyHU + athqJCeANDRRvH6x1x+2XIiPbbTy1cJsye/pq3k1I2jP0vNtV5Vb1KCCXCriotJYmRgxYehhIIVT + RFBjWxcKsoCq1IqqUsN/7L2ABnoSq66Q2mNH8NPyn0pBa5fBwpHrgTeB22c7IfzSDMqGUIpNhQY7 + SyaJ1I9AH0dwNmwz8GxUWOY1CqJCZ3XLFZSYSFYeTkG/9QOIUC228iNbRGakcza/mRfD0tjccpRF + YFzS8uB0fIILRRmTLA7fQVn0CcQzEEoxs+liwjV5LLszR3ME9w2eMefaFEQLSuLh8v2cdh2VVOj/ + uz6MlWMLwPqBD2UyFYjENinKTYIVoIn+q9g8SajD+7DhBEqbuCCAMICwNh4rYBtUEJrYSRRTBsUe + KiSmZkTOWEV/6a6g9FEe88goXUP6xn2rxz66+Xq9qtPoMggBK4hYnYZGDIyX4UnoQ/GL2l7h5Mqi + WRpZHlSWuiD4DhSWDRNzQrhQtnRRVvPgpAqVQDSC0rG5cqhpLBG6fbMOZNmEVgpYqeEVXZyigulk + gMAqDBJqoYjFHkkqKwELJliHV4i1TnJENQoLHA3kuEGRp/qDmeGYyOGFp2RTixDSEWgQYPlsZBCQ + AGSa8KACw1AssNoR9hwSggv90R/jhYuoCogKXFJdDPHecFQTyxI6n8dQPpstV/p7tCrBZ01iwHhj + OuXeNQiwIIZ9JdITkIAqZBPqYVKh/VOJEZmGGhjykGqo0737Iv40tErPzMefGR7ZgFGyA4VbX0JL + 
kuidbY8Khg73JoW4owDdOlRKq2q4LK5NUNwF7lsj9jkp4T9V4dswoyz2AfWpRohOG6aTIBQdI3Um + pJHsBxU1pjPKtCRLbNUCMmA/RZHqYpDJgtDjiq+nJ5mog+lXW15qRRPDYpgiVTyhWo3U10UgkAnl + Usy7I/mYYVC2BJ6pof2D8gj9scVUBIasyUJmKH8mC7a7Xjbj+Z6M0BEulLCZXC7lB1r3jlpADH9Q + CQnl7rNQaZfuT2BmkXXrk0DDSh6w0KLGieKUEgF2cfEIle1iuiFkXfGm3BlmVgwIL66cvOYBigub + WfiGQ9iAHa9ewvJmPhHTweiZ4TM1oDIXTBGbCzBpC0dYjWoGykLwK7o4UP4m6hheIoumlJqDzIYr + 1i+Zw5/pn2m/CcwrDwOexE3ZBQUygFFkZBsIJzItqpNtkLJVkLG+o1KK0JamaL9EzgRUwMNXaiwA + +SlCgDSJ9anJvhTGl6ncM4ZbEbVKpEhMAGwvkDTyoIz+Txi5LKdCsFFMOAWR5g+ioRXO6vYcTQzw + laJQAB1PKyHqI9XAvbgKtLLNNoH0n2TCVBkRr8AnqrYLiovjthV6W9hAYlNqeCulYXAFtfk5JVO2 + ySzNN9BtOAMtPOcpAAiovOBjlfWWvoJ0MzBkcaBOG4D6UVFxDOZpjDgQTEadXxAMhw+3IfIrpra5 + +WELLP7N6ZcP1VKKwS4pQpCI/BPfMB2fTTddeiDbJqJ5BL0jgi5iWyNyWJV6wEKwc4Kn0a8J0BXo + SWihGjYEV4NCWz37JxCwJP+oiYPNJ0DBk6eQGvbFXGcwbzgM/fGK/tDMcwLvzEZNDNP0Mp5DV6Cf + pKBrvIb9AUM9uxCF/vxSJygnJohN7YWQJxOR5FehUAGuWhUpC0+OsNL1pELl2YuJBGeq7Fk3KPkv + uKeP5gUmHcqA0EyoaAr9J3KZKrWkbBFkplfxKaWM6FLilJDPoz5UieXK4lD8RTMdypI2aq5ciySM + cPxYiKaKsqCacE8cXVV4YgXhN7ddQHHfmGNjqhkvzw6lktb4CzE3pDrgVkW9lCkOWu/YMwPQQ+wb + lQRvTkxUaCWq5Krgk86bAGsJ8/hcHYtqhj7uCpzy5nNA4FOEQc4JjMjnKtj40ImznCdGhpeYCUSm + X22XWl4kZ1LtKERtwlvX4f1fEBND4R8EP4I3lcpS1gimnOlm8mWvAJwiZME1WAOgmWAAiIvUA0OE + PpLq/IVvLXI01U0HdyEypk3qRmktImU6qxBQ0qPYzgJQSn/ImkV44X0VcSNCDd9nJ0lQ3aXnpxuE + qXyAh8y3giPVpPCL/tw4j2OOdtR1VjSlIvymVsyd1WRB3drsJ4QfiKXOh8kJCdQw2fDAZpZwMlX2 + z9Qom+8wQKqVGkRHHZKYonmfyRV7BjnOnJxHUmTj6BzJURNdKS6CpFlEIHYt6GYYArIbfVLCskX9 + kwpAxMXrGE6/UoA1ZE4CrGkRVb8GDTKiT7dm2dUcRbIkS4MVKF0oGVgO1FjCIo0H/au6LpVFiBnT + f5wMOAiBTn9Y6qNraHOrqYpFXtBEAqf/CqarUWG6lK/Qr9IbP2CkZYFXdtcwccpqzIoOhUTkUjZr + Mao9SmeSXvmWanz0LGXYyHI46ikSBZ7HUCaZ2DkUwkOKgPMnHD38QW2LiUY2PNjE6aCjHCSBOhcA + BSLeAPycyREOzgtU8ScWm6KZgKc3YgMTJ0qQWmnS1MvlaquBZp2VaaCjnQkZnGEtG0/9QUz1uPMs + UHWd4xRzItdI9Amat2CFEp7pIpE4AMIgzEbQPSlODhSJt6L+MgcEqjqdU7e3quTK439RapAnTExu + 50jkAD24qqmHLZkFiciZxSwrSr1pygkDkbGKWiGyJC+CUlN1DbhloK+3ZKn1vIhy7E2tRu2cryad + 
0+EWKrRVgqjSWVv1NXLJiTN3CIwIE8wrS5ZLnKIqqFgr8ovGoII1UAVtwaQYPipy5NmI7v7oZ2Ou + QdVwkIWsQkY0J+BHABX0i2lSEXPcUhsMurf0txutA3JToV1I8pDu6CNGyBoQquvcEktTqVJR9Vv6 + BXU0eJU553lvXgORUBioo6OBxSNyth4/NAOeCxkO8C5XK7nanPLdCcxeumeOFUUvQGkbRZMyWR8R + nEKm1JIACrpaYt9EQmE9Da1wcMhpV6V3wWbNYAoSh3XXFDqJ2GqDnQSgQs0Rz7LBtMQf3ZQklxp9 + 4hkU38o6n8BYBxcVQOgrUf360GwUFMrgympJflmurSAjje4VpgEkQfgQvhV12XrGUtWlCgfAUzAl + SeThps0bq1Z17O97wkvjBUcjOnE3WgPmhjtng5bRjFsjIkDVKD4UhNo5PkEoRcj0Wc+CIon1QF+z + s+kG77uuQnraGWqFrMtNShis8luqYcigV0n2bNn1IZqOwEKxkMb4fuKoQOJRozSsbTMFYWqDmoeo + jukLK9g9SQqSBapCvXS11AH8qRpmca7/RsGTJq7aSkUlJQrFp8rypBzKNJ/YF3QziNO3dULHZpLW + UkyuNFmQgMDiMq9MnB16iz+dNTLzuoisyumf3kZZ1UbyLO10grhpUjfWZkUJfR7Um7OGl3fEudHy + 2FkFMMw8x2chGy0VDSBRhLgKG1o2JulTJY8AHlFmfLIwWSNJQXij+6DSyCOjYBF5Q7SC6fdmoOes + Sh6paoTU/8e0CYMnDHynEYcssbcgYmoSV1h6TrfJwCp7ympq/vJADDQGx13I0GtWmjhzkfTBx9X8 + SuEpACrhHF7RES4yYcmSs3vfcs7RC/JlGi2Ey9WvjG3GTNJLBWxen1EZDGoT4HqKHmc2RIngS3pf + /7lwy6pyxL3zZpik0FXDFUAhIb5K9l7eLaQsYDD1pJF7CQ1W33k1FjzzAp0IigPpfiVaCEFRg4dy + 93Q22sOgVW6+qIEWrrM6jtTTOfWXcxPm4IYoHrazXCAyi43RolsnE71lavOwXgWvywk5VgcNWNxP + FswDqQa02WOYwyQQC+IXb7SoYZEFBUcnErIoRjfjwvGF0+BUQLREgoeSEywAKKxuGGqzkcUF6txk + d5fyKNvcpKnBeHFqtDxzoDxUkpvMwrDFDcalJKZPBlsxJy3JFPQEkCLMADkGehQk21Ue3VYP2pwW + mlqRKG2w39I9JGgbIUOtcxCaRyP0Vtk24i6o5wQhzkP8q/oISaoSwV7yVh/Hp3noYO0odCCbN0Iy + vSQDrJSBlJYQvkd5q46XBDqr+uMUDQHM3ciTjmWfJfNZkF4l4KtnokvEV+fLlfiFT6LOdVgPYDgM + nnNH6IBDSY07OmLwJXFI5GDZB0ZdU/o0NF0QJ/WoYMp0igmKIAk5RX0EkfBz4YrEpPRNtQAVzYJ1 + qEHqj1ivRsLojyvmU3Zgb3YVSBArwj6TuKLkRfBBy1odwlV5vih8IUuq6bcDqAt9mxc7tVmTTusP + LS0HoQgbskby6qDITgCYtIeKkOIU1OQ1MvGl6L+par00GTTS5eSWuaiJO6/jT0F/ycBS0WvZFShr + MfYnxDIpGGYkjrShYmYZ4YI3nhJxnCAkIBv84+Kd0mjIJwT5x+l9KbQi7mw2n2K9Fb+FnIYoq4pr + lls2o8Mswh9tbrlqSisoJt6ZVhDHFYBbsPDyhIwoAlQMQ94oe9pqHMhGCxwpDkABZ0BFh1pRTPs4 + 6aIlufL0FivGICFGfDGo1csDZ1oHmfmXusNP65pYnYCzVxIWgCZFKKzXxrSry5mqaypzEHOqtQBB + 91CHGiuxbxyM0CarznnqoxVaI1qd6xI5arEkMK3fhHdWLIlDQfla/zoFYYAkFzGauU7R1A1T1C8V + 
yET/ouzjpkXRqXG4E3pnAI9n1M+k30UdFJX+bI6A7IScdQ0gLLhravdA+wLAS4U4VnlrTcgt6mEl + YYAQIPuB9amJRtMiwxSMo/7rIsWxWd6Pq2wbGHWy0a0jE+GNOkxxAE74HdT2WFj9cCr6g6cytrG9 + UOcECKQgi0WDOoIiBtwoRmj9sOLkuT4IWWONrAqzl574fTNrLIRYbJs+pn40F37TLhNuJmfpzUW1 + s8+MCNx2AAJXv1hqO+9Rl4nFpr+E4ZmooUjr0gcp+uqqzccbXM+k2q9eQGWkkraDfLcOGAiXBrUF + K0ycG+yE8W4UytGUxgJxqfKiWgryVTQQlRC3MsKMEAfvNixdDals0YNb7dXgpyPKVy4UbKbCGDVc + pvhFcj9UckMXiDhYzOsp4sz2J92oTH/pjFmUEQ2hDgVQp0qNQjiVCiUSEcR6NpTezMmp0vl4kqKY + VHEthTLLvvKypM28s6iY1p0RWjD/V5en1XipwjrwajXqxkC0hpCqkFNplSA16D0CFYSCHlJ0aiRE + KbHV8+MHrmFiA/QL3qlfp3hTlwqgKXhm6C1DvB/tywpRM3mkYcAKASp06A8isQrHUDnRB1GiNMom + jTILzweKg4FIuSltN4leowI74gadEtt4MxA6mivqI09AMEvM3kpyniAEUXqkuNJ58ISEFbRlg0SU + xDjWhxJipc1bmMHUiTwmD5mKyCMANf6TZQsqkG6kSZIk4lS8nIthgpUqisvwGwFvX0BAyXg7i8zy + yhaaWsSuzvvItywAJbo4JlI9aip9ITYV6X8NmFuEHO8YVM1ExX6joVnpdtEGoqsNYSSAtUm8pPcf + VPp+1X0O7/qKpyayYaviL8Sa1VFWD9UR26aiv/4oWTdk3yDko32SWww1vxPKrvFGU7ifyuTRXkcH + AasnpmQ4o1ZcM8chpJqx8CID/VvuSmTHRo0rEQfOJp5bZq5IQ2Av9A4ZJB7sOQilaPxK6qu6yAmQ + xqFsg9awenZ1zqudr04IHos7SZyT0ZfA4g+EX2lTQShECWca4gWKTjhbwVGx6YYVZramfuoFVTpC + bLpoH6pRT6U9qPCjVqJPtMiuKs25cLGitZWtic5K8EBgf7RUoSlQpCBWynhGpYXUUYeGHX04WUBl + 2iCWIO280jSE8kIecnIa1qVORXH6oIhpZE3ysK5oAq49digdYH2dY5PR0g8p11Fb6L80c6SknIHH + q254ny9tPFI69ClQbi+TAl0MoAUjKAjcM96Ii5TI59ahcu9Z8EnmDRAhqENQpPJQQlsfnDC01qrd + kI3WDG4oNKjfFeNgmP7Zgk9IIrBsFVxQlOS9OuZGiyzon6Qj8eULRPWxWf6mteNOFnIh5asSVaTc + Kd5bQx17O4iZYjC8CPbjvYuVF+R8nLmG9te/a0t7k5INHd+hDaH8UL30R2si/rbV3mGv0ZEvja29 + stsqLy20i88e+6Y6J9j8UU7awf8BZD3qkvvsLVjsVW8NlRrLM+3i7cnEzcEiGlSdytMd3rcklqne + YAZmjuJo+b7J1k6neENWeQAMr6alFxwU3rbsXit0Vu0C3rTRaukvw/uTL+2VfgNn8LNZ8Ro29lXt + a9jEM//oBdLiAu3NfH9yMrLpC4jMs8t3wonXZO3G8Sm+qp1M5X1Z/+5Ti3enUZCi8498ArfyzWp3 + H7/WzkV/GWHxqjz14r2tNLH+vbdu04lf9V7qV7wCWD5YvFCbikvytX70qmH9BX//8UNJTUVVWryF + +k89t+aV0/yr70164zTec96ULzr/f6e8FXERZAAA + headers: + Accept-Ranges: + - bytes + Age: + - '0' + Cache-Control: + - max-age=300 + Connection: + - keep-alive + Content-Encoding: + - 
gzip + Content-Length: + - '9435' + Content-Security-Policy: + - report-uri https://api.datadoghq.com/csp-report + Content-Type: + - text/html; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:24:39 GMT + Pragma: + - no-cache + Strict-Transport-Security: + - max-age=15724800; + Vary: + - Accept-Encoding + Via: + - 1.1 varnish + X-Cache: + - MISS + X-Cache-Hits: + - '0' + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - cYNsy3QDuOaYo2clO/PharSNtCykS9KtUfiNevH3xDbHJlRyddWkNpuDhMgHWZ43 + X-DD-VERSION: + - '35.2134903' + X-Served-By: + - cache-mad22051-MAD + X-Timer: + - S1580747079.790537,VS0,VE454 + status: + code: 200 + message: OK +- request: + body: '{}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '2' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/screen/share/966448 + response: + body: + string: 'null' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '4' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:24:39 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:24:39 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - BQaWiIhDCyK3JbvyPZudxtMuoedbvOKE6tb5GJMfo6GT4EOQ8qx9lqgA4UCxp88q + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + User-Agent: + - python-requests/2.22.0 + method: GET + uri: 
https://p.datadoghq.com/sb/fasjyydbcgwwc2uc-65330369e587d704bac1ac90ed1854b5 + response: + body: + string: !!binary | + H4sIAAAAAAAAA618aXPjypXl9/oVHE1Mh90yxX0rV1W09oXaqJVSTwciASRIkNiIhRRp+7/PyQVU + JgiV3a/HEX4qkkAi8+Zdzj33Jr59+/bjf53cHT+93Z9Wpqnv/cLnavWbwf9XqYi/xhd/K/KC/K/x + 7e/4Z6Xyfyt/5zfyT+I/8vP2d37d39mP7Jdv4vqauFjesr2ZXcfvx+/5OOLvt2/f4uV9/zh4uRwe + Pm7IRe3yIZ7fH/bqH8nNbSc4O2vNBuGzTZ6e747S8+V9Y+y/WfZzZ5Ucuc7xg2+PF/e+f3z6fDn1 + 36h1GN2sXh9f34/Ojho3+411/ejOfP0423QC68Jr3Dbmp+sb+v606L7Q13N6fvz8vFmYSTp+be37 + jVZ0Z95Mo1F3OBkOZl4c33Vbl4PNeH90ZiVRPAvSxdpfb+6PR6/tXrtZ98Pr2+aJ/768fL4+aT7O + J3H7/v122B93Osft5GrQjq+Wk8BvzCy7tbhpjFvZ8Xv94XB4WB+TjXfy0DtKhrevi7PBU/vs4n39 + /OLP+8taeviyuO2fPZzZd6PR1DebSdZphMP0NTn7OH5odupHi6ebQbPeXB0/L+LxU6N/s/A7Q/f8 + /X3cv0r3Lzp19+71+nh2c0GHV0/ri+fp2cND/95aju7cy3j+Ynn+aDxedl8HC5pFycP5o7u8xqyP + 92vuIG3fDbpvxzPzqTGY3fvnydVsHJ+8DmaXm7VVz0aPnVfHOTwdvofUeTxsXi1ncZa0joPb/SDu + Xd75yWqUzuOrRZi9DxZnzdl82RmtetMbZ9K0u/uX5+lrZL3fnNRp+uwO392n197V8O7Furodu92X + twntEDIcdqeLj8ur9fvLzLyPrsx0+fZKPgb3qxZpN9/rQ/f+pHPlvM9fIvN6eTUZkqNHMz0dxovJ + WfftyHndfxzNVutGrfnU6D2e3rp+ffY0ed2vvQRvT4tOi7xNbzNclXrzh/VH8EFPPvavO9nb5PFq + Pn6i9XHWi9qUXA7eXxaPk8Q+t64b7Y/VXf1o9dCm4+ldNj4i5shtjE4f72cXjZU9fGq+udNGYvXf + xvF+7HaGi4fbZhKFWThq3saD6MNtrrvZyd3Z06JRmyezm3Vyi9Ev7k/Xh9Q/v6u164O3s4fLl/D5 + +XVx0wlu7P1uY964zd4Go5Phw/vd4/Xd5fON9/5wYXfco4fsJA3m76vRKb0YxVP37r1zddNu+Ulv + nQzmt/2H4+XHw3P79rT9HLxdPUbzdtoaZIPX8clqFPZH3qO/WQ7Oz3pk9BIOPybLdXDU7t3MXs6u + W+9ndDI/OU9v1u7w4my+vnlY3+w/vS82p87bebezPFncZsP9e9NbDsbWMM7iK6c+GfvHjUbj+Hbp + +5tjZzHCTh5PJ/T9il7eHL2Q1XEnMW89Z/J6l21c+4R0Hm6d9vvtiASJ07ibRE/v0+XqIjm5f1wQ + 2puQi+f6cdO5JKejxa0X2zVz8phZt6PR5fKqtjlMB2+j/ulR596j8fwomPXjSWffOe9GSbKo37RO + Rtct2+qN4/Hb2L16X1j9fhq+TE4Xl7NRcLd8Pn2bBYezGl1E5Pok6d/HT144eZ4eXT45g9bp5u55 + 5rxOe9N1+Hy3Wr2/pY/De4t0Z8Hq4eiOrs3gKN48nK7G/sUiGwa90A5Ob+vmYxztv/jZ/fnZ6BEW + GS2i3sfyvek8+G/Lh8XweDi+OHrzT6nzNGzaN9HluO0N6Ll39WBdtF/PDw9vPuLXY/OiaS0m68vX + 
1fOLZW3e7oPlKmtf1q4WvaujdncxatTdqOeP31dP62FzcpusyE3zcnN7/LG8ql+9DV/7k+xmaD43 + 1ulgsxguxp10GjYW83V9vze+vFrcHj3f39boZnh6ebK2h/Wu83oMl7Qim/23h+FH2KL10eq5f/H8 + enhYu+1nH7V26LSSwf7ydhwfXc9HL9HwbLS6bLw3w1rUPov319OnS2+wWaVkcHc3OV6e90/Gbtgz + RzM/7SS1w7l5+tJedmtPs2593hmvWk/7jfOjq34n6T1Fp877c/ZwPvkYNUnfOwzuNlZ7+WquE2ty + Grr285js37209jfD9+m7R7pH9et0/jS6PNrUHxO6OLl7C9+OZic3l/vJ0n148lf1xqJx3ozejqPD + 09PwaJaRl81TjQ6CbuBvrOXzyeOrd+rPR9nV/eLs5P5qslg9ruL27OPl7HHy9Hbmb+JLMnld0+f+ + OJ2M3m+izSjZPLx2Zv5oeVYP1t3p/fX52bNDreB0ktZqWTSbX21GZHPtpqe9+jIcWev6sZsmqfUG + f/h4u36NR6/Xs761vOnbN9Z83O02n07S483D9Ojjej0ZpWF83g9m3dfQ6l63Hrtx/7pDO0/T2F2+ + v3beB92LXny+eRuMh539/gDG1pjVzWsbPqFN7HqyWLxcrYLOdbS57feaUeDVL1cPE6fnHi2vu+33 + NmmcPjbu3ZFLTy5Wh+d3vf3l2aWzHAxIx9x/uUo/HMuyN6/PbXo6GR0drl+v+/P6w+nhjFz0otFt + F8/uzsfDDxJfPkzrr2eXHety0JxPzj5uwtpy3WgnjadW5+iwc3F2Y/WvTh8HtYVlDpvR8Hz+cfx+ + 58/2X8Pr9mEtisNZe+y1jhZp89a5eHBvGq2zi/FVa3Z15AzTdnLoXV9ZL8/eQzQZ33Wck87ZZmz3 + LwbO4YVL+yF89rDzdG9f3Uzv6FPw5M2aLT/rAT5Uq0A8AvgwvPFjSon96xtHJ+yjT1NSCYhPf+4t + XbqKwjjdq1hhkNIg/bm3cu10+tOmS9eiVf7hLxU3cFOXeNXEIh792dhTBkvStUc/P7MH/Ec+auVv + 24fmD+cDfq+ow/915yLtcd8rDf2Kf2jXm6G9rvytEhHbdoMJLo4+/lrRL9m5/sAm8bzqhzatRK6V + ZjE98Mk8NOSHiutPtC8Mz51M05LV2G4SeWT9vRKEAf1ns/wehOmfPh/953/x2Wyu/91HayveWQ0b + 8S+/Ewru+M8ktv79554R0yWNE2rv/VfJHBzXS2n8vWLGTD4BTZI/9ZsHg3aj0eu2e//nz19J5Eet + oDU/UjfVtQjTO43jMK60621A5ROSEjucfKv8fTvv7Ve5WtcKg/zw3GBemcbU+bk3TdMo+V7Dc0nq + WtgEPtx0cWCFvvyytqy1OgfNRqs9qLdqs6QWU2KlVStk2uEGB1aS7FV8arvk515ixZQGe5WYevjA + LCCZUgorStcRrCqlH2mNX1/7tIztxL/4x4/ap5n+4FpteSRJfu59U8wNxv3th+0u898ok5ERkQnF + N5TEjvvBr969Bnsqx/mRq7kcX1X97aN+4PoK8eAPpJwr1+EkrPzpkdiVI3jzSjqlFYjwz/AcYprq + MMJg9ipQoj8m+4TYVROPOUiWk72KcEl7zV53rzKlTNd+7jUHHUW8/8PpMov4/zDbrbX8a9P+UZM7 + wXeshm0t3zo/+dy6XNrY6gr0m1Yb9QpTt2ribmh10BUfmDOPSZJW+/JXM/Rs8ZN0WQ6x6N6vw9g5 + wCyEkv6YNvLRd0du9/d+MVO8DdPKWZgFNtS1Ie/LkGDL0OK5n0MsaZWZWwYzcO2fe1xV829+/ah5 + rnLXrx9EWmpt71eucRehT7lq/1tM4vivP2rk69tyA7dDKymY95R6kToqPv4rI9aIZWGhaS2haYrQ + 
knxO7JGmWVQ5IolrJaVD/agJmchNlX84C8EWhPicfwV7zkMjs20enxX7Zs7XDQMjyHyTxpX8I8wc + LvnXcRituevN/eNfKpeBdVBBIlyvVCuf7qxS3T5lK+hcYqvVqiAwj06IV3Pg4qppzMI+meDfPtAB + xHjGvn5iX1cO86/ZzvyRR0SAdMRaY9R78a/Kfei51vqPjmeF4dylCcY75v9ShhOSFTamynwrcO7F + f+7pEX3v1w83iLKUK3ArqpIoSqoMnexJ/KR/J7z/1LVtFhyWxMsw4t/+7X+32n9NIL14bdhJwD9+ + r/A/+R5Ydr/dajR7SHk77WZnQOoWsu1ev9Wz7Ubfcv4DDz6Y0FSMwoNWp95qd/kgfxFjySckxI88 + asQkpeqT6ge9jnq1bRvwEBY1ZolheS5mZ6ThnGqzizKTOi1iOl3a6nVos2X1u81es253SNdqmyZx + 7K+GpAExPSp+lmtN40zMSM7XBd6MsRIjS2hsTEkylfMNMs+T12xoYNNkblhTkspf2TDFESAdw9Ue + 9jHI6ovuvPnV/PBIhm9VCa2oWc3cr27IYk+9ON84iAgKCzOBKKvsS2hHSub0AOlzwQ+pI4ceAs7u + gtRNocFSfaCGWL6apE3NbKLe5RAv0YQuHDLzQS7gLvujia0x7642URLNnMGAj/MPFmO3vurTUzFP + Vjk5qVw9VliyITDJPzchxCXHnWgWpH31GwMyiTUPHUeuThgV8Jm7zDeRr/UfUjWWbpIRz90A7YVB + cuAAxFmIhwb2HQ7CWGQUxui5vpurVater8t7U+rB2cFaD1IyMUwYLXTzU7/4c+SldrgKUtenCdKF + ANI0tl8YUeh5Btdx+AE5627+iMRNhYpgDktqpMT1jCQz7dAnrmaC7GeuS+qWMz1r4okfLCwAc2A1 + SSquQP6zXQeb9oEjVk1ia2rYFLmKtmowp3IlzIS+0HHme3TArOkfewqbC7DMHIbsA3pME4NMQrno + Rqd8TxjG+Ho/up/7IcD2gRvw7XbTtcFEHmap4eeLHuBqfd1ih2OaRIbjp4YMnHJKzfIZESD+r2fU + +XwCE6q3PvhCXgVpqcJiOgjnvOuvWv3BAP6f+dfuoNFX7zHjcMU8JEAeDRgYOMiCCMZA7YLPm5NJ + ZHuhNQl9awJobkfBLLDgmOzQDHxqqYMW7AOZR+qT6OvFN5qKgHdnZGVxzEKI7k3mczOwnMi2p7Y/ + d3xQB9YkiKZAF9aEzkKiTsgNHAZY40yk41kEjaNGBHMNbUxLTP57ZWtBhQWYJE5+M3sYxVY9mGbA + BxyYEBUTIjN14DipGvzPbyQFBiSFTUde+BtnIqSVm5bNLMSmDsm81AC2QkwzGBrAJORDP22WO4Zd + N/C19+fW5wbc3QCkFj2Oum18HlMS23AZXkhsgzhYiZxCa+sJPt3fLpSoHzTKTQewCQsyM+Zuf7MP + 6nTgKFxQfckBZUCJO2pjEpNoaiybhpWlcPbwvOtcRj35YKiE7ybMDvJfRCzIh8PiiBbT2u12o9vu + W1W7bzaqjQYdVEmvR6oNEzZH6m2zVW+rG87EZIYQU7IzlD2oO91+q161W3ZfGcpsITWiHTqgVEMc + zHEbqxh7Cu8YUebxND3rNxrEalukCpTXZOP1qwPa6VW7vS7pOXWTUkeEYbl2dbw4tMC3QH5yA/mf + fpuQFqFtfcB+lbR6LdJptMFs1tW1Ehu6ow4wwAhN08YALbMtZkQajgOJOd1u3W636s3dGQkQZIhI + DucmQJSEfQ0yaFKbNqtdy+RrHFQHA6tbbZqoXAHn2k5dcwR8jbARyhBsmXEOegPbAQqtEmr3lAFJ + j5oAzK1Gt6MNuFWMPDar623b/Z7dcfpF5aDmwOrDJux6W0AOuQPATwHIAk3BIDO76dSZVmxlBkWv + 
mma716gP6GDQ76lC5ytkWgpjtekHlDwVE5YS69BuvdPtNPVNbFWBwfE1tLZX1zCdorBc19T1SY1t + FDW27djNVt22TaurCUsZS+5qAo9RNiSmVzACp0+a3XrP6Xc1e1KUln5YXsbzWcEcarrbs3vWoE9g + p6oxdKuDVq9e7wwQGptauqGMy9ANgJJuCj2zXodp68MNqpZJzY7Ta0CFtJ3dqsmODNt9iusprFR3 + IIOW06732706qg47+wty2XXWYof1iXWbllVvdQvrxDzNwWDQ6cLEBrtOZAsT1c3twiaBk6i+xE61 + 57TqDrSvQ3stfn2Oih2EeZDdhuMRiSi/V/6TX+G7HxEJqACqUteDkKsm1oBU0Z1MaIxo6cHRq4ul + H6x8wFAfc+bqL3AHSaYNKDWK0bII1tArBCz1DuKTDXTDJ9hNYEPGZwbFSzZs/ozeAlAGKHIDZpHw + hOo4K5JaUwAgPCNcGUnLSDC1PNrKxYnZ5T4L6qiZFEozzLuy4SNqIWoXEDjh00gWngFInYBRRvwN + tbVu58CBjeHCp2mGNgnDCZCg5YWZbUAwQP/qEpZs6TIjljOG8UxJgByD5a4ybsufzMy2119LwKd+ + yNIdSuZlosCOugKsy+HE6vjegyaE7LSxkzUAPsWasSQvNIlnYLIuS/cTw3cRwVkkVycjhQkWCahH + EoCgwQraxhAE0qcJ0nD15m3m5sTYdPUXm2VlAMWQLp9qEmZxQRFIgIQK5EZiBHRlYCjfKB2Gh3tk + YViDAa4ZRIv6IGzzrsinYcIRM7BKCuXR9E/IzwoTP0xsbTXiF6SK3jqBSkgWQruZu7XcobHsMw4L + w8NwTN3UMBHYZib5DLmLzCOqyxDPDmi6CuO5wImMBC5Vm238XU0RUIo2L1cRRaA+JG5DrF4RXU04 + 4aXJkZPBTGLuJEBCbhBvEiIQxkjlCmYOQD6VYDAKeRKtDpQFSAM3ho0skCdEEGSkPSnfHJE5it2F + EJPPUJvLyHY04bumb6yI9pWwUCcLkHxybMtTaeQb6ozizNdVMyVshks3TjkNAccpjXdXvQQNNwXX + jWuoNS9KmzsQ9iXja5AEpxgMKQSXi/pQNnd/oU0jzmRCJpc7X4dpmK5Bk2oGKv0uX+mOfUhXx5et + qZNw1gRA3WBl4cIOCiTvg+aCl9ZpNYLsdarpXZpIQABCw9PcqJzaBMVqz3W09ZquyHhQgNVWIzJ/ + JJux5ETyvf602Fx82kZLj8yNk3k3dbEzM4T2rFzPdjxN0ziFlEc/qbGCY1Jv15x9AteH0KVtXcAp + AZChOSEl5/y59xwAqKmqvEJoD6t6SD5XKmjpMrR5OC7YETh3bSe495lC2RAwsanQYLrSJJF44A3j + EM5G2wxMgzquoDTzKCIURAbI4pZLwGCgkMpw/lKDPJ9wg1GUhR+1RaQkmWvzm7rRliyWE8kixhdJ + xw6n4zFQIPhfYXH4Dsqibkg0BW0UaTadT7gkW9XuzFAJ1n2DS+a6NvnhkqXqcOxexnYdRSbo/4dm + XRxg2CYLWoa/RghSf+W7JMIV3Iz6i4Q1Euhxs9L2l3lG1vlh2HEYsYRIeygXjZVh930eIGQwl1Ka + wFFHeqATW5wsdCfqah+dbLNZl6nuFiIh9PPQmwQkAmRLYUeornsod6izs/O8HJHeQvnJYOVyXdTC + PsR2bkcrxhZ0YbiOC6+U6wDCD7RMe5aOIMkKEdkzy7ATmxSzB44WFRiiTjylrCKTInjIuMfACNsI + 7ZFMR0WkwgTLYAgjo+OMoSxmFOoD/Aw3SE5U/V6zFNOdCNpbm1qIGI7Igoiqz0Z4ffR+QDUnsR5F + YAmS3JUGpj2HCcGBHqmPcYNliGqLtt4ciBQXw+hseCbDsrjuZxGU0NaWKxw8GjHgpIwI6JxMJro7 + 
9X0sSIO0AsBxDMDqmgYrmctY/qXEGEcGeIv0ohjbVOy+jL6MpcIV6+NPictsgWyT/tyPr6Alcfih + RRcZ/ajuPnJxhz4qkxRYAKhAE5Aovhm8yGTkJTSEEk0i4AApSajBtE3bGBkCJlOWHAli1ypFVwBy + ktVUlygqTFxHC46bPclEXVa92nITKzSIpdXdEknt5YGE+8Klz2EGS380V418YYpBtSXoyZVHUgb9 + 1cfmU+EmW5I4TNHGE4v6W67jbjrVUzQReUNcKDAwc6sM0otmAfVpEWy96N9lcLS0uGdvXRwHwDxR + VgdChTbz9TghxgmjhKF67eJceWSCiukGkHXBY+qOLrUi4HF+pTHLfNQDdhPnHWPfwRAzN9ZSXX0i + JsXoKfE0NWCVKZgZNhfI0OZOrhi5CCo58BmqOFA4ZjwaPEAaTlg2Df4Zbla9ZA5fpX7Oozh0PmWq + j0ggUpgy0fmJNklmZx+oXSI6JQn6wpDfIILrESghS8Bz5txBY0Tqk5N1gkCgMVh5HgWPwKuHrMNC + vUUTNRI8Pa6iMQ02LAqckFsYMUyBIPGbQGYF07ItpcywhBJDhZNCdPlMC3Av654zLFvP77FaJhNN + UxGscoghq60gnXSMtUYjtjYQ35QSJkkqECy9NGNmiY9taobkEeRrU9bVpcN1UEJZzpBKw966AqZ6 + PhF0fZk2sN6PFOqmORIS+ZxbKDN7zjl48Ao8F9K0Eo0VW1XXQtec/fKpWlIxtEvy6MGDtuERk3ra + dJOVC/pLdFWg95ETONrW8LYPmSaA28POceZEMxOUqlyBCmTrEWdPUPoq5+M4ZBR0HGtH0ubjowSp + p3sKfMVcp7Be+AP18ZKqUMzTgPPVRo2Jif5APd8toDZBCpc4BfsTSbp2Lgr1+VudYPkrQ8kIZ2iC + 1bdBMJ5QKB9XrfP0Qk9ksNKNUUqu0SV8trZmloMYAImCKFB/2q6LOTzG2SBfRpWlEF6llaCECkeD + 4p6NEqm+8yKyo/2NL6cIaqBOcCk6mCmwrRIx724VR74e+l64e9FUSc++fK5YJTbO54YMAwwlb2DS + ZGPFGj4gPugX7RuZZO5OjNc5BYjTt493dJsgAmLNS+sqlNcE1HHXYGZ3nwManEUF5HSAZNrPIkAH + NEozHaURNzZjiEy92t5qZp4TCQjOwsoumnSo7LCSrgxi0iDeJ02OeMoKTgnA4mdflrxrN9ex18Ay + IZLPkvAPjMoiM4iBxAUDg26M4vy5P8xTIkF3IVkNeYKyS41IrUV0S6YFgmejRV/uGSUBwsOAvp+8 + u5Q/R/TB7W6Ri8SyAOHkw/GLenmURZEONOR1VjhhVY7doed0DRIcqq39hNCA5Zf5F+HfeEQ3dryj + mcY6KSm6TUqUyqMaFlSIed5YidwALQ4CIGjri+GrQeFnuv9Bkos+iwwVxDUzRKT6TJp5dNCuBW0L + hQftrw7LLZhXC1m5hDHXKr5Sr+RACgQDB1JKtFOvQTsJc/9l6VAxPRAWtiJaOc+BMoFEQEUiyLNj + 0Ki+Zm18xuw/NAVGQRBSJ5B46LHZ3WpW82HezkTupAl1GlULjJH0CepVapsEjHFbDhW9KJo4Re1i + bTCClbmO3cqFbCZSiZpZGXDhAG6aaLjFojoiyTG6nkKwJC62MyiEC3SOplY9sv+mEqSJhvdIQmqg + SlBaQe4NB89JP9a0A8LZN0wdfRCkVAVsiMUmKL3rmQXfwJiGMbIaRZpqcVk6OYyGY4oEpR1NyODe + Sllt1k2jqZ7uJFlCx5S3zEHyOTEXyFgJtDphnQI6qSIRuSOEwfAUg9UxQrambJJZSymISFl7KNtb + WaDU43xO2fMWV+Af7UbkUKg66qomH7bSLIhHyDTSppW4k0TP1XmyyCtryGDcEErNOCPgE603GeVj + 
fYrJnDKQKpplSxZuy9a+nAIQM+SnbxDV4Ps1VysYId5vndOXVujlvTFyjDwkcWaCeChXMXfFuODP + lq6SmTDYvw605rkYfAPivHoxm1SoeWOxxYTdu3WiO9VzsVNQGdgHHqSOGAKmI86W+ZodGr/ojNQL + yqjjGM206rP0RDMrwTdgzcsoXIDfkHlQVxuO8UZIKQBWdV0Rq81YgmnAloXP1YEeL4dvFT7v02Um + xchALlNWlQddW6wy78KYoJyylSA20ClK6QD9XZ59AlJEazDJdRIB0waTBzSEghyeZYO5iD4bCplc + SvRJT1n0rSwzdC3Nd1Aeg74yevzL/UMVCf5J93PaYwpwR6FGuWnAThi4g8NE0bKcAZRFm5xM0XMe + KUkkvqat9xaty5jSj1ivG+ekCG9GZQ5MU5A5cebaoNsQpVsj3HrRKD4VhHU0fAE78jjoaWV7Saiq + 0btkZ5MdHnVTxONsZ1g3YFliscW2MqFkfL+IZIVMzRaND7zvBrSPFqc0bpyRQvD/rFcY1rabP2ha + gfoAryipC2POh9WxBcnGLFBWsYWrZU2wX6phGmXqbywisonLzkpedQgD/qmwPFnEyfNqRneg1M/P + 2ZYJHZvJtJYF2kIHAhMQWFHNKzOSDO21X84aaXVZmJW15i9vYynRTuYr7NRAMDRZQ9Ju9QVNEKw9 + ZQMvz09bI4TqAFVqIzIj6mlxGP0GVcBLhLgC/bjtzVGnyjwC0gCRroliXokkOYGM0nyhl0WPvAG6 + odR7U/BhViEJlPU0KLlet8Hg8tCL9KBJqOOQyF0yJsiICqy3zm+JwCraqqTNaKvhZ0KgMTjxwQy9 + ZKUxnfNMrtCnlEubMbHIy3XSLD8TwtNYYRDayItM57w5czIJlxx8q1dGtkYFsuPFtl7vkGkJuH4Q + NXmbrzbEFpZv6XL159wty0qM7p13wyQLXSWJPhQS4iuQVNu7+VEMjm1ZWxZzLwHR6iUzstTTKfB3 + 4CeQwxeiBRcU636Q7h4UBdpmQhxY0MnwvF6Yu87iOEJP56zFWjdhHdwwfkbbWV0gIjWN0KVaJhO1 + n0j2vnF1SKAdRZo1jyRWE91Jup/M6QSmGtBmV8McJgOxYFpxPr2EtuX8GTvfl4YRGvqW1ONOQ/NZ + UbhC1oYSDiwAKKxsGNaDIth81ryo3b2Vx7YHTJgajBfn67dt99KE48zULAxbXNXLjKbHDLZgTkrm + yDkHIEWYAXYcdX1BVYXBFwnYdlro60T2s0M3C/cQo6eC7UyZg1A8GkNvhdyJERKsIQMhzkX8K/oI + wYgyRntLRqEIyl9YkZ9IyPGh1sLBWiT1XkBNL5kBFuouUktYmw7qSWWkItBZ0R8nKJ5r7maFHnOc + H8qtS/NZkF4h4MtnorPCSzU3xX0Sa96G9QCGw+A1pZmgPQw1LN3RMcpcsH6M2ds2SbGWInUaii7w + w2qsAKnplCYoBkmYU1RH4Fm8LlyemGx9UylARSddGWoQ+sPXqzAr6uPy+WybkHcr8ALE8rCvSVzy + 6Tz4IEiVIVyZvPNKE7KkkmY0gLrAs/XqojJrptPqQ7cGjVCEDdkgeaUoWjMAzLSHNTXyg0DGLDTx + Je9VKWq9MBl0mWXMLeuiZsR3GSkKTksEloJei5Y5cQoLTbelrDBTMMyIn+pCicoiwVLvymRscIyQ + gGzw99UyqdGQTwBGT+fmhdDyuLPbmYn1FvwWqzyAhyq4ZrFlU3aeg/uj3S2XjVw5b6R3c+VscAHg + 5nyFOCTCGfyCYYgbRR9YiQPZaRtjigNQoNOanOrNq1efhz2UJFccYNIqKUiIEV8Ia4tyQYSWQWb9 + S9XhJ2UdntTXKSkBC8B9IhSWa2PSUuXMSmMyc+BzKrUATvewbi6tpr1zNkCZrDzqqI6Waw3vAy5L + 
5Fj/IQPT6k0RWa8Yh4J6sfp1AsIASS5itOY6ecczTFG9lCMT9YttkzNbFDs4XQpD2PF59S7WslBo + XtYRkB0zZ10CCHNCmiWE6BcAeCmwwTJvLQm5eTFrSxggBIhmWXVqvAszzzA5jaj+ukxwclQvLUrb + BkY1drpfRCK8U1zJz4Bxv4PDJlpY/XQq6oNlbNP2QjbRM5CCLJYaUwRFDLhTYVCaRfnha3UQZo0l + sspllO+KeosoBrInsaOEmuyEh+blx45mGCChfVytXiw0WG/KFsnCrg+EMZkodgiLUQfZeTWDBntU + QF9s0M7hL9JDmyKHLQv23E1BFcH0Mh4Nuq9xaSw8o3FLC65bNeblSxCqvAtnC1sLI0wZitC77WRc + Z1Pfjej6MYDtGwRyZlLihRJekv8ieBxWE0MLBT8nqxc8+BHkL7owNV1kR6bClFEKsvudvTyjRBFo + oYSIpAJxWxtKbWLUac/5wEhQ7Sm4iTweiQbqbW1Z87S8pFl25GWp+bKynKvE4+SmgBeqsJNSiLwQ + UhE+Sm3ipQF2LL6ANtA7iZaJmNFD2urlix+k0joErzDTMOUHa3bBy3fUmxJwxtBXDb0yYxRtuxId + F+7wNVJfhgF1UCZWTg0VDqhBlKhdapNGyUTH9vk5N6gsS8FNRpWxCrjo2oNLwssGfq8+otVfs8B0 + sSXaGRxg9BxTXOE09ORCqzgL5xTGEU6pocZXaG/mZjChoavJQ6QVouG8xBfqtLUIijspj3gJiE6r + i7kQEwxTXv2Fv/D1jhsEh1TvKxEZ27aXpRR9y4Mt4qUBQHwOe49K4SybAGbYVKTyJcBsGejYhbBy + I0rqO428Urfzfgx1KxneAfAyohU7zl/oiZX3Ub1lKpqYyGytgr/ga5YnM11UOmybVeXVR4nCnvYN + wjd6D3WLYU3fDDGrF8qJTOB+CpOPxbm24tEgEcZYm6qZ4bRNyVg4l69+q7sS0VJR4koQF3EOMC6v + pQK0I89CXQWvQSqnP+d6OXJGAxBNuufDKQ91XlssyjANJFpoDkFcQ41lEhgMBUzX8D7aWoMCdVpS + 4HT9Il/Ad5I355TonNwJ1NVRzFAnmqc/hXZV+E3e7KmtiTX+697d/mxYQpsczxGsRCMCpWqxfjW0 + yajDiQqntsV8CcJ4C6064P+zQJMwO4kk8EqBCswDFTMRcaCUt8WWHpoTXq28ELHLhKkHacu4J3Qk + mhlyRp0ix+tY9M5XtvHIudAdwJJvQR6qYgBvF0JB4HPx8krkLJ6u8jI5nrIzOmWpMZAB5/bAYYou + /IY6OAO5SvNyVbQeaxhCQjv1rgjHmtTPFgw9DkGDFYK9sF+22SUFGSVcoDuRHdu2of87xJmWYCkN + qsZSLOS/ZNDO92f7bhXWD3eAQMgnineyfr5HrPC2mM9zwdD+8vdBKW/7YTp+wDaEJXDyxTRKWy1e + uXSgvepFvP+t9MpWfXtp+eyxb7K1QZs/6j0H+D/ONLmsN+2rNzVpr/KrytxVnLvm74xj5BksosrK + R1lyoHcL8WXKt2yBOmPBUbZnf6/UD5r5W5y2p5kOwEfgED57qxR4GHvbM5brrNwFvA2iXldfI8Zf + b/jPXxUm36yGwtU0f9me9lXpq8L4M3/3rld+gfJOs39xMqLVCjDLtfPJiK8Oo+gSX5VOpvBOpz/6 + 1Pz9XixIsbc36BN4EG//evz8tXQu6mvc+CsPWTEmSv/4C1p5Sfjz5bgzvORTefctq/6IJ+ANijXx + L7wlTryEUX48OcErvdwUEvzTn/Fuz+1Fny+Z4xOtsVfh4nXWNfE+6/8HSb9az9xjAAA= + headers: + Accept-Ranges: + - bytes + - bytes + Age: + - '0' + - '0' + Cache-Control: + - no-cache + 
Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - report-uri https://api.datadoghq.com/csp-report + Content-Type: + - text/html; charset=utf-8 + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:24:39 GMT + Pragma: + - no-cache + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + Via: + - 1.1 varnish + X-Cache: + - MISS + X-Cache-Hits: + - '0' + X-Content-Type-Options: + - nosniff + X-DD-VERSION: + - '35.2134903' + X-Served-By: + - cache-mad22024-MAD + X-Timer: + - S1580747080.719678,VS0,VE141 + status: + code: 404 + message: Not Found +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '0' + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/screen/966448 + response: + body: + string: !!binary | + H4sIAAAAAAAAA3WQ0W6DMAxF/8WvCygNGSv5jr1NFQqNAUtpQpMUVCH+fWGje9ubfX107esVOq+D + aRMli6DA4XLUDAJq03pnn6B6bSMyMBivgaZE3h3srgCDa2YTmiwKLnjBRcGrz1OthFSVLJvqdK7k + G+eK8wzvO2hnpYhFI/pCNjrLN2+op39MPspG1FxWfyYj0jAmUO5hLYOFTBpfTcLbZPM57awD6c5i + BPV1+YEGTHuz5iQ9OfoNsuak9wfGY3TPl+l5UPEZs1NpvTbl6X1dsGPoZjUFbzbYLhuD19PyijBk + icEeq6lrKc/bN7O/h4FaAQAA + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:24:42 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:24:39 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - PKDIrz8Hcluof9oNzY3q1BouPTowe6nlZ4slm6KLsMEc/9DaK1hteKVCh6mza/IQ + 
X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/screen/966448 + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWSi0qyi8qVrJSiFYKzUtMyklVKMlXSMvMS1EITi5KTc1Lyk8sSlFIyy9SyExR + sDQzMzGxUIqtBQAa5+wKOAAAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:24:42 GMT + Pragma: + - no-cache + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 404 + message: Not Found +version: 1 diff --git a/tests/integration/dogshell/cassettes/TestDogshell.test_service_check.yaml b/tests/integration/dogshell/cassettes/TestDogshell.test_service_check.yaml new file mode 100644 index 000000000..9264c3215 --- /dev/null +++ b/tests/integration/dogshell/cassettes/TestDogshell.test_service_check.yaml @@ -0,0 +1,47 @@ +interactions: +- request: + body: '{"check": "check_pg", "host_name": "host0", "message": null, "status": + 1, "tags": null, "timestamp": null}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '106' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: https://api.datadoghq.com/api/v1/check_run + response: + body: + string: '{"status": "ok"}' + headers: + Connection: + - keep-alive + Content-Length: + - '16' + 
Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - text/json + DD-POOL: + - propjoe + Date: + - Mon, 03 Feb 2020 16:25:16 GMT + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - SAMEORIGIN + status: + code: 202 + message: Accepted +version: 1 diff --git a/tests/integration/dogshell/cassettes/TestDogshell.test_timeboards.frozen b/tests/integration/dogshell/cassettes/TestDogshell.test_timeboards.frozen new file mode 100644 index 000000000..df4eaf54e --- /dev/null +++ b/tests/integration/dogshell/cassettes/TestDogshell.test_timeboards.frozen @@ -0,0 +1 @@ +2020-02-03T17:24:27.456297+01:00 \ No newline at end of file diff --git a/tests/integration/dogshell/cassettes/TestDogshell.test_timeboards.seed b/tests/integration/dogshell/cassettes/TestDogshell.test_timeboards.seed new file mode 100644 index 000000000..da9ab8020 --- /dev/null +++ b/tests/integration/dogshell/cassettes/TestDogshell.test_timeboards.seed @@ -0,0 +1 @@ +55513 \ No newline at end of file diff --git a/tests/integration/dogshell/cassettes/TestDogshell.test_timeboards.yaml b/tests/integration/dogshell/cassettes/TestDogshell.test_timeboards.yaml new file mode 100644 index 000000000..d43e8b030 --- /dev/null +++ b/tests/integration/dogshell/cassettes/TestDogshell.test_timeboards.yaml @@ -0,0 +1,508 @@ +interactions: +- request: + body: '{"description": "Description for /tmp/1580747067.457344-0.15756870482229213", + "graphs": [{"definition": {"requests": [{"q": "testing.metric.1{host:blah.host.1}"}], + "viz": "timeseries"}, "title": "test metric graph"}], "title": "/tmp/1580747067.457344-0.15756870482229213"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '272' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: POST + uri: 
https://api.datadoghq.com/api/v1/dash + response: + body: + string: !!binary | + H4sIAAAAAAAAA5VSy46cMBD8FeRDLuFhzGtAWmUPe01OuUURamwzWAuYsQ1RdjT/nvbAahQpUpKb + 7a7uLlfVlQiwA2muxEgQrZ7Hn6TpYbQyJGcDy2BJ8+1KhOzVrJzSs4du6o00xKlJWmmUtCTE7ssq + rdvRF1/Fm5rP8SSdUTxOr4O2rulGGGJ/itMbuX2/hTjFjfLABzs4uC/GMlbltIzgZLuBUdCNuKuZ + 13EMkZHlRi07JfLyuAW9NkHipiVJixOt8oqWVZwXVZbnEY3ToirKU0XzE2OsZmmG3N8p/FcTR72c + FMicUUYjyiKafU3LhuUNq+I6ress/UhpQylumOWPVnnsJX+N3kQfvUKPz/4pRTo1oyE5BrYdGoCC + K+u/i4DDjAFmcRdqVnzQI9h4QsH58CzAgdDn4RJzPfldMHnYlwMWfL7DHt/c5VO2BTEptNOZFb02 + 2g/fa8C5tLbdnwjCsHlDo3vl+ex4OYEa/4GN4j4yZHBusU2SWMlXI2M0eEPaxlNO9iO6xfOuAwpl + WXVFVle0ZkUte1aIHuqCfrJP+emDeDIYKE0wOZMWB6W/W4Dw1Xi+iY877rqL7g1/hOQ9I3+KiJFW + r4Z7YRNYVLKlvw0it19SOjBtSAMAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:24:27 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:24:27 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - EE74ncTR989SomsonUvABJWdGDkXBs7Emqj3HVDpp6NYddpvHp95kXsnHux1Es9E + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/dash/1568920 + response: + body: + string: !!binary | + H4sIAAAAAAAAA5VSy46cMBD8FeRDLuFhzGtAWmUPe01OuUURamwzWAuYsQ1RdjT/nvbAahQpUpKb + 7a7uLlfVlQiwA2muxEgQrZ7Hn6TpYbQyJGcDy2BJ8+1KhOzVrJzSs4du6o00xKlJWmmUtCTE7ssq + 
rdvRF1/Fm5rP8SSdUTxOr4O2rulGGGJ/itMbuX2/hTjFjfLABzs4uC/GMlbltIzgZLuBUdCNuKuZ + 13EMkZHlRi07JfLyuAW9NkHipiVJixOt8oqWVZwXVZbnEY3ToirKU0XzE2OsZmmG3N8p/FcTR72c + FMicUUYjyiKafU3LhuUNq+I6ress/UhpQylumOWPVnnsJX+N3kQfvUKPz/4pRTo1oyE5BrYdGoCC + K+u/i4DDjAFmcRdqVnzQI9h4QsH58CzAgdDn4RJzPfldMHnYlwMWfL7DHt/c5VO2BTEptNOZFb02 + 2g/fa8C5tLbdnwjCsHlDo3vl+ex4OYEa/4GN4j4yZHBusU2SWMlXI2M0eEPaxlNO9iO6xfOuAwpl + WXVFVle0ZkUte1aIHuqCfrJP+emDeDIYKE0wOZMWB6W/W4Dw1Xi+iY877rqL7g1/hOQ9I3+KiJFW + r4Z7YRNYVLKlvw0it19SOjBtSAMAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:24:28 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:24:28 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - ADT0ms9dQnbDHbbduv4c09ChngZrYY7A/Pgms/qacMOruS4mPwZ1GJWq74I7G11W + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"description": "Description for /tmp/1580747067.457344-0.15756870482229213", + "graphs": [{"definition": {"requests": [{"q": "testing.metric.1{host:blah.host.1}"}], + "viz": "timeseries"}, "title": "test metric graph"}], "template_variables": + [], "title": "dash title eff0fd731299544d9519fc0cf2ad8767"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '299' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: PUT + uri: https://api.datadoghq.com/api/v1/dash/1568920 + response: + body: + string: !!binary | + 
H4sIAAAAAAAAA41Su47cMAz8FUNFmqxtSX7IMnBIirRJlS4IDFmPtXB+rSRvkFvsv4eyNzkckOI6 + kRySoxnekBJ+QO0NOS1Ut8zjb9QaMXp9Qmcn1sGj9scNKW3sbINd5gi92hfUomAn7bWz2qMTdF82 + 7cOBvsQqRHY+Z5MOzsqM3IbFh7YfxZDFV0bu6P7zfoIpYdQPfHKAk30xlKGqp3UUQXdX4azoR9jV + zts4noCRl86uByX05TVKzOKSPExrTqoGs5LhmmVlxYqyTHFGKlbVDcNlQynllBTA/S+FqESyB4k2 + BhvFCkI5r8pS8YpwI7E0VKiG1Qy6JAgWtALqFFOcYpri4jupW1q2lGWccF6Qjxi3GAN41r86G7GX + 8jl9USZ9FgbSMUWAD6f438CuBwdAcevjfwHwcGMQs9qVmq0cllH4bALF5fBZiSDUch4umVymuEtM + EfbtAUu+7rDXfx76Wd8JNVnwM7gNzHZLHH7UhJTa++5IIYBB8xWcNjbyOfB6EnZ8Bxsr482gIYTV + t3nutdyczsDhK9B2kXJ+PMEuWfa9wKKuWV8VnGFOK64NrZQRvMKf/FPZfFBPDi5qQXA606IelP5j + QZMVtC5Y8bAA4JuLfPPoMuzaRd+DdLc8fYflTvtlczKqm4vV5lfyZhq6/wEj29lOTgMAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:24:29 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:24:28 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - gH++OYwf8a2QZXnzDsHHnXqPhHbI48oqNvFjE/0p0ObpMBY4290QCI5SB0tU0MAF + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/dash/1568920 + response: + body: + string: !!binary | + H4sIAAAAAAAAA41Su47cMAz8FUNFmqxtSX7IMnBIirRJlS4IDFmPtXB+rSRvkFvsv4eyNzkckOI6 + kRySoxnekBJ+QO0NOS1Ut8zjb9QaMXp9Qmcn1sGj9scNKW3sbINd5gi92hfUomAn7bWz2qMTdF82 + 
7cOBvsQqRHY+Z5MOzsqM3IbFh7YfxZDFV0bu6P7zfoIpYdQPfHKAk30xlKGqp3UUQXdX4azoR9jV + zts4noCRl86uByX05TVKzOKSPExrTqoGs5LhmmVlxYqyTHFGKlbVDcNlQynllBTA/S+FqESyB4k2 + BhvFCkI5r8pS8YpwI7E0VKiG1Qy6JAgWtALqFFOcYpri4jupW1q2lGWccF6Qjxi3GAN41r86G7GX + 8jl9USZ9FgbSMUWAD6f438CuBwdAcevjfwHwcGMQs9qVmq0cllH4bALF5fBZiSDUch4umVymuEtM + EfbtAUu+7rDXfx76Wd8JNVnwM7gNzHZLHH7UhJTa++5IIYBB8xWcNjbyOfB6EnZ8Bxsr482gIYTV + t3nutdyczsDhK9B2kXJ+PMEuWfa9wKKuWV8VnGFOK64NrZQRvMKf/FPZfFBPDi5qQXA606IelP5j + QZMVtC5Y8bAA4JuLfPPoMuzaRd+DdLc8fYflTvtlczKqm4vV5lfyZhq6/wEj29lOTgMAAA== + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:24:29 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:24:29 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - +6muH0vWWhHE6JfE/xHkdpoFSNgX/+wCvqEMuEDvglDKir3htwvCDYdHi0bPaPF0 + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: '{"description": "new_desc", "graphs": [{"definition": {"requests": [{"q": + "avg:system.load.15{web,env:prod}"}], "viz": "timeseries"}, "title": "blerg"}], + "template_variables": [], "title": "new_title"}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '201' + Content-Type: + - application/json + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: PUT + uri: https://api.datadoghq.com/api/v1/dash/1568920 + response: + body: + string: !!binary | + H4sIAAAAAAAAA41SQW7bMBD8SsFDL5UlSrbskECQfKA99VYEwopcWUQoUSYpBYnhv2dpOS0C5NAb + 
MZwdzs7wzDSEnskz8wi6caN9ZbIDGzBjRw9TH5j8c2YaOzOaaNyYqIt5Y5JFM2BAbzCwjKZPM4a4 + sk90C8tRhtcQccitA52X9fkF2wzHRU7e6Qu7PF0y0ogWid1a9EeCCMFhshCxWcAbIJwkx9najDwE + 5c20mmAjvjQJobc/RBK0njOmaJ2ImqQrXvENrzZ8+7vcy2onq0MuSiG25Q/OJeckkAZN4p52z5s3 + 3W2eoSM4QWW9vxMV/yvYtJQP5WFC8kaEW1Y9jPq6yWhU7yyEfKA8VP+oIYJ2x/6UKzekt2BItF83 + 2refV9q/LdZdTWhAD4bSjn6mKrxL4usdKIUhNCvEiEbDC/XQmeRn5eMAxv6HG6NSo6yPcQqyKAKq + 2WNOxS9k2yfLxXosylrt2hY47PeHtt6KAxdVLbCrat2BqPlDuN/dfdf3HqN3jKodnL5Z+qICkZPA + 9iBuFRB99slvkX4jvXUNvaBePvr0GNzsVYqugMkUS/mJyi7vCrbkW8kCAAA= + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:24:31 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:24:29 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - ty7T8eIeXOfZhM7KDN5nGo8JS7ZSIWAqBNFeZshTg3LLDJJa7mPU5wqGt0nOPCpy + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/dash/1568920 + response: + body: + string: !!binary | + H4sIAAAAAAAAA41SQW7bMBD8SsFDL5UlSrbskECQfKA99VYEwopcWUQoUSYpBYnhv2dpOS0C5NAb + MZwdzs7wzDSEnskz8wi6caN9ZbIDGzBjRw9TH5j8c2YaOzOaaNyYqIt5Y5JFM2BAbzCwjKZPM4a4 + sk90C8tRhtcQccitA52X9fkF2wzHRU7e6Qu7PF0y0ogWid1a9EeCCMFhshCxWcAbIJwkx9najDwE + 5c20mmAjvjQJobc/RBK0njOmaJ2ImqQrXvENrzZ8+7vcy2onq0MuSiG25Q/OJeckkAZN4p52z5s3 + 
3W2eoSM4QWW9vxMV/yvYtJQP5WFC8kaEW1Y9jPq6yWhU7yyEfKA8VP+oIYJ2x/6UKzekt2BItF83 + 2refV9q/LdZdTWhAD4bSjn6mKrxL4usdKIUhNCvEiEbDC/XQmeRn5eMAxv6HG6NSo6yPcQqyKAKq + 2WNOxS9k2yfLxXosylrt2hY47PeHtt6KAxdVLbCrat2BqPlDuN/dfdf3HqN3jKodnL5Z+qICkZPA + 9iBuFRB99slvkX4jvXUNvaBePvr0GNzsVYqugMkUS/mJyi7vCrbkW8kCAAA= + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:24:32 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:24:31 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - YKF8+1vTI0wiWlB3VWhiMVnZ1RLtV3h2yAW6/TGe9qIMWdYXxsNpy3J4QxfrJoDD + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '0' + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: DELETE + uri: https://api.datadoghq.com/api/v1/dash/1568920 + response: + body: + string: '' + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Length: + - '4' + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:24:34 GMT + Pragma: + - no-cache + Set-Cookie: + - DD-PSHARD=233; Max-Age=604800; Path=/; expires=Mon, 10-Feb-2020 16:24:32 GMT; + secure; HttpOnly + Strict-Transport-Security: + - max-age=15724800; + X-Content-Type-Options: + - nosniff + X-DD-Debug: + - 
tp1qdVxoUmtlsVp6hgBWraWfL5vEbA116VZkaWKWIZtgPr5Ima8zysCBv+o2WoZ/ + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 204 + message: No Content +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Cookie: + - DD-PSHARD=233 + User-Agent: + - python-requests/2.22.0 + method: GET + uri: https://api.datadoghq.com/api/v1/dashboard/1568920 + response: + body: + string: !!binary | + H4sIAAAAAAAAA6tWSi0qyi8qVrJSiFZySSzOSMpPLEpRKM8syVDwdFEwNDWzsDQyUMjLL1FIyy/N + S1GKrQUAEnTLMzMAAAA= + headers: + Cache-Control: + - no-cache + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Security-Policy: + - frame-ancestors 'self'; report-uri https://api.datadoghq.com/csp-report + Content-Type: + - application/json + DD-POOL: + - dogweb + Date: + - Mon, 03 Feb 2020 16:24:34 GMT + Pragma: + - no-cache + Strict-Transport-Security: + - max-age=15724800; + Transfer-Encoding: + - chunked + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-DD-VERSION: + - '35.2134903' + X-Frame-Options: + - SAMEORIGIN + status: + code: 404 + message: Not Found +version: 1 diff --git a/tests/integration/dogshell/test_dogshell.py b/tests/integration/dogshell/test_dogshell.py index 9fb96fbbb..69c22389b 100644 --- a/tests/integration/dogshell/test_dogshell.py +++ b/tests/integration/dogshell/test_dogshell.py @@ -1,107 +1,183 @@ -# stdlib +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc from hashlib import md5 import json import os import random import re -import socket import subprocess import time import tempfile -import unittest -import requests +import sys -# 3rd -from nose.plugins.attrib import attr +import pytest +import requests -# datadog -from datadog.dogshell.common import find_localhost from datadog.util.compat import is_p3k, ConfigParser +from ..api.constants import API_KEY, APP_KEY, MONITOR_REFERENCED_IN_SLO_MESSAGE + +WAIT_TIME = 11 def get_temp_file(): """Return a (fn, fp) pair""" if is_p3k(): fn = "/tmp/{0}-{1}".format(time.time(), random.random()) - return (fn, open(fn, 'w+')) + return (fn, open(fn, "w+")) else: tf = tempfile.NamedTemporaryFile() return (tf.name, tf) -class TestDogshell(unittest.TestCase): - host_name = 'test.host.dogshell5' - wait_time = 10 +@pytest.fixture # (scope="module") +def dogshell_config(): + config = ConfigParser() + config.add_section("Connection") + config.set("Connection", "apikey", API_KEY) + config.set("Connection", "appkey", APP_KEY) + config.set("Connection", "api_host", os.environ.get("DATADOG_HOST", "https://api.datadoghq.com")) + return config - # Test init - def setUp(self): - # Generate a config file for the dog shell - self.config_fn, self.config_file = get_temp_file() - config = ConfigParser() - config.add_section('Connection') - config.set('Connection', 'apikey', os.environ['DATADOG_API_KEY']) - config.set('Connection', 'appkey', os.environ['DATADOG_APP_KEY']) - config.set('Connection', 'api_host', os.environ['DATADOG_HOST']) - config.write(self.config_file) - self.config_file.flush() - # Tests - def test_config_args(self): - out, err, return_code = self.dogshell(["--help"], use_cl_args=True) +@pytest.fixture # (scope="module") +def config_file(tmp_path, dogshell_config): + """Generate a config file for the dog shell.""" + filename = tmp_path / ".test.dog.ini" + with filename.open("w") as fp: + dogshell_config.write(fp) + return str(filename) + + 
+@pytest.fixture +def dogshell(capsys, config_file, dog): + """Helper function to call the dog shell command.""" + import click + from click.testing import CliRunner + + runner = CliRunner(mix_stderr=False) + + @click.command(context_settings={"ignore_unknown_options": True}) + @click.argument('args', nargs=-1, type=click.UNPROCESSED) + def main(args): + from datadog.dogshell import main as run + orig = sys.argv + try: + sys.argv = list(args) + run() + finally: + sys.argv = orig + + def run(args, stdin=None, check_return_code=True, use_cl_args=False): + cmd = ["dogshell", "--config", config_file] + args + if use_cl_args: + cmd = [ + "dogshell", + "--api-key={0}".format(dog._api_key), + "--application-key={0}".format(dog._application_key), + ] + args + + with capsys.disabled(): + result = runner.invoke(main, cmd, input=stdin, prog_name=cmd[0]) + return_code = result.exit_code + out = result.stdout_bytes + err = result.stderr_bytes + if check_return_code: + assert return_code == 0, err + assert err == b"" + return out.decode("utf-8"), err.decode("utf-8"), return_code + return run + + +@pytest.fixture +def dogshell_with_retry(vcr_cassette, dogshell): + def run(cmd, retry_limit=10, retry_condition=lambda o, r: r != 0): + number_of_interactions = len(vcr_cassette.data) if vcr_cassette.record_mode == "all" else -1 + + out, err, return_code = dogshell(cmd, check_return_code=False) + retry_count = 0 + while retry_count < retry_limit and retry_condition(out, return_code): + time.sleep(WAIT_TIME) + + if vcr_cassette.record_mode == "all": + # remove failed interactions + vcr_cassette.data = vcr_cassette.data[:number_of_interactions] + + out, err, return_code = dogshell(cmd, check_return_code=False) + retry_count += 1 + if retry_condition(out, return_code): + raise Exception( + "Retry limit reached for command {}:\nSTDOUT: {}\nSTDERR: {}\nSTATUS_CODE: {}".format( + cmd, out, err, return_code + ) + ) + return out, err, return_code + return run + + +@pytest.fixture +def 
get_unique(freezer, vcr_cassette_name, vcr_cassette, vcr): + if vcr_cassette.record_mode == "all": + seed = int(random.random() * 100000) + + with open( + os.path.join( + vcr.cassette_library_dir, vcr_cassette_name + ".seed" + ), + "w", + ) as f: + f.write(str(seed)) + else: + with open( + os.path.join( + vcr.cassette_library_dir, vcr_cassette_name + ".seed" + ), + "r", + ) as f: + seed = int(f.readline().strip()) - def test_find_localhost(self): - # Once run - assert socket.getfqdn() == find_localhost() - # Once memoized - assert socket.getfqdn() == find_localhost() + random.seed(seed) - def test_comment(self): + def generate(): + with freezer: + return md5(str(time.time() + random.random()).encode("utf-8")).hexdigest() + return generate + + +class TestDogshell: + host_name = "test.host.dogshell5" + + # Tests + def test_config_args(self, dogshell): + out, err, return_code = dogshell(["--help"], use_cl_args=True) + assert 0 == return_code + + def test_comment(self, dogshell, dogshell_with_retry, user_handle): # Post a new comment - cmd = ["comment", "post"] + cmd = ["comment", "post", user_handle] comment_msg = "yo dudes" post_data = {} - out, err, return_code = self.dogshell(cmd, stdin=comment_msg) + out, _, _ = dogshell(cmd, stdin=comment_msg) post_data = self.parse_response(out) - assert 'id' in post_data, post_data - assert 'url' in post_data, post_data - assert 'message' in post_data, post_data - assert comment_msg in post_data['message'] + assert "id" in post_data + assert "url" in post_data + assert comment_msg in post_data["message"] # Read that comment from its id - time.sleep(self.wait_time) - cmd = ["comment", "show", post_data['id']] - out, err, return_code = self.dogshell(cmd) + cmd = ["comment", "show", post_data["id"]] + out, _, _ = dogshell_with_retry(cmd) show_data = self.parse_response(out) - assert comment_msg in show_data['message'] + assert comment_msg in show_data["message"] # Update the comment - cmd = ["comment", "update", post_data['id']] + 
cmd = ["comment", "update", post_data["id"], user_handle] new_comment = "nothing much" - out, err, return_code = self.dogshell(cmd, stdin=new_comment) + out, _, _ = dogshell(cmd, stdin=new_comment) update_data = self.parse_response(out) - self.assertEquals(update_data['id'], post_data['id']) - assert new_comment in update_data['message'] - - # Read the updated comment - time.sleep(self.wait_time) - cmd = ["comment", "show", post_data['id']] - out, err, return_code = self.dogshell(cmd) - show_data2 = self.parse_response(out) - assert new_comment in show_data2['message'] - - # Delete the comment - cmd = ["comment", "delete", post_data['id']] - out, err, return_code = self.dogshell(cmd) - # self.assertEquals(out, '') - - # Shouldn't get anything - time.sleep(self.wait_time) - cmd = ["comment", "show", post_data['id']] - out, err, return_code = self.dogshell(cmd, check_return_code=False) - self.assertEquals(out, '') - self.assertEquals(return_code, 1) - - def test_event(self): + assert update_data["id"] == post_data["id"] + assert new_comment in update_data["message"] + + def test_event(self, dog, dogshell, dogshell_with_retry): # Post an event title = "Testing events from dogshell" body = "%%%\n*Cool!*\n%%%\n" @@ -110,488 +186,504 @@ def test_event(self): event_id = None def match_permalink(out): - match = re.match(r'.*/event/event\?id=([0-9]*)', out, re.DOTALL) or \ - re.match(r'.*/event/jump_to\?event_id=([0-9]*)', out, re.DOTALL) + match = re.match(r".*/event/event\?id=([0-9]*)", out, re.DOTALL) or re.match( + r".*/event/jump_to\?event_id=([0-9]*)", out, re.DOTALL + ) if match: return match.group(1) else: return None - out, err, return_code = self.dogshell(cmd, stdin=body) - event_id = match_permalink(out) - assert event_id, out + out, err, return_code = dogshell(cmd, stdin=body) - # Add a bit of latency for the event to appear - time.sleep(self.wait_time) + event_id = match_permalink(out) + assert event_id # Retrieve the event cmd = ["event", "show", event_id] - 
out, err, return_code = self.dogshell(cmd) + out, _, _ = dogshell_with_retry(cmd) event_id2 = match_permalink(out) - self.assertEquals(event_id, event_id2) + assert event_id == event_id2 + + # Get a real time from the event + event = dog.Event.get(event_id) + start = event["event"]["date_happened"] - 30 * 60 + end = event["event"]["date_happened"] + 1 # Get a stream of events - cmd = ["event", "stream", "30m", "--tags", tags] - out, err, return_code = self.dogshell(cmd) + cmd = ["event", "stream", str(start), str(end), "--tags", tags] + out, err, return_code = dogshell(cmd) event_ids = (match_permalink(l) for l in out.split("\n")) event_ids = set([e for e in event_ids if e]) assert event_id in event_ids - def test_metrics(self): + def test_metrics(self, dogshell, get_unique, dogshell_with_retry): # Submit a unique metric from a unique host - unique = self.get_unique() - metric = "test.dogshell.test_metric_%s" % unique - host = self.host_name - self.dogshell(["metric", "post", "--host", host, metric, "1"]) - - # Query for the metric, commented out because caching prevents us - # from verifying new metrics - # out, err, return_code = self.dogshell(["search", "query", - # "metrics:" + metric]) - # assert metric in out, (metric, out) - - # Query for the host - out, err, return_code = self.dogshell(["search", "query", - "hosts:" + host]) - # assert host in out, (host, out) + unique = get_unique() + metric = "test.dogshell.test_metric_{}".format(unique) + host = "{}{}".format(self.host_name, unique) + dogshell(["metric", "post", "--host", host, metric, "1"]) # Query for the host and metric - out, err, return_code = self.dogshell(["search", "query", unique]) - # assert host in out, (host, out) - # Caching prevents us from verifying new metrics - # assert metric in out, (metric, out) + dogshell_with_retry( + ["search", "query", unique], retry_condition=lambda o, r: host not in o or metric not in o + ) # Give the host some tags + # The host tag association can take some 
time, so bump the retry limit to reduce flakiness tags0 = ["t0", "t1"] - self.dogshell(["tag", "add", host] + tags0) + out, _, _ = dogshell_with_retry(["tag", "add", host] + tags0, retry_limit=30) + for t in tags0: + assert t in out # Verify that that host got those tags - out, err, return_code = self.dogshell(["tag", "show", host]) - for t in tags0: - assert t in out, (t, out) + dogshell_with_retry(["tag", "show", host], retry_condition=lambda o, r: "t0" not in o or "t1" not in o) # Replace the tags with a different set tags1 = ["t2", "t3"] - self.dogshell(["tag", "replace", host] + tags1) - out, err, return_code = self.dogshell(["tag", "show", host]) + out, _, _ = dogshell(["tag", "replace", host] + tags1) for t in tags1: - assert t in out, (t, out) + assert t in out for t in tags0: - assert t not in out, (t, out) + assert t not in out # Remove all the tags - self.dogshell(["tag", "detach", host]) - out, err, return_code = self.dogshell(["tag", "show", host]) - self.assertEquals(out, "") + out, _, _ = dogshell(["tag", "detach", host]) + assert out == "" - def test_timeboards(self): + def test_timeboards(self, dogshell, get_unique): # Create a timeboard and write it to a file name, temp0 = get_temp_file() graph = { "title": "test metric graph", - "definition": - { - "requests": [{"q": "testing.metric.1{host:blah.host.1}"}], - "viz": "timeseries", - } + "definition": {"requests": [{"q": "testing.metric.1{host:blah.host.1}"}], "viz": "timeseries"}, } - - self.dogshell(["timeboard", "new_file", name, json.dumps(graph)]) + dogshell(["timeboard", "new_file", name, json.dumps(graph)]) dash = json.load(temp0) - - assert 'id' in dash, dash - assert 'title' in dash, dash + assert "id" in dash + assert "title" in dash # Update the file and push it to the server - unique = self.get_unique() - dash['title'] = 'dash title %s' % unique + unique = get_unique() + dash["title"] = "dash title {}".format(unique) name, temp1 = get_temp_file() json.dump(dash, temp1) temp1.flush() - 
self.dogshell(["timeboard", "push", temp1.name]) + dogshell(["timeboard", "push", temp1.name]) # Query the server to verify the change - out, _, _ = self.dogshell(["timeboard", "show", str(dash['id'])]) + out, _, _ = dogshell(["timeboard", "show", str(dash["id"])]) out = json.loads(out) - assert "dash" in out, out - assert "id" in out["dash"], out - self.assertEquals(out["dash"]["id"], dash["id"]) - assert "title" in out["dash"] - self.assertEquals(out["dash"]["title"], dash["title"]) + out["dash"]["id"] == dash["id"] + out["dash"]["title"] == dash["title"] new_title = "new_title" new_desc = "new_desc" - new_dash = [{ - "title": "blerg", - "definition": { - "requests": [ - {"q": "avg:system.load.15{web,env:prod}"} - ] - } - }] + new_dash = [ + { + "title": "blerg", + "definition": {"viz": "timeseries", "requests": [{"q": "avg:system.load.15{web,env:prod}"}]}, + } + ] # Update a dash directly on the server - self.dogshell(["timeboard", "update", str(dash["id"]), new_title, new_desc], - stdin=json.dumps(new_dash)) - - # Query the server to verify the change - out, _, _ = self.dogshell(["timeboard", "show", str(dash["id"])]) - out = json.loads(out) - assert "dash" in out, out - assert "id" in out["dash"], out - self.assertEquals(out["dash"]["id"], dash["id"]) - assert "title" in out["dash"], out - self.assertEquals(out["dash"]["title"], new_title) - assert "description" in out["dash"], out - self.assertEquals(out["dash"]["description"], new_desc) - assert "graphs" in out["dash"], out - self.assertEquals(out["dash"]["graphs"], new_dash) + out, _, _ = dogshell( + ["timeboard", "update", str(dash["id"]), new_title, new_desc], stdin=json.dumps(new_dash) + ) + out = json.loads(out) + # Template variables are empty, lets remove them because the `pull` command won't show them + out["dash"].pop("template_variables", None) + assert out["dash"]["id"] == dash["id"] + assert out["dash"]["title"] == new_title + assert out["dash"]["description"] == new_desc + assert 
out["dash"]["graphs"] == new_dash # Pull the updated dash to disk fd, updated_file = tempfile.mkstemp() try: - self.dogshell(["timeboard", "pull", str(dash["id"]), updated_file]) + dogshell(["timeboard", "pull", str(dash["id"]), updated_file]) updated_dash = {} with open(updated_file) as f: updated_dash = json.load(f) - assert "dash" in out - self.assertEquals(out["dash"], updated_dash) + assert out["dash"] == updated_dash finally: os.unlink(updated_file) # Delete the dash - self.dogshell(["timeboard", "delete", str(dash["id"])]) + dogshell(["--timeout", "30", "timeboard", "delete", str(dash["id"])]) # Verify that it's not on the server anymore - out, err, return_code = self.dogshell(["dashboard", "show", str(dash['id'])], - check_return_code=False) - self.assertNotEquals(return_code, 0) + out, _, return_code = dogshell(["dashboard", "show", str(dash["id"])], check_return_code=False) + assert return_code != 0 - @attr('screenboard') - def test_screenboards(self): + def test_screenboards(self, dogshell, get_unique): # Create a screenboard and write it to a file name, temp0 = get_temp_file() graph = { "title": "test metric graph", - "definition": - { - "requests": [{"q": "testing.metric.1{host:blah.host.1}"}], - "viz": "timeseries", - } + "definition": {"requests": [{"q": "testing.metric.1{host:blah.host.1}"}], "viz": "timeseries"}, } - self.dogshell(["screenboard", "new_file", name, json.dumps(graph)]) + dogshell(["screenboard", "new_file", name, json.dumps(graph)]) screenboard = json.load(temp0) - assert 'id' in screenboard, screenboard - assert 'title' in screenboard, screenboard + assert "id" in screenboard + assert "board_title" in screenboard # Update the file and push it to the server - unique = self.get_unique() - screenboard['title'] = 'screenboard title %s' % unique + unique = get_unique() + screenboard["title"] = "screenboard title {}".format(unique) name, temp1 = get_temp_file() json.dump(screenboard, temp1) temp1.flush() - self.dogshell(["screenboard", 
"push", temp1.name]) + dogshell(["screenboard", "push", temp1.name]) # Query the server to verify the change - out, _, _ = self.dogshell(["screenboard", "show", str(screenboard['id'])]) + out, _, _ = dogshell(["screenboard", "show", str(screenboard["id"])]) out = json.loads(out) - assert "id" in out, out - self.assertEquals(out["id"], screenboard["id"]) - assert "title" in out, out - self.assertEquals(out["title"], screenboard["title"]) + assert out["id"] == screenboard["id"] + assert out["title"] == screenboard["title"] new_title = "new_title" new_desc = "new_desc" - new_screen = [{ - "title": "blerg", - "definition": { - "requests": [ - {"q": "avg:system.load.15{web,env:prod}"} - ] - } - }] + new_screen = [{"title": "blerg", "definition": {"requests": [{"q": "avg:system.load.15{web,env:prod}"}]}}] # Update a screenboard directly on the server - self.dogshell(["screenboard", "update", str(screenboard["id"]), new_title, new_desc], - stdin=json.dumps(new_screen)) + dogshell( + ["screenboard", "update", str(screenboard["id"]), new_title, new_desc], stdin=json.dumps(new_screen) + ) # Query the server to verify the change - out, _, _ = self.dogshell(["screenboard", "show", str(screenboard["id"])]) + out, _, _ = dogshell(["screenboard", "show", str(screenboard["id"])]) out = json.loads(out) - assert "id" in out, out - self.assertEquals(out["id"], screenboard["id"]) - assert "title" in out, out - self.assertEquals(out["title"], new_title) - assert "description" in out, out - self.assertEquals(out["description"], new_desc) - assert "graphs" in out, out - self.assertEquals(out["graphs"], new_screen) + assert out["id"] == screenboard["id"] + assert out["board_title"] == new_title + assert out["description"] == new_desc + assert out["widgets"] == new_screen # Pull the updated screenboard to disk fd, updated_file = tempfile.mkstemp() try: - self.dogshell(["screenboard", "pull", str(screenboard["id"]), updated_file]) + dogshell(["screenboard", "pull", str(screenboard["id"]), 
updated_file]) updated_screenboard = {} with open(updated_file) as f: updated_screenboard = json.load(f) - self.assertEquals(out, updated_screenboard) + assert out == updated_screenboard finally: os.unlink(updated_file) # Share the screenboard - out, _, _ = self.dogshell(["screenboard", "share", str(screenboard["id"])]) + out, _, _ = dogshell(["screenboard", "share", str(screenboard["id"])]) out = json.loads(out) - assert out['board_id'] == screenboard['id'] + assert out["board_id"] == screenboard["id"] # Verify it's actually shared - public_url = out['public_url'] + public_url = out["public_url"] response = requests.get(public_url) assert response.status_code == 200 # Revoke the screenboard and verify it's actually revoked - self.dogshell(["screenboard", "revoke", str(screenboard["id"])]) + dogshell(["screenboard", "revoke", str(screenboard["id"])]) response = requests.get(public_url) assert response.status_code == 404 # Delete the screenboard - self.dogshell(["screenboard", "delete", str(screenboard["id"])]) + dogshell(["--timeout", "30", "screenboard", "delete", str(screenboard["id"])]) # Verify that it's not on the server anymore - out, err, return_code = self.dogshell(["screenboard", "show", str(screenboard['id'])], - check_return_code=False) - self.assertNotEquals(return_code, 0) + _, _, return_code = dogshell(["screenboard", "show", str(screenboard["id"])], check_return_code=False) + assert return_code != 0 # Test monitors - - def test_monitors(self): + @pytest.mark.admin_needed + def test_monitors(self, dogshell): # Create a monitor query = "avg(last_1h):sum:system.net.bytes_rcvd{*} by {host} > 100" type_alert = "metric alert" - out, err, return_code = self.dogshell(["monitor", "post", type_alert, query]) + tags = "main,test" + priority = "5" + out, _, _ = dogshell(["monitor", "post", type_alert, query, "--tags", tags, "--priority", priority]) - assert "id" in out, out - assert "query" in out, out - assert "type" in out, out out = json.loads(out) - 
self.assertEquals(out["query"], query) - self.assertEquals(out["type"], type_alert) + assert out["query"] == query + assert out["type"] == type_alert + assert out["priority"] == int(priority) monitor_id = str(out["id"]) + monitor_name = out["name"] - out, err, return_code = self.dogshell(["monitor", "show", monitor_id]) + out, _, _ = dogshell(["monitor", "show", monitor_id]) out = json.loads(out) - self.assertEquals(out["query"], query) - self.assertEquals(out['options']['notify_no_data'], False) + assert out["query"] == query + assert out["options"]["notify_no_data"] is False # Update options - options = { - "notify_no_data": True, - "no_data_timeframe": 20 - } + options = {"notify_no_data": True, "no_data_timeframe": 20} + out, err, return_code = dogshell( + ["monitor", "update", monitor_id, type_alert, query, "--options", json.dumps(options)], + check_return_code=False + ) - out, err, return_code = self.dogshell( - ["monitor", "update", monitor_id, type_alert, - query, "--options", json.dumps(options)]) + out = json.loads(out) + assert query in out["query"] + assert out["options"]["notify_no_data"] == options["notify_no_data"] + assert out["options"]["no_data_timeframe"] == options["no_data_timeframe"] + assert 'DEPRECATION' in err + assert return_code == 0 + + # Update message only + updated_message = "monitor updated" + current_options = out["options"] + out, err, return_code = dogshell( + ["monitor", "update", monitor_id, "--message", updated_message] + ) - assert "id" in out, out - assert "options" in out, out out = json.loads(out) - self.assertEquals(out["query"], query) - self.assertEquals(out['options']['notify_no_data'], options["notify_no_data"]) - self.assertEquals(out['options']['no_data_timeframe'], options["no_data_timeframe"]) + assert updated_message == out["message"] + assert query == out["query"] + assert monitor_name == out["name"] + assert current_options == out["options"] + + # Updating optional type, query, tags, and priority + updated_query 
= "avg(last_15m):sum:system.net.bytes_rcvd{*} by {env} > 222" + updated_type = "query alert" + updated_tags = "main" + updated_priority = "4" + + out, err, return_code = dogshell( + ["monitor", "update", monitor_id, "--type", updated_type, "--query", updated_query, + "--tags", updated_tags, "--priority", updated_priority] + ) + + out = json.loads(out) + assert updated_query in out["query"] + assert updated_type in out["type"] + assert updated_message in out["message"] # updated_message updated in previous step + assert monitor_name in out["name"] + assert current_options == out["options"] + assert int(updated_priority) == out["priority"] # Mute monitor - out, err, return_code = self.dogshell(["monitor", "mute", str(out["id"])]) - assert "id" in out, out + out, _, _ = dogshell(["monitor", "mute", str(out["id"])]) out = json.loads(out) - self.assertEquals(str(out["id"]), monitor_id) - self.assertEquals(out["options"]["silenced"], {"*": None}) + assert str(out["id"]) == monitor_id + assert out["options"]["silenced"] == {"*": None} # Unmute monitor - out, err, return_code = self.dogshell(["monitor", "unmute", monitor_id], check_return_code=False) + out, _, _ = dogshell(["monitor", "unmute", "--all_scopes", monitor_id], check_return_code=False) out = json.loads(out) - self.assertEquals(str(out["id"]), monitor_id) - self.assertEquals(out["options"]["silenced"], {}) + assert str(out["id"]) == monitor_id + assert out["options"]["silenced"] == {} # Unmute all scopes of a monitor - options = { - "silenced": {"host:abcd1234": None, "host:abcd1235": None} - } + options = {"silenced": {"host:abcd1234": None, "host:abcd1235": None}} - out, err, return_code = self.dogshell( - ["monitor", "update", monitor_id, type_alert, - query, "--options", json.dumps(options)]) + out, err, return_code = dogshell( + ["monitor", "update", monitor_id, type_alert, query, "--options", json.dumps(options)], + check_return_code=False + ) + + out = json.loads(out) + assert out["query"] == query + 
assert out["options"]["silenced"] == {"host:abcd1234": None, "host:abcd1235": None} + assert "DEPRECATION" in err + assert return_code == 0 - assert "id" in out, out - assert "options" in out, out + out, _, _ = dogshell(["monitor", "unmute", str(out["id"]), "--all_scopes"]) out = json.loads(out) - self.assertEquals(out["query"], query) - self.assertEquals(out["options"]["silenced"], {"host:abcd1234": None, "host:abcd1235": None}) + assert str(out["id"]) == monitor_id + assert out["options"]["silenced"] == {} - out, err, return_code = self.dogshell(["monitor", "unmute", str(out["id"]), - "--all_scopes"]) - assert "id" in out, out + # Test can_delete monitor + monitor_ids = [int(monitor_id)] + str_monitor_ids = str(monitor_id) + out, _, _ = dogshell(["monitor", "can_delete", str_monitor_ids]) out = json.loads(out) - self.assertEquals(str(out["id"]), monitor_id) - self.assertEquals(out["options"]["silenced"], {}) + assert out["data"]["ok"] == monitor_ids + assert out["errors"] is None + + # Create a monitor-based SLO + out, _, _ = dogshell( + [ + "service_level_objective", + "create", + "--type", + "monitor", + "--monitor_ids", + str_monitor_ids, + "--name", + "test_slo", + "--thresholds", + "7d:90", + ] + ) + out = json.loads(out) + slo_id = out["data"][0]["id"] + + # Test can_delete monitor + out, _, _ = dogshell(["monitor", "can_delete", str_monitor_ids]) + out = json.loads(out) + assert out["data"]["ok"] == [] + # TODO update the error message template + # assert out["errors"] == { + # str(monitor_id): [MONITOR_REFERENCED_IN_SLO_MESSAGE.format(monitor_id, slo_id)] + # } + + # Delete a service_level_objective + _, _, _ = dogshell(["service_level_objective", "delete", slo_id]) + + # Test can_delete monitor + out, _, _ = dogshell(["monitor", "can_delete", str_monitor_ids]) + out = json.loads(out) + assert out["data"]["ok"] == monitor_ids + assert out["errors"] is None # Delete a monitor - self.dogshell(["monitor", "delete", monitor_id]) + dogshell(["monitor", 
"delete", monitor_id]) # Verify that it's not on the server anymore - out, err, return_code = self.dogshell(["monitor", "show", monitor_id], check_return_code=False) - self.assertNotEquals(return_code, 0) + _, _, return_code = dogshell(["monitor", "show", monitor_id], check_return_code=False) + assert return_code != 0 # Mute all - out, err, return_code = self.dogshell(["monitor", "mute_all"]) - assert "id" in out, out - assert "active" in out, out + out, _, _ = dogshell(["monitor", "mute_all"]) out = json.loads(out) - self.assertEquals(out["active"], True) + assert out["active"] is True # Unmute all - self.dogshell(["monitor", "unmute_all"]) + dogshell(["monitor", "unmute_all"]) # Retry unmuting all -> should raise an error this time - out, err, return_code = self.dogshell(["monitor", "unmute_all"], check_return_code=False) - self.assertNotEquals(return_code, 0) + _, _, return_code = dogshell(["monitor", "unmute_all"], check_return_code=False) + assert return_code != 0 - @attr('host') - def test_host_muting(self): - hostname = "my.test.host" - message = "Muting this host for a test." - end = int(time.time()) + 60 * 60 + # Test validate monitor + monitor_type = "metric alert" + valid_options = '{"thresholds": {"critical": 200.0}}' + invalid_options = '{"thresholds": {"critical": 90.0}}' + + # Check with an invalid query. + invalid_query = "THIS IS A BAD QUERY" + out, _, _ = dogshell(["monitor", "validate", monitor_type, invalid_query, "--options", valid_options]) + out = json.loads(out) + assert out == {"errors": ["The value provided for parameter 'query' is invalid"]} + + # Check with a valid query, invalid options. + valid_query = "avg(last_1h):sum:system.net.bytes_rcvd{host:host0} > 200" + out, _, _ = dogshell(["monitor", "validate", monitor_type, valid_query, "--options", invalid_options]) + out = json.loads(out) + assert out == {"errors": ["Alert threshold (90.0) does not match that used in the query (200.0)."]} + + # Check with a valid query, valid options. 
+ out, _, _ = dogshell(["monitor", "validate", monitor_type, valid_query, "--options", valid_options]) + out = json.loads(out) + assert out == {} + + def test_host_muting(self, freezer, dogshell, get_unique, dogshell_with_retry): + # Submit a metric to create a host + hostname = "my.test.host{}".format(get_unique()) + dogshell(["metric", "post", "--host", hostname, "metric", "1"]) - # Reset test - self.dogshell(["host", "unmute", hostname], check_return_code=False) + # Wait for the host to appear + dogshell_with_retry(["tag", "show", hostname]) + + message = "Muting this host for a test." + with freezer: + end = int(time.time()) + 60 * 60 # Mute a host - out, err, return_code = self.dogshell( - ["host", "mute", hostname, "--message", message, "--end", str(end)]) - out = json.loads(out) - assert "action" in out, out - assert "hostname" in out, out - assert "message" in out, out - assert "end" in out, out - self.assertEquals(out['action'], "Muted") - self.assertEquals(out['hostname'], hostname) - self.assertEquals(out['message'], message) - self.assertEquals(out['end'], end) + out, _, _ = dogshell(["host", "mute", hostname, "--message", message, "--end", str(end)]) + out = json.loads(out) + assert out["action"] == "Muted" + assert out["hostname"] == hostname + assert out["message"] == message + assert out["end"] == end # We shouldn't be able to mute a host that's already muted, unless we include # the override param. 
end2 = end + 60 * 15 - out, err, return_code = self.dogshell( - ["host", "mute", hostname, "--end", str(end2)], check_return_code=False) - assert err + _, _, return_code = dogshell_with_retry( + ["host", "mute", hostname, "--end", str(end2)], retry_condition=lambda o, r: r == 0 + ) + assert return_code != 0 - out, err, return_code = self.dogshell( - ["host", "mute", hostname, "--end", str(end2), "--override"]) + out, _, _ = dogshell(["host", "mute", hostname, "--end", str(end2), "--override"]) out = json.loads(out) - assert "action" in out, out - assert "hostname" in out, out - assert "end" in out, out - self.assertEquals(out['action'], "Muted") - self.assertEquals(out['hostname'], hostname) - self.assertEquals(out['end'], end2) + assert out["action"] == "Muted" + assert out["hostname"] == hostname + assert out["end"] == end2 # Unmute a host - out, err, return_code = self.dogshell(["host", "unmute", hostname]) + out, _, _ = dogshell(["host", "unmute", hostname]) out = json.loads(out) - assert "action" in out, out - assert "hostname" in out, out - self.assertEquals(out['action'], "Unmuted") - self.assertEquals(out['hostname'], hostname) + assert out["action"] == "Unmuted" + assert out["hostname"] == hostname - def test_downtime_schedule(self): + def test_downtime_schedule(self, freezer, dogshell): # Schedule a downtime scope = "env:staging" - out, err, return_code = self.dogshell(["downtime", "post", scope, - str(int(time.time()))]) - assert "id" in out, out - assert "scope" in out, out - assert "disabled" in out, out - out = json.loads(out) - self.assertEquals(out["scope"][0], scope) - self.assertEquals(out["disabled"], False) + with freezer: + start = str(int(time.time())) + out, _, _ = dogshell(["downtime", "post", scope, start]) + out = json.loads(out) + assert out["scope"][0] == scope + assert out["disabled"] is False downtime_id = str(out["id"]) # Get downtime - out, err, return_code = self.dogshell(["downtime", "show", - downtime_id]) - assert "id" in out, 
out - assert "scope" in out, out + out, _, _ = dogshell(["downtime", "show", downtime_id]) out = json.loads(out) - self.assertEquals(out["scope"][0], scope) - self.assertEquals(out["disabled"], False) + assert out["scope"][0] == scope + assert out["disabled"] is False # Update downtime message = "Doing some testing on staging." - end = int(time.time()) + 60000 - out, err, return_code = self.dogshell(["downtime", "update", - downtime_id, - "--scope", scope, "--end", - str(end), "--message", message]) - assert "end" in out, out - assert "message" in out, out - assert "disabled" in out, out - out = json.loads(out) - self.assertEquals(out["end"], end) - self.assertEquals(out["message"], message) - self.assertEquals(out["disabled"], False) + with freezer: + end = int(time.time()) + 60000 + out, _, _ = dogshell( + ["downtime", "update", downtime_id, "--scope", scope, "--end", str(end), "--message", message] + ) + out = json.loads(out) + assert out["end"] == end + assert out["message"] == message + assert out["disabled"] is False # Cancel downtime - self.dogshell(["downtime", "delete", downtime_id]) + dogshell(["downtime", "delete", downtime_id]) # Get downtime and check if it is cancelled - out, err, return_code = self.dogshell(["downtime", "show", downtime_id]) - assert "id" in out, out - assert "scope" in out, out + out, _, _ = dogshell(["downtime", "show", downtime_id]) out = json.loads(out) - self.assertEquals(out["scope"][0], scope) - self.assertEquals(out["disabled"], True) + assert out["scope"][0] == scope + assert out["disabled"] is True - def test_service_check(self): - out, err, return_code = self.dogshell(["service_check", "check", "check_pg", - 'host0', "1"]) - assert "status" in out, out + def test_downtime_cancel_by_scope(self, dogshell): + # Schedule a downtime + scope = "env:staging" + out, _, _ = dogshell(["downtime", "post", scope, str(int(time.time()))]) out = json.loads(out) - self.assertEquals(out["status"], 'ok') + assert out["scope"][0] == scope + 
assert out["disabled"] is False + downtime_id = str(out["id"]) - # Test helpers - def dogshell(self, args, stdin=None, check_return_code=True, use_cl_args=False): - """ Helper function to call the dog shell command - """ - cmd = ["dog", "--config", self.config_file.name] + args - if use_cl_args: - cmd = ["dog", - "--api-key={0}".format(os.environ["DATADOG_API_KEY"]), - "--application-key={0}".format(os.environ["DATADOG_APP_KEY"])] + args - proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, - stderr=subprocess.PIPE, stdin=subprocess.PIPE) - if stdin: - out, err = proc.communicate(stdin.encode("utf-8")) - else: - out, err = proc.communicate() - proc.wait() - return_code = proc.returncode - if check_return_code: - self.assertEquals(return_code, 0, err) - self.assertEquals(err, b'') - return out.decode('utf-8'), err.decode('utf-8'), return_code + # Cancel the downtime by scope + dogshell(["downtime", "cancel_by_scope", scope]) + + # Get downtime and check if it is cancelled + out, _, _ = dogshell(["downtime", "show", downtime_id]) + out = json.loads(out) + assert out["scope"][0] == scope + assert out["disabled"] is True - def get_unique(self): - return md5(str(time.time() + random.random()).encode('utf-8')).hexdigest() + def test_service_check(self, dogshell): + out, _, _ = dogshell(["service_check", "check", "check_pg", "host0", "1"]) + out = json.loads(out) + assert out["status"], "ok" def parse_response(self, out): data = {} - for line in out.split('\n'): - parts = re.split('\s+', str(line).strip()) + for line in out.split("\n"): + parts = re.split(r"\s+", str(line).strip()) key = parts[0] # Could potentially have errors with other whitespace val = " ".join(parts[1:]) if key: data[key] = val return data - -if __name__ == '__main__': - unittest.main() diff --git a/tests/integration/dogstatsd/test_statsd_fork.py b/tests/integration/dogstatsd/test_statsd_fork.py new file mode 100644 index 000000000..c856376e0 --- /dev/null +++ 
b/tests/integration/dogstatsd/test_statsd_fork.py @@ -0,0 +1,90 @@ +import os +import itertools +import socket +import threading + +import pytest + +from datadog.dogstatsd.base import DogStatsd, SUPPORTS_FORKING + + +@pytest.mark.parametrize( + "disable_background_sender, disable_buffering", + list(itertools.product([True, False], [True, False])), +) +def test_register_at_fork(disable_background_sender, disable_buffering): + if not SUPPORTS_FORKING: + pytest.skip("os.register_at_fork is required for this test") + + statsd = DogStatsd( + telemetry_min_flush_interval=0, + disable_background_sender=disable_background_sender, + disable_buffering=disable_buffering, + ) + + tracker = {} + + def track(method): + def inner(*args, **kwargs): + method(*args, **kwargs) + tracker[method] = True + + return inner + + statsd.pre_fork = track(statsd.pre_fork) + statsd.post_fork_parent = track(statsd.post_fork_parent) + + pid = os.fork() + if pid == 0: + os._exit(0) + + assert pid > 0 + os.waitpid(pid, 0) + + assert len(tracker) == 2 + + +def sender_a(statsd, running): + while running[0]: + statsd.gauge("spam", 1) + + +def sender_b(statsd, signal): + while running[0]: + with statsd: + statsd.gauge("spam", 1) + +@pytest.mark.parametrize( + "disable_background_sender, disable_buffering, sender", + list(itertools.product([True, False], [True, False], [sender_a, sender_b])), +) +def test_fork_with_thread(disable_background_sender, disable_buffering, sender): + if not SUPPORTS_FORKING: + pytest.skip("os.register_at_fork is required for this test") + + statsd = DogStatsd( + telemetry_min_flush_interval=0, + disable_background_sender=disable_background_sender, + disable_buffering=disable_buffering, + ) + + sender = None + try: + sender_running = [True] + sender = threading.Thread(target=sender, args=(statsd, sender_running)) + sender.daemon = True + sender.start() + + pid = os.fork() + if pid == 0: + os._exit(42) + + assert pid > 0 + (_, status) = os.waitpid(pid, 0) + + assert 
os.WEXITSTATUS(status) == 42 + finally: + statsd.stop() + if sender: + sender_running[0] = False + sender.join() diff --git a/tests/integration/dogstatsd/test_statsd_sender.py b/tests/integration/dogstatsd/test_statsd_sender.py new file mode 100644 index 000000000..55710c173 --- /dev/null +++ b/tests/integration/dogstatsd/test_statsd_sender.py @@ -0,0 +1,103 @@ +import itertools +import socket +from threading import Thread + +import pytest + +from datadog.dogstatsd.base import DogStatsd + +@pytest.mark.parametrize( + "disable_background_sender, disable_buffering, wait_for_pending, socket_timeout, stop", + list(itertools.product([True, False], [True, False], [True, False], [0, 1], [True, False])), +) +def test_sender_mode(disable_background_sender, disable_buffering, wait_for_pending, socket_timeout, stop): + # Test basic sender operation with an assortment of options + foo, bar = socket.socketpair(socket.AF_UNIX, socket.SOCK_DGRAM, 0) + statsd = DogStatsd( + telemetry_min_flush_interval=0, + disable_background_sender=disable_background_sender, + disable_buffering=disable_buffering, + socket_timeout=socket_timeout, + ) + + statsd.socket = foo + statsd._reset_telemetry() + + def reader_thread(): + msg = bar.recv(8192) + assert msg == b"test.metric:1|c\n" + + t = Thread(target=reader_thread, name="test_sender_mode/reader_thread") + t.daemon = True + t.start() + + statsd.increment("test.metric") + if wait_for_pending: + statsd.wait_for_pending() + + if stop: + statsd.stop() + + t.join(timeout=10) + assert not t.is_alive() + +def test_set_socket_timeout(): + statsd = DogStatsd(socket_timeout=0) + assert statsd.get_socket().gettimeout() == 0 + statsd.set_socket_timeout(1) + assert statsd.get_socket().gettimeout() == 1 + statsd.close_socket() + assert statsd.get_socket().gettimeout() == 1 + + +@pytest.mark.parametrize( + "disable_background_sender, disable_buffering", + list(itertools.product([True, False], [True, False])), +) +def 
test_fork_hooks(disable_background_sender, disable_buffering): + statsd = DogStatsd( + telemetry_min_flush_interval=0, + disable_background_sender=disable_background_sender, + disable_buffering=disable_buffering, + ) + + foo, bar = socket.socketpair(socket.AF_UNIX, socket.SOCK_DGRAM, 0) + statsd.socket = foo + + statsd.increment("test.metric") + + assert disable_buffering or statsd._flush_thread.is_alive() + assert disable_background_sender or statsd._sender_thread.is_alive() + + statsd.pre_fork() + + assert statsd._flush_thread is None + assert statsd._sender_thread is None + assert statsd._queue is None or statsd._queue.empty() + assert len(statsd._buffer) == 0 + + statsd.post_fork_parent() + + assert disable_buffering or statsd._flush_thread.is_alive() + assert disable_background_sender or statsd._sender_thread.is_alive() + + foo.close() + bar.close() + + +def test_buffering_with_context(): + statsd = DogStatsd( + telemetry_min_flush_interval=0, + disable_buffering=False, + ) + + foo, bar = socket.socketpair(socket.AF_UNIX, socket.SOCK_DGRAM, 0) + statsd.socket = foo + + statsd.increment("first") + with statsd: # should not erase previously buffered metrics + pass + + bar.settimeout(5) + msg = bar.recv(8192) + assert msg == b"first:1|c\n" diff --git a/tests/integration/test_freezer.py b/tests/integration/test_freezer.py new file mode 100644 index 000000000..59ac7761d --- /dev/null +++ b/tests/integration/test_freezer.py @@ -0,0 +1,10 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc + +import datetime + + +def test_freezer(freezer): + with freezer: + assert datetime.datetime.now() == freezer.time_to_freeze diff --git a/tests/performance/__init__.py b/tests/performance/__init__.py index e69de29bb..b3017a1db 100644 --- a/tests/performance/__init__.py +++ b/tests/performance/__init__.py @@ -0,0 +1,3 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc diff --git a/tests/performance/test_lambda_wrapper_thread_safety.py b/tests/performance/test_lambda_wrapper_thread_safety.py new file mode 100644 index 000000000..03b6c2f83 --- /dev/null +++ b/tests/performance/test_lambda_wrapper_thread_safety.py @@ -0,0 +1,51 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +import time +# import unittest +import threading + +from datadog import lambda_metric, datadog_lambda_wrapper +from datadog.threadstats.aws_lambda import _lambda_stats + + +TOTAL_NUMBER_OF_THREADS = 1000 + + +class MemoryReporter(object): + """ A reporting class that reports to memory for testing. 
""" + + def __init__(self): + self.distributions = [] + self.dist_flush_counter = 0 + + def flush_distributions(self, dists): + self.distributions += dists + self.dist_flush_counter = self.dist_flush_counter + 1 + + +@datadog_lambda_wrapper +def wrapped_function(id): + lambda_metric("dist_" + str(id), 42) + # sleep makes the os continue another thread + time.sleep(0.001) + + lambda_metric("common_dist", 42) + + +# Lambda wrapper - mute thread safety test, python 2.7 issues +# class TestWrapperThreadSafety(unittest.TestCase): + +# def test_wrapper_thread_safety(self): +# _lambda_stats.reporter = MemoryReporter() + +# for i in range(TOTAL_NUMBER_OF_THREADS): +# threading.Thread(target=wrapped_function, args=[i]).start() +# # Wait all threads to finish +# time.sleep(10) + +# # Check that at least one flush happened +# self.assertGreater(_lambda_stats.reporter.dist_flush_counter, 0) + +# dists = _lambda_stats.reporter.distributions +# self.assertEqual(len(dists), TOTAL_NUMBER_OF_THREADS + 1) diff --git a/tests/performance/test_statsd_thread_safety.py b/tests/performance/test_statsd_thread_safety.py index 04ebdf1e4..aff934e85 100644 --- a/tests/performance/test_statsd_thread_safety.py +++ b/tests/performance/test_statsd_thread_safety.py @@ -1,20 +1,34 @@ -import time -import six -import threading +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +# stdlib from collections import deque -from nose import tools as t +from functools import reduce +import threading +import time +import unittest + +# 3p +from mock import patch +# datadog from datadog.dogstatsd.base import DogStatsd +from datadog.util.compat import is_p3k class FakeSocket(object): - """ A fake socket for testing. """ - + """ + Mocked socket for testing. 
+ """ def __init__(self): self.payloads = deque() def send(self, payload): - assert type(payload) == six.binary_type + if is_p3k(): + assert type(payload) == bytes + else: + assert type(payload) == str + self.payloads.append(payload) def recv(self): @@ -27,41 +41,270 @@ def __repr__(self): return str(self.payloads) -class DogstatsdTest(DogStatsd): - def send_metrics(self): - self.increment('whatever') +class TestDogStatsdThreadSafety(unittest.TestCase): + """ + DogStatsd thread safety tests. + """ + def setUp(self): + """ + Mock a socket. + """ + self.socket = FakeSocket() + + def assertMetrics(self, values): + """ + Helper, assertions on metrics. + """ + count = len(values) + # Split packet per metric (required when buffered) and discard empty packets + packets = map(lambda x: x.split(b"\n"), self.socket.recv()) + packets = reduce(lambda prev, ele: prev + ele, packets, []) + packets = list(filter(lambda x: x, packets)) -class TestDogStatsdThreadSafety(object): + # Count + self.assertEqual( + len(packets), count, + u"Metric size assertion failed: expected={expected}, received={received}".format( + expected=count, received=len(packets) + ) + ) + # Values + for packet in packets: + metric_value = int(packet.split(b':', 1)[1].split(b'|', 1)[0]) + self.assertIn( + metric_value, values, + u"Metric assertion failed: unexpected metric value {metric_value}".format( + metric_value=metric_value + ) + ) + values.remove(metric_value) - def setUp(self): - self.socket = FakeSocket() + def test_socket_creation(self): + """ + Socket creation plays well with multiple threads. 
+ """ + # Create a DogStatsd client but no socket + statsd = DogStatsd() - def recv(self): - return self.socket.recv() + # Submit metrics from different threads to create a socket + threads = [] + for value in range(10000): + t = threading.Thread(target=statsd.gauge, args=("foo", value)) + threads.append(t) + t.start() + for t in threads: + t.join() + + @staticmethod + def _submit_with_multiple_threads(statsd, submit_method, values): + """ + Helper, use the given statsd client and method to submit the values + within multiple threads. + """ + threads = [] + for value in values: + t = threading.Thread( + target=getattr(statsd, submit_method), + args=("foo", value) + ) + threads.append(t) + t.start() + for t in threads: + t.join() + + def test_increment(self): + """ + Increments can be submitted from concurrent threads. + """ + # Create a DogStatsd client with a mocked socket + statsd = DogStatsd() + statsd.socket = self.socket + + # Samples + values = set(range(10000)) + + # Submit metrics from different threads + self._submit_with_multiple_threads(statsd, "increment", values) + + # All metrics were properly submitted + self.assertMetrics(values) + + def test_decrement(self): + """ + Decrements can be submitted from concurrent threads. + """ + # Create a DogStatsd client with a mocked socket + statsd = DogStatsd() + statsd.socket = self.socket + + # Samples + values = set(range(10000)) + expected_value = set([-value for value in values]) + + # Submit metrics from different threads + self._submit_with_multiple_threads(statsd, "decrement", expected_value) + + # All metrics were properly submitted + self.assertMetrics(values) + + def test_gauge(self): + """ + Gauges can be submitted from concurrent threads. 
+ """ + # Create a DogStatsd client with a mocked socket + statsd = DogStatsd() + statsd.socket = self.socket + + # Samples + values = set(range(10000)) + + # Submit metrics from different threads + self._submit_with_multiple_threads(statsd, "gauge", values) + + # All metrics were properly submitted + self.assertMetrics(values) + + def test_histogram(self): + """ + Histograms can be submitted from concurrent threads. + """ + # Create a DogStatsd client with a mocked socket + statsd = DogStatsd() + statsd.socket = self.socket + + # Samples + values = set(range(10000)) + + # Submit metrics from different threads + self._submit_with_multiple_threads(statsd, "histogram", values) - def test_send_metrics(self): - statsd = DogstatsdTest() + # All metrics were properly submitted + self.assertMetrics(values) + + def test_timing(self): + """ + Timings can be submitted from concurrent threads. + """ + # Create a DogStatsd client with a mocked socket + statsd = DogStatsd() statsd.socket = self.socket - for _ in range(10000): - threading.Thread(target=statsd.send_metrics).start() - time.sleep(1) - t.assert_equal(10000, len(self.recv()), len(self.recv())) + + # Samples + values = set(range(10000)) + + # Submit metrics from different threads + self._submit_with_multiple_threads(statsd, "timing", values) + + # All metrics were properly submitted + self.assertMetrics(values) def test_send_batch_metrics(self): - with DogstatsdTest() as batch: - batch.socket = self.socket - for _ in range(10000): - threading.Thread(target=batch.send_metrics).start() - time.sleep(1) - payload = map(lambda x: x.split("\n"), self.recv()) - payload = reduce(lambda prev, ele: prev + ele, payload, []) - t.assert_equal(10001, len(payload), len(payload)) + """ + Metrics can be buffered, submitted from concurrent threads. 
+ """ + with DogStatsd() as batch_statsd: + # Create a DogStatsd buffer client with a mocked socket + batch_statsd.socket = self.socket - def test_socket_creation(self): + # Samples + values = set(range(10000)) + + # Submit metrics from different threads + self._submit_with_multiple_threads(batch_statsd, "gauge", values) + + # All metrics were properly submitted + self.assertMetrics(values) + + @patch('datadog.dogstatsd.context.monotonic') + def test_timed_decorator_threaded(self, mock_monotonic): + """ + `timed` decorator plays well with concurrent threads. + """ + # Create a DogStatsd client with a mocked socket + statsd = DogStatsd() + statsd.socket = self.socket + + # Set up the mocked time + mock_monotonic.return_value = 0 + + # Method to time + @statsd.timed("foo") + def bar(): + """ + Wait 5 time units and return. + """ + initial_time = mock_monotonic.return_value + while mock_monotonic.return_value < initial_time + 2: + pass + + # Run the method within multiple threads + threads = [] + for value in range(10): + t = threading.Thread(target=bar) + threads.append(t) + # Bump time so that previous thread can complete + mock_monotonic.return_value += 1 + t.start() + # Sleep to let the threads start + time.sleep(0.1) + + # Bump time so that all threads completes + time.sleep(0.1) + mock_monotonic.return_value += 1 + time.sleep(0.1) + mock_monotonic.return_value += 1 + + for t in threads: + t.join() + + # All metrics were properly submitted + expected_values = [2 for _ in range(0, 10)] + self.assertMetrics(expected_values) + + @patch('datadog.dogstatsd.context.monotonic') + def test_timed_context_manager_threaded(self, mock_monotonic): """ - Assess thread safeness in socket creation. + `timed` context manager plays well with concurrent threads. 
""" - statsd = DogstatsdTest() - for _ in range(10000): - threading.Thread(target=statsd.send_metrics).start() + # Create a DogStatsd client with a mocked socket + statsd = DogStatsd() + statsd.socket = self.socket + + # Set up the mocked time + mock_monotonic.return_value = 0 + + # Method to time + def bar(): + """ + Wait 5 time units and return. + """ + initial_time = mock_monotonic.return_value + + with statsd.timed("foo"): + while mock_monotonic.return_value < initial_time + 2: + pass + + # Run the method within multiple threads + threads = [] + for value in range(10): + t = threading.Thread(target=bar) + threads.append(t) + # Bump time so that previous thread can complete + mock_monotonic.return_value += 1 + t.start() + # Sleep to let the threads start + time.sleep(0.1) + + # Bump time so that all threads completes + time.sleep(0.1) + mock_monotonic.return_value += 1 + time.sleep(0.1) + mock_monotonic.return_value += 1 + + for t in threads: + t.join() + + # All metrics were properly submitted + expected_values = [2 for _ in range(0, 10)] + self.assertMetrics(expected_values) diff --git a/tests/performance/test_statsd_throughput.py b/tests/performance/test_statsd_throughput.py new file mode 100644 index 000000000..a08723e52 --- /dev/null +++ b/tests/performance/test_statsd_throughput.py @@ -0,0 +1,377 @@ +# coding: utf8 +# Unless explicitly stated otherwise all files in this repository are licensed +# under the BSD-3-Clause License. This product includes software developed at +# Datadog (https://www.datadoghq.com/). 
+ +# Copyright 2015-Present Datadog, Inc + +# stdlib +import cProfile +import io +import logging +import os +import pstats +import random +import sys +import threading +import timeit +import unittest +import warnings + +try: + import queue +except ImportError: + import Queue as queue + +# datadog +from datadog.dogstatsd.base import DogStatsd +from datadog.util.compat import is_p3k + +# test utils +from tests.util.fake_statsd_server import FakeServer +from tests.util.system_info_observer import SysInfoObserver + + +# StatsdSender is a static helper for sending mock metrics to statsd via a simple API +# pylint: disable=too-few-public-methods,useless-object-inheritance +class StatsdSender(object): + EXTRA_TAGS = ["bar = barval", "baz = bazval"] + STATIC_TIMING_SET = set(range(100)) + + # Enums are not part of 2.7 built-ins + METRICS_TYPE = [ + "decrement", + "distribution", + "gauge", + "histogram", + "increment", + "set", + "timing", + ] + + @staticmethod + def send(metric, statsd_instance, value): + getattr(StatsdSender, "_submit_{}".format(StatsdSender.METRICS_TYPE[metric]))( + statsd_instance, threading.current_thread().name, value + ) + + @staticmethod + def _submit_decrement(statsd_instance, metric_prefix, _): + statsd_instance.decrement( + "{}.counter".format(metric_prefix), tags=StatsdSender.EXTRA_TAGS + ) + + @staticmethod + def _submit_distribution(statsd_instance, metric_prefix, value): + statsd_instance.distribution( + "{}.distribution".format(metric_prefix), value, tags=StatsdSender.EXTRA_TAGS + ) + + @staticmethod + def _submit_gauge(statsd_instance, metric_prefix, value): + statsd_instance.gauge( + "{}.gauge".format(metric_prefix), value, tags=StatsdSender.EXTRA_TAGS + ) + + @staticmethod + def _submit_histogram(statsd_instance, metric_prefix, value): + statsd_instance.histogram( + "{}.histogram".format(metric_prefix), value, tags=StatsdSender.EXTRA_TAGS + ) + + @staticmethod + def _submit_increment(statsd_instance, metric_prefix, _): + 
statsd_instance.increment( + "{}.counter".format(metric_prefix), tags=StatsdSender.EXTRA_TAGS + ) + + @staticmethod + def _submit_set(statsd_instance, metric_prefix, value): + statsd_instance.set( + "{}.set".format(metric_prefix), value, tags=StatsdSender.EXTRA_TAGS + ) + + @staticmethod + def _submit_timing(statsd_instance, metric_prefix, _): + statsd_instance.timing( + "{}.set".format(metric_prefix), + StatsdSender.STATIC_TIMING_SET, + tags=StatsdSender.EXTRA_TAGS, + ) + + +class TestDogStatsdThroughput(unittest.TestCase): + """ + DogStatsd throughput tests. + """ + + DEFAULT_NUM_DATAPOINTS = 50000 + DEFAULT_NUM_THREADS = 1 + DEFAULT_NUM_RUNS = 5 + DEFAULT_TRANSPORT = "udp" + + RUN_MESSAGE = ( + "Run #{:2d}/{:2d}: {:.4f}s (latency: {:.2f}μs, cpu: {:.4f}," + + " mem.rss_diff: {:.0f}kb, recv: {:.2f}%)" + ) + + def setUp(self): + # Parse the benchmark parameters and use sensible defaults for values + # that are not configured + self.num_datapoints = int( + os.getenv("BENCHMARK_NUM_DATAPOINTS", str(self.DEFAULT_NUM_DATAPOINTS)) + ) + self.num_threads = int( + os.getenv("BENCHMARK_NUM_THREADS", str(self.DEFAULT_NUM_THREADS)) + ) + self.num_runs = int(os.getenv("BENCHMARK_NUM_RUNS", str(self.DEFAULT_NUM_RUNS))) + self.profiling_enabled = os.getenv("BENCHMARK_PROFILING", "false") in ["1", "true", "True", "Y", "yes", "Yes"] + self.transport = os.getenv( + "BENCHMARK_TRANSPORT", str(self.DEFAULT_TRANSPORT) + ).upper() + + # We do want to see any problems if they occur in the statsd library + logger = logging.getLogger() + logger.level = logging.DEBUG + logger.addHandler(logging.StreamHandler(sys.stdout)) + + # Ensure that warnings don't print the stack trace + def one_line_warning(message, category, filename, lineno, *_): + return "%s:%s: %s: %s" % (filename, lineno, category.__name__, message) + + warnings.formatwarning = one_line_warning + + # Add a newline so that we don't get clobbered by the test output + print("") + + # pylint: disable=too-many-locals + def 
test_statsd_performance(self): + print( + "Starting: {} run(s), {} thread(s), {} points/thread via {} (profiling: {}) on Python{}.{} ...".format( + self.num_runs, + self.num_threads, + self.num_datapoints, + self.transport, + str(self.profiling_enabled).lower(), + sys.version_info[0], + sys.version_info[1], + ) + ) + + # We want a stable random sequence so that parallel runs + # are consistent and repeatable + random.seed(1234) + + # Pre-calculate a random order of metric types for each thread + metrics_order = [] + for _ in range(self.num_threads): + thread_metrics_order = [] + for _ in range(self.num_datapoints): + thread_metrics_order.append( + random.randrange(len(StatsdSender.METRICS_TYPE)) + ) + + metrics_order.append(thread_metrics_order) + + run_cpu_stats = [] + run_durations = [] + run_latencies = [] + run_memory_stats = [] + received_packet_pcts = [] + + for run_idx in range(self.num_runs): + ( + duration, + total_latency, + sys_stats, + received_packet_pct, + ) = self._execute_test_run( + FakeServer(transport=self.transport), + metrics_order, + self.num_threads, + self.num_datapoints, + ) + avg_latency_secs = total_latency / (self.num_threads * self.num_datapoints) + avg_latency = avg_latency_secs * 1000000 + print( + self.RUN_MESSAGE.format( + run_idx + 1, + self.num_runs, + duration, + avg_latency, + sys_stats["cpu.user"] + sys_stats["cpu.system"], + sys_stats["mem.rss_diff_kb"], + received_packet_pct, + ) + ) + + run_durations.append(duration) + run_cpu_stats.append(sys_stats["cpu.user"] + sys_stats["cpu.system"]) + run_memory_stats.append(sys_stats["mem.rss_diff_kb"]) + run_latencies.append(float(avg_latency)) + received_packet_pcts.append(received_packet_pct) + + result_msg = "\nTotal for {} run(s), {} thread(s), {} points/thread via {} on Python{}.{}:\n" + result_msg += "\tDuration:\t\t{:.4f}s\n" + result_msg += "\tLatency:\t\t{:.2f}μs\n" + result_msg += "\tCPU:\t\t\t{:.4f}\n" + result_msg += "\tMemory (rss) diff:\t{:.0f}kb\n" + result_msg += 
"\tReceived packets:\t{:.2f}%" + print( + result_msg.format( + self.num_runs, + self.num_threads, + self.num_datapoints, + self.transport, + sys.version_info[0], + sys.version_info[1], + sum(run_durations) / len(run_durations), + sum(run_latencies) / len(run_latencies), + sum(run_cpu_stats) / len(run_cpu_stats), + sum(run_memory_stats) / len(run_memory_stats), + sum(received_packet_pcts) / len(received_packet_pcts), + ) + ) + + # pylint: disable=too-many-locals,no-self-use + def _execute_test_run(self, server, metrics_order, num_threads, num_datapoints): + # Setup all the threads and get them in a waiting state + threads = [] + start_signal = threading.Event() + + latency_results = queue.Queue() + observer = SysInfoObserver() + + with server: + # Create a DogStatsd client with a mocked socket + statsd_instance = DogStatsd( + constant_tags=["foo = {}".format(random.random())], + host="localhost", + port=server.port, + socket_path=server.socket_path, + ) + + for thread_idx in range(num_threads): + thread = threading.Thread( + name="test_statsd_throughput_thread_{}".format(thread_idx), + target=TestDogStatsdThroughput._thread_runner, + args=( + statsd_instance, + start_signal, + metrics_order[thread_idx], + latency_results, + self.profiling_enabled, + ), + ) + thread.daemon = True + threads.append(thread) + thread.start() + + # `timeit.timeit` is not easily usable here since we need to pass in state + # and Python 2 version of `timeit()` does not accept the `global` keyword. 
+ start_time = timeit.default_timer() + + # Let the thread know that it can start sending metrics + start_signal.set() + + # Observe system utilization while we wait for the threads to exit + with observer: + for thread in threads: + thread.join() + + total_latency = 0.0 + for thread in threads: + if latency_results.empty(): + warnings.warn("One or more threads did not report their results!") + continue + + total_latency += latency_results.get() + + duration = timeit.default_timer() - start_time + + # Sanity checks: Verify that metric transfer expectations are correct + expected_metrics = num_threads * num_datapoints + + # Verify that dropped metric count is matching our statsd expectations. This + # type of inconsistency should never happen. + if ( + expected_metrics - server.metrics_captured + != statsd_instance.packets_dropped + ): + error_msg = ( + "WARN: Statsd dropped packet count ({}) did not match the server " + ) + error_msg += "missing received packet count expectation ({})!\n" + warnings.warn( + error_msg.format( + statsd_instance.packets_dropped, + expected_metrics - server.metrics_captured, + ) + ) + + # Verify that received metric count is matching our metric totals expectations. Note + # that in some scenarios, some data is expected to be dropped. 
+ if server.metrics_captured != expected_metrics: + error_msg = "WARN: Received metrics count ({}) did not match the sent " + error_msg += "metrics count ({})!\n" + warnings.warn(error_msg.format(server.metrics_captured, expected_metrics)) + + received_packet_pct = server.metrics_captured * 100.0 / expected_metrics + + return (duration, total_latency, observer.stats, received_packet_pct) + + @staticmethod + def _thread_runner( + statsd_instance, + start_event, + thread_metrics_order, + latency_results, + profiling_enabled, + ): + # We wait for a global signal to start running our events + start_event.wait(5) + + if profiling_enabled: + profiler = cProfile.Profile() + profiler.enable() + + duration = 0.0 + for metric_idx, metric in enumerate(thread_metrics_order): + start_time = timeit.default_timer() + + StatsdSender.send(metric, statsd_instance, metric_idx) + + duration += timeit.default_timer() - start_time + + if hasattr(statsd_instance, 'flush'): + statsd_instance.flush() + + latency_results.put(duration) + + if profiling_enabled: + TestDogStatsdThroughput.print_profiling_stats(profiler) + + + @staticmethod + def print_profiling_stats(profiler, sort_by='cumulative'): + """ + Prints profiling results for the thread that finishes its run. Options for + sorting include 'tottime', 'pcalls', 'ncalls', 'cumulative', etc but you can + check https://github.com/python/cpython/blob/3.9/Lib/pstats.py#L37-L45 for + other options. 
+ """ + + profiler.disable() + + if is_p3k(): + output_stream = io.StringIO() + else: + output_stream = io.BytesIO() + + profiling_stats = pstats.Stats( + profiler, + stream=output_stream, + ).sort_stats(sort_by) + + profiling_stats.print_stats() + print(output_stream.getvalue()) diff --git a/tests/performance/test_threadstats_thread_safety.py b/tests/performance/test_threadstats_thread_safety.py index 6d1e0d0c7..e4e6e0653 100644 --- a/tests/performance/test_threadstats_thread_safety.py +++ b/tests/performance/test_threadstats_thread_safety.py @@ -1,7 +1,9 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc import re import time import threading -from nose import tools as t from datadog import ThreadStats @@ -56,7 +58,7 @@ def test_threadstats_thread_safety(self): events = reporter.events # Overview - t.assert_equal(len(metrics), 10009, len(metrics)) + assert len(metrics) == 10009 # Sort metrics counter_metrics = [] @@ -72,19 +74,19 @@ def test_threadstats_thread_safety(self): counter_metrics.append(m) # Counter - t.assert_equal(len(counter_metrics), 1, len(counter_metrics)) + assert len(counter_metrics) == 1 counter = counter_metrics[0] - t.assert_equal(counter['points'][0][1], 10000, counter['points'][0][1]) + assert counter['points'][0][1] == 10000 # Gauge - t.assert_equal(len(gauge_metrics), 10000, len(gauge_metrics)) + assert len(gauge_metrics) == 10000 # Histogram - t.assert_equal(len(histogram_metrics), 8, len(histogram_metrics)) + assert len(histogram_metrics) == 8 count_histogram = filter(lambda x: x['metric'] == "histogram.count", histogram_metrics)[0] - t.assert_equal(count_histogram['points'][0][1], 10000, count_histogram['points'][0][1]) + assert count_histogram['points'][0][1] == 10000 sum_histogram = filter(lambda x: x['metric'] == "histogram.avg", 
histogram_metrics)[0] - t.assert_equal(sum_histogram['points'][0][1], 4999.5, sum_histogram['points'][0][1]) + assert sum_histogram['points'][0][1] == 4999.5 # Events - t.assert_equal(10000, len(events), len(events)) + assert 10000 == len(events) diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py index e69de29bb..b3017a1db 100644 --- a/tests/unit/__init__.py +++ b/tests/unit/__init__.py @@ -0,0 +1,3 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc diff --git a/tests/unit/api/__init__.py b/tests/unit/api/__init__.py index e69de29bb..b3017a1db 100644 --- a/tests/unit/api/__init__.py +++ b/tests/unit/api/__init__.py @@ -0,0 +1,3 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc diff --git a/tests/unit/api/helper.py b/tests/unit/api/helper.py index 6db98ca0c..4a31ac979 100644 --- a/tests/unit/api/helper.py +++ b/tests/unit/api/helper.py @@ -1,63 +1,128 @@ -# python +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +# stdlib +from io import BytesIO import unittest +import json + +# 3p +from mock import Mock +import requests # datadog from datadog import initialize, api -from datadog.api.base import CreateableAPIResource, UpdatableAPIResource, DeletableAPIResource,\ - GetableAPIResource, ListableAPIResource, ActionAPIResource -from datadog.util.compat import iteritems, json +from datadog.api.http_client import RequestClient +from datadog.api.exceptions import ApiError +from datadog.api.resources import ( + CreateableAPIResource, + UpdatableAPIResource, + DeletableAPIResource, + GetableAPIResource, + ListableAPIResource, + ListableAPISubResource, + AddableAPISubResource, + UpdatableAPISubResource, + DeletableAPISubResource, + ActionAPIResource +) +from datadog.util.compat import iteritems, is_p3k +from tests.util.contextmanagers import EnvVars -# 3p -import requests -from mock import patch, Mock API_KEY = "apikey" APP_KEY = "applicationkey" -API_HOST = "host" +API_HOST = "https://example.com" HOST_NAME = "agent.hostname" FAKE_PROXY = { "https": "http://user:pass@10.10.1.10:3128/", } -class MockReponse(requests.Response): - content = None +class MockSession(object): + """docstring for MockSession""" + _args = None + _kwargs = None + _count = 0 + + def request(self, *args, **kwargs): + self._args = args + self._kwargs = kwargs + self._count += 1 + return MockResponse() + + def call_args(self): + return self._args, self._kwargs + + def call_count(self): + return self._count + + +class MockResponse(requests.Response): + + def __init__(self, raise_for_status=False): + super(MockResponse, self).__init__() + self._raise_for_status = raise_for_status def raise_for_status(self): - pass + if not self._raise_for_status: + return + raise ApiError({'errors': ""}) # A few API Resources class MyCreatable(CreateableAPIResource): - _class_url = '/creatables' + _resource_name = 'creatables' +class MyParamsApiKeyCreatable(CreateableAPIResource): + 
_resource_name = 'series' class MyUpdatable(UpdatableAPIResource): - _class_url = '/updatables' + _resource_name = 'updatables' class MyGetable(GetableAPIResource): - _class_url = '/getables' + _resource_name = 'getables' class MyListable(ListableAPIResource): - _class_url = '/listables' + _resource_name = 'listables' class MyDeletable(DeletableAPIResource): - _class_url = '/deletables' + _resource_name = 'deletables' + + +class MyListableSubResource(ListableAPISubResource): + _resource_name = 'resource_name' + _sub_resource_name = 'sub_resource_name' + + +class MyAddableSubResource(AddableAPISubResource): + _resource_name = 'resource_name' + _sub_resource_name = 'sub_resource_name' + + +class MyUpdatableSubResource(UpdatableAPISubResource): + _resource_name = 'resource_name' + _sub_resource_name = 'sub_resource_name' + + +class MyDeletableSubResource(DeletableAPISubResource): + _resource_name = 'resource_name' + _sub_resource_name = 'sub_resource_name' class MyActionable(ActionAPIResource): - _class_url = '/actionables' + _resource_name = 'actionables' @classmethod - def trigger_class_action(cls, method, name, id=None, **params): - super(MyActionable, cls)._trigger_class_action(method, name, id, **params) + def trigger_class_action(cls, method, name, id=None, params=None, **body): + super(MyActionable, cls)._trigger_class_action(method, name, id, params, **body) @classmethod - def trigger_action(cls, method, name, id=None, **params): - super(MyActionable, cls)._trigger_action(method, name, id, **params) + def trigger_action(cls, method, name, id=None, **body): + super(MyActionable, cls)._trigger_action(method, name, id, **body) # Test classes @@ -65,35 +130,61 @@ class DatadogAPITestCase(unittest.TestCase): def setUp(self): # Mock patch requests - self.request_patcher = patch('requests.Session') - request_class_mock = self.request_patcher.start() - self.request_mock = request_class_mock.return_value - self.request_mock.request = Mock(return_value=MockReponse()) + 
self.request_mock = MockSession() + RequestClient._session = self.request_mock + # self.request_patcher = patch('requests.Session') + # request_class_mock = self.request_patcher.start() + # self.request_mock = request_class_mock.return_value + # self.request_mock.request = Mock(return_value=MockResponse()) - def get_request_data(self): + def tearDown(self): + RequestClient._session = None + + def load_request_response(self, status_code=200, response_body='{}', raise_for_status=False): + """ + Load the response body from the given payload + """ + mock_response = MockResponse(raise_for_status=raise_for_status) + if is_p3k(): + mock_response.raw = BytesIO(bytes(response_body, 'utf-8')) + else: + mock_response.raw = BytesIO(response_body) + mock_response.status_code = status_code + + self.request_mock.request = Mock(return_value=mock_response) + + def arm_requests_to_raise(self): + """ + Arm the mocked request to raise for status. """ + self.request_mock.request = Mock(return_value=MockResponse(raise_for_status=True)) + def get_request_data(self): """ - _, kwargs = self.request_mock.request.call_args + Returns JSON formatted data from the submitted `requests`. 
+ """ + _, kwargs = self.request_mock.call_args() return json.loads(kwargs['data']) def request_called_with(self, method, url, data=None, params=None): - (req_method, req_url), others = self.request_mock.request.call_args - assert method == req_method, req_method - assert url == req_url, req_url + (req_method, req_url), others = self.request_mock.call_args() + self.assertEqual(method, req_method, req_method) + self.assertEqual(url, req_url, req_url) if data: - assert 'data' in others - assert json.dumps(data) == others['data'], others['data'] + self.assertIn('data', others) + others_data = json.loads(others['data']) + self.assertEqual(data, others_data, others['data']) if params: - assert 'params' in others + self.assertIn('params', others) for (k, v) in iteritems(params): - assert k in others['params'], others['params'] - assert v == others['params'][k] + self.assertIn(k, others['params'], others['params']) + self.assertEqual(v, others['params'][k]) - def tearDown(self): - self.request_patcher.stop() + def assertIn(self, first, second, msg=None): + msg = msg or "{0} not in {1}".format(first, second) + self.assertTrue(first in second, msg) class DatadogAPINoInitialization(DatadogAPITestCase): @@ -104,9 +195,23 @@ def tearDown(self): api._application_key = None api._api_host = None api._host_name = None + api._proxies = None + + def setUp(self): + super(DatadogAPINoInitialization, self).setUp() + api._api_key = api._application_key = api._host_name = api._api_host = None class DatadogAPIWithInitialization(DatadogAPITestCase): def setUp(self): super(DatadogAPIWithInitialization, self).setUp() initialize(api_key=API_KEY, app_key=APP_KEY, api_host=API_HOST) + + def tearDown(self): + super(DatadogAPIWithInitialization, self).tearDown() + # Restore default values + api._api_key = None + api._application_key = None + api._api_host = None + api._host_name = None + api._proxies = None diff --git a/tests/unit/api/test_api.py b/tests/unit/api/test_api.py index 
b34a49a55..e44e4ce43 100644 --- a/tests/unit/api/test_api.py +++ b/tests/unit/api/test_api.py @@ -1,71 +1,123 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc # stdlib from copy import deepcopy -from functools import wraps +import json import os import tempfile from time import time +import zlib # 3p -import mock -from nose.tools import assert_raises, assert_true, assert_false +import mock, pytest # datadog -from datadog import initialize, api -from datadog.api import Metric -from datadog.api.exceptions import ApiNotInitialized +from datadog import initialize, api, util +from datadog.api import ( + Distribution, + Event, + Logs, + Metric, + ServiceCheck, + User +) +from datadog.api.exceptions import ( + DatadogException, + ProxyError, + ClientError, + HttpTimeout, + HttpBackoff, + HTTPError, + ApiError, + ApiNotInitialized, +) from datadog.util.compat import is_p3k +from datadog.util.format import normalize_tags from tests.unit.api.helper import ( DatadogAPIWithInitialization, DatadogAPINoInitialization, MyCreatable, + MyParamsApiKeyCreatable, MyUpdatable, MyDeletable, MyGetable, MyListable, + MyListableSubResource, + MyAddableSubResource, + MyUpdatableSubResource, + MyDeletableSubResource, MyActionable, API_KEY, APP_KEY, API_HOST, HOST_NAME, - FAKE_PROXY) + FAKE_PROXY +) +from datadog.util.hostname import CfgNotFound, get_hostname - -def preserve_environ_datadog(func): - """ - Decorator to preserve the original environment value. 
- """ - @wraps(func) - def wrapper(env_name, *args, **kwds): - environ_api_param = os.environ.get(env_name) - try: - return func(env_name, *args, **kwds) - finally: - # restore the original environ value - if environ_api_param: - os.environ[env_name] = environ_api_param - elif os.environ.get(env_name): - del os.environ[env_name] - - return wrapper +from tests.util.contextmanagers import EnvVars class TestInitialization(DatadogAPINoInitialization): - def test_no_initialization_fails(self, test='sisi'): - assert_raises(ApiNotInitialized, MyCreatable.create) + def test_default_settings_set(self): + """ + Test all the default setting are properly set before calling initialize + """ + from datadog.api import ( + _api_key, + _application_key, + _api_version, + _api_host, + _host_name, + _hostname_from_config, + _cacert, + _proxies, + _timeout, + _max_timeouts, + _max_retries, + _backoff_period, + _mute, + _return_raw_response, + ) + + assert _api_key is None + assert _application_key is None + assert _api_version == 'v1' + assert _api_host is None + assert _host_name is None + assert _hostname_from_config is True + assert _cacert is True + assert _proxies is None + assert _timeout == 60 + assert _max_timeouts == 3 + assert _max_retries == 3 + assert _backoff_period == 300 + assert _mute is True + assert _return_raw_response is False + + def test_no_initialization_fails(self): + """ + Raise ApiNotInitialized exception when `initialize` has not ran or no API key was set. 
+ """ + self.assertRaises(ApiNotInitialized, MyCreatable.create) # No API key => only stats in statsd mode should work initialize() api._api_key = None - assert_raises(ApiNotInitialized, MyCreatable.create) + self.assertRaises(ApiNotInitialized, MyCreatable.create) # Finally, initialize with an API key initialize(api_key=API_KEY, api_host=API_HOST) MyCreatable.create() - assert self.request_mock.request.call_count == 1 + self.assertEqual(self.request_mock.call_count(), 1) @mock.patch('datadog.util.config.get_config_path') def test_get_hostname(self, mock_config_path): + """ + API hostname parameter fallback with Datadog Agent hostname when available. + """ # Generate a fake agent config tmpfilepath = os.path.join(tempfile.gettempdir(), "tmp-agentconfig") with open(tmpfilepath, "wb") as f: @@ -79,152 +131,530 @@ def test_get_hostname(self, mock_config_path): mock_config_path.return_value = tmpfilepath initialize() - assert api._host_name == HOST_NAME, api._host_name + self.assertEqual(api._host_name, HOST_NAME, api._host_name) + + def test_hostname_warning_not_present(self): + try: + get_hostname(hostname_from_config=False) + except CfgNotFound: + pytest.fail("Unexpected CfgNotFound Exception") + + def test_normalize_tags(self): + tag_list_test = ["tag1, tag2", "tag3 ,tag4", "tag5,tag6"] + tag_list_final = normalize_tags(tag_list_test) + assert tag_list_final == ["tag1__tag2", "tag3__tag4", "tag5_tag6"] + + def test_errors_suppressed(self): + """ + API `errors` field ApiError suppressed when specified + """ + # Test API, application keys, API host, and some HTTP client options + initialize(api_key=API_KEY, app_key=APP_KEY, api_host=API_HOST) + + # Make a simple API call + self.load_request_response(response_body='{"data": {}, "errors": ["foo error"]}') + resp = MyCreatable.create(params={"suppress_response_errors_on_codes": [200]}) + self.assertNotIsInstance(resp, ApiError) + self.assertDictEqual({"data": {}, "errors": ["foo error"]}, resp) def 
test_request_parameters(self): - # Test API, application keys, API host and proxies - initialize(api_key=API_KEY, app_key=APP_KEY, api_host=API_HOST, proxies=FAKE_PROXY) + """ + API parameters are set with `initialize` method. + """ + # Test API, application keys, API host, and some HTTP client options + initialize(api_key=API_KEY, app_key=APP_KEY, api_host=API_HOST) + + # Make a simple API call + MyCreatable.create() + + _, options = self.request_mock.call_args() + + # Assert `requests` parameters + self.assertIn('params', options) + + self.assertIn('headers', options) + self.assertEqual(options['headers']['Content-Type'], 'application/json') + self.assertEqual(options['headers']['DD-API-KEY'], API_KEY) + self.assertEqual(options['headers']['DD-APPLICATION-KEY'], APP_KEY) + assert "api_key" not in options['params'] + assert "application_key" not in options['params'] + + def test_initialize_options(self): + """ + HTTP client and API options are set with `initialize` method. + """ + initialize(api_key=API_KEY, app_key=APP_KEY, api_host=API_HOST, + proxies=FAKE_PROXY, cacert=False) # Make a simple API call MyCreatable.create() - _, options = self.request_mock.request.call_args + _, options = self.request_mock.call_args() + + # Assert `requests` parameters + self.assertIn('proxies', options) + self.assertEqual(options['proxies'], FAKE_PROXY) - assert 'params' in options + self.assertIn('verify', options) + self.assertEqual(options['verify'], False) - assert 'api_key' in options['params'] - assert options['params']['api_key'] == API_KEY - assert 'application_key' in options['params'] - assert options['params']['application_key'] == APP_KEY + # Arm the `requests` to raise + self.arm_requests_to_raise() - assert 'proxies' in options - assert options['proxies'] == FAKE_PROXY + # No exception should be raised (mute=True by default) + MyCreatable.create() + + # Repeat with mute to False + initialize(api_key=API_KEY, mute=False) + self.assertRaises(ApiError, 
MyCreatable.create) - assert 'headers' in options - assert options['headers'] == {'Content-Type': 'application/json'} + def test_return_raw_response(self): + # Test default initialization sets return_raw_response to False + initialize() + assert not api._return_raw_response + # Assert that we can set this to True + initialize(return_raw_response=True) + assert api._return_raw_response + # Assert we get multiple fields back when set to True + initialize(api_key="aaaaaaaaaa", app_key="123456", return_raw_response=True) + data, raw = api.Monitor.get_all() + + def test_default_values(self): + with EnvVars(ignore=[ + "DATADOG_API_KEY", + "DATADOG_APP_KEY", + "DD_API_KEY", + "DD_APP_KEY" + ]): + initialize() - def test_initialization_from_env(self): - @preserve_environ_datadog - def test_api_params_from_env(env_name, attr_name, env_value): - """ - Set env_name environment variable to env_value - Assert api.attr_name = env_value - """ - os.environ[env_name] = env_value + self.assertIsNone(api._api_key) + self.assertIsNone(api._application_key) + self.assertEqual(api._api_host, "https://api.datadoghq.com") + self.assertEqual(api._host_name, util.hostname.get_hostname(api._hostname_from_config)) + + def test_env_var_values(self): + with EnvVars( + env_vars={ + "DATADOG_API_KEY": "API_KEY_ENV", + "DATADOG_APP_KEY": "APP_KEY_ENV", + "DATADOG_HOST": "HOST_ENV", + } + ): initialize() - self.assertEquals(getattr(api, attr_name), env_value) - - @preserve_environ_datadog - def test_api_params_default(env_name, attr_name, expected_value): - """ - Unset env_name environment variable - Assert api.attr_name = expected_value - """ - if os.environ.get(env_name): - del os.environ[env_name] + + self.assertEqual(api._api_key, "API_KEY_ENV") + self.assertEqual(api._application_key, "APP_KEY_ENV") + self.assertEqual(api._api_host, "HOST_ENV") + self.assertEqual(api._host_name, util.hostname.get_hostname(api._hostname_from_config)) + + del os.environ["DATADOG_API_KEY"] + del 
os.environ["DATADOG_APP_KEY"] + del os.environ["DATADOG_HOST"] + + with EnvVars(env_vars={ + "DD_API_KEY": "API_KEY_ENV_DD", + "DD_APP_KEY": "APP_KEY_ENV_DD", + }): + api._api_key = None + api._application_key = None + + initialize() + + self.assertEqual(api._api_key, "API_KEY_ENV_DD") + self.assertEqual(api._application_key, "APP_KEY_ENV_DD") + + def test_function_param_value(self): + initialize(api_key="API_KEY", app_key="APP_KEY", api_host="HOST", host_name="HOSTNAME") + + self.assertEqual(api._api_key, "API_KEY") + self.assertEqual(api._application_key, "APP_KEY") + self.assertEqual(api._api_host, "HOST") + self.assertEqual(api._host_name, "HOSTNAME") + + def test_precedence(self): + # Initialize first with env vars + with EnvVars(env_vars={ + "DD_API_KEY": "API_KEY_ENV_DD", + "DD_APP_KEY": "APP_KEY_ENV_DD", + }): + os.environ["DATADOG_API_KEY"] = "API_KEY_ENV" + os.environ["DATADOG_APP_KEY"] = "APP_KEY_ENV" + os.environ["DATADOG_HOST"] = "HOST_ENV" + initialize() - self.assertEquals(getattr(api, attr_name), expected_value) - - @preserve_environ_datadog - def test_api_params_from_params(env_name, parameter, attr_name, value ): - """ - Unset env_name environment variable - Initialize API with parameter=value - Assert api.attr_name = value - """ - if os.environ.get(env_name): - del os.environ[env_name] - initialize(api_host='http://localhost') - self.assertEquals(api._api_host, 'http://localhost') - - # Default values - test_api_params_default("DATADOG_API_KEY", "_api_key", None) - test_api_params_default("DATADOG_APP_KEY", "_application_key", None) - test_api_params_default("DATADOG_HOST", "_api_host", "https://app.datadoghq.com") - - # From environment - test_api_params_from_env("DATADOG_API_KEY", "_api_key", env_value="apikey") - test_api_params_from_env("DATADOG_APP_KEY", "_application_key", env_value="appkey") - test_api_params_from_env("DATADOG_HOST", "_api_host", env_value="http://localhost") - - # From parameters - 
test_api_params_from_params("DATADOG_API_KEY", "api_key", "_api_key", "apikey2") - test_api_params_from_params("DATADOG_APP_KEY", "app_key", "_application_key", "appkey2") - test_api_params_from_params("DATADOG_HOST", "api_host", "_api_host", "http://127.0.0.1") + + self.assertEqual(api._api_key, "API_KEY_ENV") + self.assertEqual(api._application_key, "APP_KEY_ENV") + self.assertEqual(api._api_host, "HOST_ENV") + self.assertEqual(api._host_name, util.hostname.get_hostname(api._hostname_from_config)) + + # Initialize again to check given parameters take precedence over already set value and env vars + initialize(api_key="API_KEY", app_key="APP_KEY", api_host="HOST", host_name="HOSTNAME") + + self.assertEqual(api._api_key, "API_KEY") + self.assertEqual(api._application_key, "APP_KEY") + self.assertEqual(api._api_host, "HOST") + self.assertEqual(api._host_name, "HOSTNAME") + + # Initialize again without specifying attributes to check that already initialized value takes precedence + initialize() + + self.assertEqual(api._api_key, "API_KEY") + self.assertEqual(api._application_key, "APP_KEY") + self.assertEqual(api._api_host, "HOST") + self.assertEqual(api._host_name, "HOSTNAME") + + del os.environ["DATADOG_API_KEY"] + del os.environ["DATADOG_APP_KEY"] + del os.environ["DATADOG_HOST"] + + +class TestExceptions(DatadogAPINoInitialization): + + def test_base_exception(self): + args = [ "foo" ] + with pytest.raises(DatadogException): + raise DatadogException(*args) + + def test_proxyerror_exception(self): + args = [ "GET", "http://localhost:8080", HTTPError("oh no") ] + kwargs = { "method": "GET", "url": "http://localhost:8080", "exception": HTTPError("oh no") } + with pytest.raises(ProxyError): + raise ProxyError(*args) + with pytest.raises(DatadogException): + raise ProxyError(*args) + with pytest.raises(ProxyError): + raise ProxyError(**kwargs) + with pytest.raises(DatadogException): + raise ProxyError(**kwargs) + + def test_clienterror_exception(self): + args = [ 
"GET", "http://localhost:8080", HTTPError("oh no") ] + kwargs = { "method": "GET", "url": "http://localhost:8080", "exception": HTTPError("oh no") } + with pytest.raises(ClientError): + raise ClientError(*args) + with pytest.raises(DatadogException): + raise ClientError(*args) + with pytest.raises(ClientError): + raise ClientError(**kwargs) + with pytest.raises(DatadogException): + raise ClientError(**kwargs) + + def test_httptimeout_exception(self): + args = [ "GET", "http://localhost:8080", 5 ] + kwargs = { "method": "GET", "url": "http://localhost:8080", "timeout": 5 } + with pytest.raises(HttpTimeout): + raise HttpTimeout(*args) + with pytest.raises(DatadogException): + raise HttpTimeout(*args) + with pytest.raises(HttpTimeout): + raise HttpTimeout(**kwargs) + with pytest.raises(DatadogException): + raise HttpTimeout(**kwargs) + + def test_httpbackoff_exception(self): + args = [ 30 ] + kwargs = { "backoff_period": 30 } + with pytest.raises(HttpBackoff): + raise HttpBackoff(*args) + with pytest.raises(DatadogException): + raise HttpBackoff(*args) + with pytest.raises(HttpBackoff): + raise HttpBackoff(**kwargs) + with pytest.raises(DatadogException): + raise HttpBackoff(**kwargs) + + def test_httperror_exception(self): + args = [ 500, "oh no" ] + kwargs = { "status_code": 500, "reason": "oh no" } + with pytest.raises(HTTPError): + raise HTTPError(*args) + with pytest.raises(DatadogException): + raise HTTPError(*args) + with pytest.raises(HTTPError): + raise HTTPError(**kwargs) + with pytest.raises(DatadogException): + raise HTTPError(**kwargs) + + def test_apierror_exception(self): + with pytest.raises(ApiError): + raise ApiError() + with pytest.raises(DatadogException): + raise ApiError() + + def test_apinotinitialized_exception(self): + with pytest.raises(ApiNotInitialized): + raise ApiNotInitialized() + with pytest.raises(DatadogException): + raise ApiNotInitialized() class TestResources(DatadogAPIWithInitialization): def test_creatable(self): + """ + 
Creatable resource logic. + """ MyCreatable.create(mydata="val") - self.request_called_with('POST', "host/api/v1/creatables", data={'mydata': "val"}) + self.request_called_with('POST', API_HOST + "/api/v1/creatables", data={'mydata': "val"}) MyCreatable.create(mydata="val", attach_host_name=True) - self.request_called_with('POST', "host/api/v1/creatables", + self.request_called_with('POST', API_HOST + "/api/v1/creatables", data={'mydata': "val", 'host': api._host_name}) def test_getable(self): + """ + Getable resource logic. + """ getable_object_id = 123 MyGetable.get(getable_object_id, otherparam="val") - self.request_called_with('GET', "host/api/v1/getables/" + str(getable_object_id), + self.request_called_with('GET', API_HOST + "/api/v1/getables/" + str(getable_object_id), params={'otherparam': "val"}) + _, kwargs = self.request_mock.call_args() + self.assertIsNone(kwargs["data"]) def test_listable(self): + """ + Listable resource logic. + """ MyListable.get_all(otherparam="val") - self.request_called_with('GET', "host/api/v1/listables", params={'otherparam': "val"}) + self.request_called_with('GET', API_HOST + "/api/v1/listables", params={'otherparam': "val"}) + _, kwargs = self.request_mock.call_args() + self.assertIsNone(kwargs["data"]) def test_updatable(self): + """ + Updatable resource logic. + """ updatable_object_id = 123 MyUpdatable.update(updatable_object_id, params={'myparam': "val1"}, mydata="val2") - self.request_called_with('PUT', "host/api/v1/updatables/" + str(updatable_object_id), + self.request_called_with('PUT', API_HOST + "/api/v1/updatables/" + str(updatable_object_id), params={'myparam': "val1"}, data={'mydata': "val2"}) def test_detalable(self): + """ + Deletable resource logic. 
+ """ deletable_object_id = 123 MyDeletable.delete(deletable_object_id, otherparam="val") - self.request_called_with('DELETE', "host/api/v1/deletables/" + str(deletable_object_id), + self.request_called_with('DELETE', API_HOST + "/api/v1/deletables/" + str(deletable_object_id), params={'otherparam': "val"}) + def test_listable_sub_resources(self): + """ + Listable sub-resources logic. + """ + resource_id = 123 + MyListableSubResource.get_items(resource_id, otherparam="val") + self.request_called_with( + 'GET', + API_HOST + '/api/v1/resource_name/{0}/sub_resource_name'.format(resource_id), + params={'otherparam': "val"} + ) + _, kwargs = self.request_mock.call_args() + self.assertIsNone(kwargs["data"]) + + def test_addable_sub_resources(self): + """ + Addable sub-resources logic. + """ + resource_id = 123 + MyAddableSubResource.add_items(resource_id, params={'myparam': 'val1'}, mydata='val2') + self.request_called_with( + 'POST', + API_HOST + '/api/v1/resource_name/{0}/sub_resource_name'.format(resource_id), + params={'myparam': 'val1'}, + data={'mydata': 'val2'} + ) + + def test_updatable_sub_resources(self): + """ + Updatable sub-resources logic. + """ + resource_id = 123 + MyUpdatableSubResource.update_items(resource_id, params={'myparam': 'val1'}, mydata='val2') + self.request_called_with( + 'PUT', + API_HOST + '/api/v1/resource_name/{0}/sub_resource_name'.format(resource_id), + params={'myparam': 'val1'}, + data={'mydata': 'val2'} + ) + + def test_deletable_sub_resources(self): + """ + Deletable sub-resources logic. + """ + resource_id = 123 + MyDeletableSubResource.delete_items(resource_id, params={'myparam': 'val1'}, mydata='val2') + self.request_called_with( + 'DELETE', + API_HOST + '/api/v1/resource_name/{0}/sub_resource_name'.format(resource_id), + params={'myparam': 'val1'}, + data={'mydata': 'val2'} + ) + def test_actionable(self): + """ + Actionable resource logic. 
+ """ actionable_object_id = 123 - MyActionable.trigger_class_action('POST', "actionname", id=actionable_object_id, - mydata="val") - self.request_called_with('POST', "host/api/v1/actionables/" + str(actionable_object_id) + - "/actionname", data={'mydata': "val"}) + MyActionable.trigger_class_action( + 'POST', + 'actionname', + id=actionable_object_id, + params={'myparam': 'val1'}, + mydata='val', + mydata2='val2' + ) + self.request_called_with( + 'POST', + API_HOST + '/api/v1/actionables/{0}/actionname'.format(str(actionable_object_id)), + params={'myparam': 'val1'}, + data={'mydata': 'val', 'mydata2': 'val2'} + ) + + MyActionable.trigger_class_action( + 'POST', + 'actionname', + id=actionable_object_id, + mydata='val', + mydata2='val2' + ) + self.request_called_with( + 'POST', + API_HOST + '/api/v1/actionables/{0}/actionname'.format(str(actionable_object_id)), + params={}, + data={'mydata': 'val', 'mydata2': 'val2'} + ) + + MyActionable.trigger_class_action( + 'GET', + 'actionname', + id=actionable_object_id, + params={'param1': 'val1', 'param2': 'val2'} + ) + self.request_called_with( + 'GET', + API_HOST + '/api/v1/actionables/{0}/actionname'.format(str(actionable_object_id)), + params={'param1': 'val1', 'param2': 'val2'} + ) + _, kwargs = self.request_mock.call_args() + self.assertIsNone(kwargs["data"]) + + MyActionable.trigger_action( + 'POST', + 'actionname', + id=actionable_object_id, + mydata="val" + ) + self.request_called_with( + 'POST', + API_HOST + '/api/v1/actionname/{0}'.format(actionable_object_id), + data={'mydata': "val"} + ) + + MyActionable.trigger_action( + 'GET', + 'actionname', + id=actionable_object_id, + ) + self.request_called_with( + 'GET', + API_HOST + '/api/v1/actionname/{0}'.format(actionable_object_id) + ) + _, kwargs = self.request_mock.call_args() + self.assertIsNone(kwargs["data"]) + + +class TestEventResource(DatadogAPIWithInitialization): + + def test_submit_event_wrong_alert_type(self): + """ + Assess that an event submitted with 
a wrong alert_type raises the correct Exception + """ + with pytest.raises(ApiError) as excinfo: + Event.create( + title="test no hostname", text="test no hostname", attach_host_name=False, alert_type="wrong_type" + ) + assert "Parameter alert_type must be either error, warning, info or success" in str(excinfo.value) - MyActionable.trigger_action('POST', "actionname", id=actionable_object_id, mydata="val") - self.request_called_with('POST', "host/api/v1/actionname/" + str(actionable_object_id), - data={'mydata': "val"}) + +class TestLogsResource(DatadogAPIWithInitialization): + def test_list_logs(self): + Logs.list(data={"time": {"from": "2021-01-01T11:00:00Z", "to": "2021-01-02T11:00:00Z"}}) + self.request_called_with( + "POST", + "https://example.com/api/v1/logs-queries/list", + data={"time": {"from": "2021-01-01T11:00:00Z", "to": "2021-01-02T11:00:00Z"}} + ) class TestMetricResource(DatadogAPIWithInitialization): - def submit_and_assess_metric_payload(self, serie): + def submit_and_assess_metric_payload(self, serie, attach_host_name=True): """ Helper to assess the metric payload format. 
""" now = time() if isinstance(serie, dict): - Metric.send(**deepcopy(serie)) + Metric.send(attach_host_name=attach_host_name, **deepcopy(serie)) serie = [serie] else: - Metric.send(deepcopy(serie)) + Metric.send(deepcopy(serie), attach_host_name=attach_host_name) payload = self.get_request_data() for i, metric in enumerate(payload['series']): - assert set(metric.keys()) == set(['metric', 'points', 'host']) + if attach_host_name: + self.assertEqual(set(metric.keys()), set(['metric', 'points', 'host'])) + self.assertEqual(metric['host'], api._host_name) + else: + self.assertEqual(set(metric.keys()), set(['metric', 'points'])) + + self.assertEqual(metric['metric'], serie[i]['metric']) + + # points is a list of 1 point + self.assertTrue(isinstance(metric['points'], list)) + self.assertEqual(len(metric['points']), 1) + # it consists of a [time, value] pair + self.assertEqual(len(metric['points'][0]), 2) + # its value == value we sent + self.assertEqual(metric['points'][0][1], float(serie[i]['points'])) + # it's time not so far from current time + assert now - 1 < metric['points'][0][0] < now + 1 + + def submit_and_assess_dist_payload(self, serie, attach_host_name=True): + """ + Helper to assess the metric payload format. 
+ """ + now = time() + + if isinstance(serie, dict): + Distribution.send(attach_host_name=attach_host_name, **deepcopy(serie)) + serie = [serie] + else: + Distribution.send(deepcopy(serie), attach_host_name=attach_host_name) + + payload = self.get_request_data() + + for i, metric in enumerate(payload['series']): + if attach_host_name: + self.assertEqual(set(metric.keys()), set(['metric', 'points', 'host'])) + self.assertEqual(metric['host'], api._host_name) + else: + self.assertEqual(set(metric.keys()), set(['metric', 'points'])) - assert metric['metric'] == serie[i]['metric'] - assert metric['host'] == api._host_name + self.assertEqual(metric['metric'], serie[i]['metric']) # points is a list of 1 point - assert isinstance(metric['points'], list) and len(metric['points']) == 1 + self.assertTrue(isinstance(metric['points'], list)) + self.assertEqual(len(metric['points']), 1) # it consists of a [time, value] pair - assert len(metric['points'][0]) == 2 + self.assertEqual(len(metric['points'][0]), 2) # its value == value we sent - assert metric['points'][0][1] == serie[i]['points'] + self.assertEqual(metric['points'][0][1], serie[i]['points'][0][1]) # it's time not so far from current time assert now - 1 < metric['points'][0][0] < now + 1 @@ -232,12 +662,12 @@ def test_metric_submit_query_switch(self): """ Endpoints are different for submission and queries. 
""" - Metric.send(points="val") - self.request_called_with('POST', "host/api/v1/series", - data={'series': [{'points': "val", 'host': api._host_name}]}) + Metric.send(points=(123, 456)) + self.request_called_with('POST', API_HOST + "/api/v1/series", + data={'series': [{'points': [[123, 456.0]], 'host': api._host_name}]}) Metric.query(start="val1", end="val2") - self.request_called_with('GET', "host/api/v1/query", + self.request_called_with('GET', API_HOST + "/api/v1/query", params={'from': "val1", 'to': "val2"}) def test_points_submission(self): @@ -252,3 +682,157 @@ def test_points_submission(self): serie = [dict(metric='metric.1', points=13), dict(metric='metric.2', points=19)] self.submit_and_assess_metric_payload(serie) + + # Single point no hostname + serie = dict(metric='metric.1', points=13) + self.submit_and_assess_metric_payload(serie, attach_host_name=False) + + # Multiple point no hostname + serie = [dict(metric='metric.1', points=13), + dict(metric='metric.2', points=19)] + self.submit_and_assess_metric_payload(serie, attach_host_name=False) + + def test_dist_points_submission(self): + """ + Assess the distribution data payload format, when submitting a single or multiple points. + """ + # Single point + serie = dict(metric='metric.1', points=[[time(), [13]]]) + self.submit_and_assess_dist_payload(serie) + + # Multiple point + serie = [dict(metric='metric.1', points=[[time(), [13]]]), + dict(metric='metric.2', points=[[time(), [19]]])] + self.submit_and_assess_dist_payload(serie) + + # Single point no hostname + serie = dict(metric='metric.1', points=[[time(), [13]]]) + self.submit_and_assess_dist_payload(serie, attach_host_name=False) + + # Multiple point no hostname + serie = [dict(metric='metric.1', points=[[time(), [13]]]), + dict(metric='metric.2', points=[[time(), [19]]])] + self.submit_and_assess_dist_payload(serie, attach_host_name=False) + + def test_data_type_support(self): + """ + `Metric` API supports `real` numerical data types. 
+ """ + from decimal import Decimal + from fractions import Fraction + + m_long = int(1) # long in Python 3.x + + if not is_p3k(): + m_long = long(1) # noqa: F821 + + supported_data_types = [1, 1.0, m_long, Decimal(1), Fraction(1, 2)] + + for point in supported_data_types: + serie = dict(metric='metric.numerical', points=point) + self.submit_and_assess_metric_payload(serie) + + def test_compression(self): + """ + Metric and Distribution support zlib compression + """ + + # By default, there is no compression + # Metrics + series = dict(metric="metric.1", points=[(time(), 13.)]) + Metric.send(attach_host_name=False, **series) + _, kwargs = self.request_mock.call_args() + req_data = kwargs["data"] + headers = kwargs["headers"] + assert "Content-Encoding" not in headers + assert req_data == json.dumps({"series": [series]}) + # Same result when explicitly False + Metric.send(compress_payload=False, attach_host_name=False, **series) + _, kwargs = self.request_mock.call_args() + req_data = kwargs["data"] + headers = kwargs["headers"] + assert "Content-Encoding" not in headers + assert req_data == json.dumps({"series": [series]}) + # Distributions + series = dict(metric="metric.1", points=[(time(), 13.)]) + Distribution.send(attach_host_name=False, **series) + _, kwargs = self.request_mock.call_args() + req_data = kwargs["data"] + headers = kwargs["headers"] + assert "Content-Encoding" not in headers + assert req_data == json.dumps({"series": [series]}) + # Same result when explicitly False + Distribution.send(compress_payload=False, attach_host_name=False, **series) + _, kwargs = self.request_mock.call_args() + req_data = kwargs["data"] + headers = kwargs["headers"] + assert "Content-Encoding" not in headers + assert req_data == json.dumps({"series": [series]}) + + # Enabling compression + # Metrics + series = dict(metric="metric.1", points=[(time(), 13.)]) + compressed_series = zlib.compress(json.dumps({"series": [series]}).encode("utf-8")) + 
Metric.send(compress_payload=True, attach_host_name=False, **series) + _, kwargs = self.request_mock.call_args() + req_data = kwargs["data"] + headers = kwargs["headers"] + assert "Content-Encoding" in headers + assert headers["Content-Encoding"] == "deflate" + assert req_data == compressed_series + # Distributions + series = dict(metric='metric.1', points=[(time(), 13.)]) + compressed_series = zlib.compress(json.dumps({"series": [series]}).encode("utf-8")) + Distribution.send(compress_payload=True, attach_host_name=False, **series) + _, kwargs = self.request_mock.call_args() + req_data = kwargs["data"] + headers = kwargs["headers"] + assert "Content-Encoding" in headers + assert headers["Content-Encoding"] == "deflate" + assert req_data == compressed_series + + +class TestServiceCheckResource(DatadogAPIWithInitialization): + + def test_service_check_supports_none_parameters(self): + """ + ServiceCheck should support none parameters + + ``` + $ dog service_check check check_pg host0 1 + ``` + + resulted in `RuntimeError: dictionary changed size during iteration` + """ + ServiceCheck.check( + check='check_pg', host_name='host0', status=1, message=None, + timestamp=None, tags=None) + + +class TestUserResource(DatadogAPIWithInitialization): + + def test_create_user(self): + User.create(handle="handle", name="name", access_role="ro") + self.request_called_with( + "POST", "https://example.com/api/v1/user", data={"handle": "handle", "name": "name", "access_role": "ro"} + ) + + def test_get_user(self): + User.get("handle") + self.request_called_with("GET", "https://example.com/api/v1/user/handle") + + def test_update_user(self): + User.update("handle", name="name", access_role="ro", email="email", disabled="disabled") + self.request_called_with( + "PUT", + "https://example.com/api/v1/user/handle", + data={"name": "name", "access_role": "ro", "email": "email", "disabled": "disabled"} + ) + + def test_delete_user(self): + User.delete("handle") + 
self.request_called_with("DELETE", "https://example.com/api/v1/user/handle") + + def test_get_all_users(self): + User.get_all() + self.request_called_with("GET", "https://example.com/api/v1/user") diff --git a/tests/unit/dogstatsd/__init__.py b/tests/unit/dogstatsd/__init__.py index e69de29bb..b3017a1db 100644 --- a/tests/unit/dogstatsd/__init__.py +++ b/tests/unit/dogstatsd/__init__.py @@ -0,0 +1,3 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc diff --git a/tests/unit/dogstatsd/fixtures.py b/tests/unit/dogstatsd/fixtures.py new file mode 100644 index 000000000..c042151f2 --- /dev/null +++ b/tests/unit/dogstatsd/fixtures.py @@ -0,0 +1,19 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +""" +Helper(s), load fixtures. +""" +# stdlib +import os + + +def load_fixtures(name): + """ + Load fixtures. 
+ + Args: + name (string): name of the fixture + """ + with open(os.path.join(os.path.dirname(__file__), 'fixtures', '{}'.format(name))) as fixture: + return fixture.read() diff --git a/tests/unit/dogstatsd/fixtures/route b/tests/unit/dogstatsd/fixtures/route new file mode 100644 index 000000000..14aa36ea9 --- /dev/null +++ b/tests/unit/dogstatsd/fixtures/route @@ -0,0 +1,3 @@ +Iface Destination Gateway Flags RefCnt Use Metric Mask MTU Window IRTT +eth0 00000000 010011AC 0003 0 0 0 00000000 0 0 0 +eth0 000011AC 00000000 0001 0 0 0 0000FFFF 0 0 0 diff --git a/tests/unit/dogstatsd/test_aggregator.py b/tests/unit/dogstatsd/test_aggregator.py new file mode 100644 index 000000000..be46e1ed6 --- /dev/null +++ b/tests/unit/dogstatsd/test_aggregator.py @@ -0,0 +1,78 @@ +import unittest +from datadog.dogstatsd.metric_types import MetricType +from datadog.dogstatsd.aggregator import Aggregator + + +class TestAggregator(unittest.TestCase): + def setUp(self): + self.aggregator = Aggregator() + + def test_aggregator_sample(self): + tags = ["tag1", "tag2"] + + self.aggregator.gauge("gaugeTest", 21, tags, 1) + self.assertEqual(len(self.aggregator.metrics_map[MetricType.GAUGE]), 1) + self.assertIn("gaugeTest:tag1,tag2", self.aggregator.metrics_map[MetricType.GAUGE]) + + self.aggregator.count("countTest", 21, tags, 1) + self.assertEqual(len(self.aggregator.metrics_map[MetricType.COUNT]), 1) + self.assertIn("countTest:tag1,tag2", self.aggregator.metrics_map[MetricType.COUNT]) + + self.aggregator.set("setTest", "value1", tags, 1) + self.assertEqual(len(self.aggregator.metrics_map[MetricType.SET]), 1) + self.assertIn("setTest:tag1,tag2", self.aggregator.metrics_map[MetricType.SET]) + + self.aggregator.gauge("gaugeTest", 123, tags, 1) + self.assertEqual(len(self.aggregator.metrics_map[MetricType.GAUGE]), 1) + self.assertIn("gaugeTest:tag1,tag2", self.aggregator.metrics_map[MetricType.GAUGE]) + + self.aggregator.count("countTest", 10, tags, 1) + 
self.assertEqual(len(self.aggregator.metrics_map[MetricType.COUNT]), 1) + self.assertIn("countTest:tag1,tag2", self.aggregator.metrics_map[MetricType.COUNT]) + + self.aggregator.set("setTest", "value1", tags, 1) + self.assertEqual(len(self.aggregator.metrics_map[MetricType.SET]), 1) + self.assertIn("setTest:tag1,tag2", self.aggregator.metrics_map[MetricType.SET]) + + def test_aggregator_flush(self): + tags = ["tag1", "tag2"] + + self.aggregator.gauge("gaugeTest1", 21, tags, 1) + self.aggregator.gauge("gaugeTest1", 10, tags, 1) + self.aggregator.gauge("gaugeTest2", 15, tags, 1) + + self.aggregator.count("countTest1", 21, tags, 1) + self.aggregator.count("countTest1", 10, tags, 1) + self.aggregator.count("countTest2", 1, tags, 1) + + self.aggregator.set("setTest1", "value1", tags, 1) + self.aggregator.set("setTest1", "value1", tags, 1) + self.aggregator.set("setTest1", "value2", tags, 1) + self.aggregator.set("setTest2", "value1", tags, 1) + + metrics = self.aggregator.flush_aggregated_metrics() + self.assertEqual(len(self.aggregator.metrics_map[MetricType.GAUGE]), 0) + self.assertEqual(len(self.aggregator.metrics_map[MetricType.COUNT]), 0) + self.assertEqual(len(self.aggregator.metrics_map[MetricType.SET]), 0) + + self.assertEqual(len(metrics), 7) + metrics.sort(key=lambda m: (m.metric_type, m.name, m.value)) + expected_metrics = [ + {"metric_type": MetricType.COUNT, "name": "countTest1", "tags": tags, "rate": 1, "value": 31, "timestamp": 0}, + {"metric_type": MetricType.COUNT, "name": "countTest2", "tags": tags, "rate": 1, "value": 1, "timestamp": 0}, + {"metric_type": MetricType.GAUGE, "name": "gaugeTest1", "tags": tags, "rate": 1, "value": 10, "timestamp": 0}, + {"metric_type": MetricType.GAUGE, "name": "gaugeTest2", "tags": tags, "rate": 1, "value": 15, "timestamp": 0}, + {"metric_type": MetricType.SET, "name": "setTest1", "tags": tags, "rate": 1, "value": "value1", "timestamp": 0}, + {"metric_type": MetricType.SET, "name": "setTest1", "tags": tags, "rate": 1, 
"value": "value2", "timestamp": 0}, + {"metric_type": MetricType.SET, "name": "setTest2", "tags": tags, "rate": 1, "value": "value1", "timestamp": 0}, + ] + + for metric, expected in zip(metrics, expected_metrics): + self.assertEqual(metric.name, expected["name"]) + self.assertEqual(metric.tags, expected["tags"]) + self.assertEqual(metric.rate, expected["rate"]) + self.assertEqual(metric.value, expected["value"]) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/unit/dogstatsd/test_container.py b/tests/unit/dogstatsd/test_container.py new file mode 100644 index 000000000..c97d80784 --- /dev/null +++ b/tests/unit/dogstatsd/test_container.py @@ -0,0 +1,183 @@ +# -*- coding: utf-8 -*- + +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +""" +Tests for container.py +""" + +import mock +import pytest + +from datadog.dogstatsd.container import Cgroup + + +def get_mock_open(read_data=None): + mock_open = mock.mock_open(read_data=read_data) + return mock.patch("datadog.dogstatsd.container.open", mock_open) + + +@pytest.mark.parametrize( + "file_contents,expected_container_id", + ( + # Docker file + ( + """ +13:name=systemd:/docker/3726184226f5d3147c25fdeab5b60097e378e8a720503a5e19ecfdf29f869860 +12:pids:/docker/3726184226f5d3147c25fdeab5b60097e378e8a720503a5e19ecfdf29f869860 +11:hugetlb:/docker/3726184226f5d3147c25fdeab5b60097e378e8a720503a5e19ecfdf29f869860 +10:net_prio:/docker/3726184226f5d3147c25fdeab5b60097e378e8a720503a5e19ecfdf29f869860 +9:perf_event:/docker/3726184226f5d3147c25fdeab5b60097e378e8a720503a5e19ecfdf29f869860 +8:net_cls:/docker/3726184226f5d3147c25fdeab5b60097e378e8a720503a5e19ecfdf29f869860 +7:freezer:/docker/3726184226f5d3147c25fdeab5b60097e378e8a720503a5e19ecfdf29f869860 
+6:devices:/docker/3726184226f5d3147c25fdeab5b60097e378e8a720503a5e19ecfdf29f869860 +5:memory:/docker/3726184226f5d3147c25fdeab5b60097e378e8a720503a5e19ecfdf29f869860 +4:blkio:/docker/3726184226f5d3147c25fdeab5b60097e378e8a720503a5e19ecfdf29f869860 +3:cpuacct:/docker/3726184226f5d3147c25fdeab5b60097e378e8a720503a5e19ecfdf29f869860 +2:cpu:/docker/3726184226f5d3147c25fdeab5b60097e378e8a720503a5e19ecfdf29f869860 +1:cpuset:/docker/3726184226f5d3147c25fdeab5b60097e378e8a720503a5e19ecfdf29f869860 + """, + "ci-3726184226f5d3147c25fdeab5b60097e378e8a720503a5e19ecfdf29f869860", + ), + # k8s file + ( + """ +11:perf_event:/kubepods/test/pod3d274242-8ee0-11e9-a8a6-1e68d864ef1a/3e74d3fd9db4c9dd921ae05c2502fb984d0cde1b36e581b13f79c639da4518a1 +10:pids:/kubepods/test/pod3d274242-8ee0-11e9-a8a6-1e68d864ef1a/3e74d3fd9db4c9dd921ae05c2502fb984d0cde1b36e581b13f79c639da4518a1 +9:memory:/kubepods/test/pod3d274242-8ee0-11e9-a8a6-1e68d864ef1a/3e74d3fd9db4c9dd921ae05c2502fb984d0cde1b36e581b13f79c639da4518a1 +8:cpu,cpuacct:/kubepods/test/pod3d274242-8ee0-11e9-a8a6-1e68d864ef1a/3e74d3fd9db4c9dd921ae05c2502fb984d0cde1b36e581b13f79c639da4518a1 +7:blkio:/kubepods/test/pod3d274242-8ee0-11e9-a8a6-1e68d864ef1a/3e74d3fd9db4c9dd921ae05c2502fb984d0cde1b36e581b13f79c639da4518a1 +6:cpuset:/kubepods/test/pod3d274242-8ee0-11e9-a8a6-1e68d864ef1a/3e74d3fd9db4c9dd921ae05c2502fb984d0cde1b36e581b13f79c639da4518a1 +5:devices:/kubepods/test/pod3d274242-8ee0-11e9-a8a6-1e68d864ef1a/3e74d3fd9db4c9dd921ae05c2502fb984d0cde1b36e581b13f79c639da4518a1 +4:freezer:/kubepods/test/pod3d274242-8ee0-11e9-a8a6-1e68d864ef1a/3e74d3fd9db4c9dd921ae05c2502fb984d0cde1b36e581b13f79c639da4518a1 +3:net_cls,net_prio:/kubepods/test/pod3d274242-8ee0-11e9-a8a6-1e68d864ef1a/3e74d3fd9db4c9dd921ae05c2502fb984d0cde1b36e581b13f79c639da4518a1 +2:hugetlb:/kubepods/test/pod3d274242-8ee0-11e9-a8a6-1e68d864ef1a/3e74d3fd9db4c9dd921ae05c2502fb984d0cde1b36e581b13f79c639da4518a1 
+1:name=systemd:/kubepods/test/pod3d274242-8ee0-11e9-a8a6-1e68d864ef1a/3e74d3fd9db4c9dd921ae05c2502fb984d0cde1b36e581b13f79c639da4518a1 + """, + "ci-3e74d3fd9db4c9dd921ae05c2502fb984d0cde1b36e581b13f79c639da4518a1", + ), + # ECS file + ( + """ +9:perf_event:/ecs/test-ecs-classic/5a0d5ceddf6c44c1928d367a815d890f/38fac3e99302b3622be089dd41e7ccf38aff368a86cc339972075136ee2710ce +8:memory:/ecs/test-ecs-classic/5a0d5ceddf6c44c1928d367a815d890f/38fac3e99302b3622be089dd41e7ccf38aff368a86cc339972075136ee2710ce +7:hugetlb:/ecs/test-ecs-classic/5a0d5ceddf6c44c1928d367a815d890f/38fac3e99302b3622be089dd41e7ccf38aff368a86cc339972075136ee2710ce +6:freezer:/ecs/test-ecs-classic/5a0d5ceddf6c44c1928d367a815d890f/38fac3e99302b3622be089dd41e7ccf38aff368a86cc339972075136ee2710ce +5:devices:/ecs/test-ecs-classic/5a0d5ceddf6c44c1928d367a815d890f/38fac3e99302b3622be089dd41e7ccf38aff368a86cc339972075136ee2710ce +4:cpuset:/ecs/test-ecs-classic/5a0d5ceddf6c44c1928d367a815d890f/38fac3e99302b3622be089dd41e7ccf38aff368a86cc339972075136ee2710ce +3:cpuacct:/ecs/test-ecs-classic/5a0d5ceddf6c44c1928d367a815d890f/38fac3e99302b3622be089dd41e7ccf38aff368a86cc339972075136ee2710ce +2:cpu:/ecs/test-ecs-classic/5a0d5ceddf6c44c1928d367a815d890f/38fac3e99302b3622be089dd41e7ccf38aff368a86cc339972075136ee2710ce +1:blkio:/ecs/test-ecs-classic/5a0d5ceddf6c44c1928d367a815d890f/38fac3e99302b3622be089dd41e7ccf38aff368a86cc339972075136ee2710ce + """, + "ci-38fac3e99302b3622be089dd41e7ccf38aff368a86cc339972075136ee2710ce", + ), + # Fargate file + ( + """ +11:hugetlb:/ecs/55091c13-b8cf-4801-b527-f4601742204d/432624d2150b349fe35ba397284dea788c2bf66b885d14dfc1569b01890ca7da +10:pids:/ecs/55091c13-b8cf-4801-b527-f4601742204d/432624d2150b349fe35ba397284dea788c2bf66b885d14dfc1569b01890ca7da +9:cpuset:/ecs/55091c13-b8cf-4801-b527-f4601742204d/432624d2150b349fe35ba397284dea788c2bf66b885d14dfc1569b01890ca7da 
+8:net_cls,net_prio:/ecs/55091c13-b8cf-4801-b527-f4601742204d/432624d2150b349fe35ba397284dea788c2bf66b885d14dfc1569b01890ca7da +7:cpu,cpuacct:/ecs/55091c13-b8cf-4801-b527-f4601742204d/432624d2150b349fe35ba397284dea788c2bf66b885d14dfc1569b01890ca7da +6:perf_event:/ecs/55091c13-b8cf-4801-b527-f4601742204d/432624d2150b349fe35ba397284dea788c2bf66b885d14dfc1569b01890ca7da +5:freezer:/ecs/55091c13-b8cf-4801-b527-f4601742204d/432624d2150b349fe35ba397284dea788c2bf66b885d14dfc1569b01890ca7da +4:devices:/ecs/55091c13-b8cf-4801-b527-f4601742204d/432624d2150b349fe35ba397284dea788c2bf66b885d14dfc1569b01890ca7da +3:blkio:/ecs/55091c13-b8cf-4801-b527-f4601742204d/432624d2150b349fe35ba397284dea788c2bf66b885d14dfc1569b01890ca7da +2:memory:/ecs/55091c13-b8cf-4801-b527-f4601742204d/432624d2150b349fe35ba397284dea788c2bf66b885d14dfc1569b01890ca7da +1:name=systemd:/ecs/55091c13-b8cf-4801-b527-f4601742204d/432624d2150b349fe35ba397284dea788c2bf66b885d14dfc1569b01890ca7da + """, + "ci-432624d2150b349fe35ba397284dea788c2bf66b885d14dfc1569b01890ca7da", + ), + # Fargate file >= 1.4.0 + ( + """ +11:hugetlb:/ecs/55091c13-b8cf-4801-b527-f4601742204d/34dc0b5e626f2c5c4c5170e34b10e765-1234567890 +10:pids:/ecs/55091c13-b8cf-4801-b527-f4601742204d/34dc0b5e626f2c5c4c5170e34b10e765-1234567890 +9:cpuset:/ecs/55091c13-b8cf-4801-b527-f4601742204d/34dc0b5e626f2c5c4c5170e34b10e765-1234567890 +8:net_cls,net_prio:/ecs/55091c13-b8cf-4801-b527-f4601742204d/34dc0b5e626f2c5c4c5170e34b10e765-1234567890 +7:cpu,cpuacct:/ecs/55091c13-b8cf-4801-b527-f4601742204d/34dc0b5e626f2c5c4c5170e34b10e765-1234567890 +6:perf_event:/ecs/55091c13-b8cf-4801-b527-f4601742204d/34dc0b5e626f2c5c4c5170e34b10e765-1234567890 +5:freezer:/ecs/55091c13-b8cf-4801-b527-f4601742204d/34dc0b5e626f2c5c4c5170e34b10e765-1234567890 +4:devices:/ecs/55091c13-b8cf-4801-b527-f4601742204d/34dc0b5e626f2c5c4c5170e34b10e765-1234567890 +3:blkio:/ecs/55091c13-b8cf-4801-b527-f4601742204d/34dc0b5e626f2c5c4c5170e34b10e765-1234567890 
+2:memory:/ecs/55091c13-b8cf-4801-b527-f4601742204d/34dc0b5e626f2c5c4c5170e34b10e765-1234567890 +1:name=systemd:/ecs/34dc0b5e626f2c5c4c5170e34b10e765-1234567890 + """, + "ci-34dc0b5e626f2c5c4c5170e34b10e765-1234567890", + ), + # Linux non-containerized file + ( + """ +11:blkio:/user.slice/user-0.slice/session-14.scope +10:memory:/user.slice/user-0.slice/session-14.scope +9:hugetlb:/ +8:cpuset:/ +7:pids:/user.slice/user-0.slice/session-14.scope +6:freezer:/ +5:net_cls,net_prio:/ +4:perf_event:/ +3:cpu,cpuacct:/user.slice/user-0.slice/session-14.scope +2:devices:/user.slice/user-0.slice/session-14.scope +1:name=systemd:/user.slice/user-0.slice/session-14.scope + """, + None, + ), + ), +) +def test_container_id_from_cgroup(file_contents, expected_container_id): + with get_mock_open(read_data=file_contents) as mock_open: + if file_contents is None: + mock_open.side_effect = IOError + + with mock.patch("os.stat", mock.MagicMock(return_value=mock.Mock(st_ino=0xEFFFFFFB))): + reader = Cgroup() + assert expected_container_id == reader.container_id + + mock_open.assert_called_once_with("/proc/self/cgroup", mode="r") + + +def test_container_id_inode(): + """Test that the inode is returned when the container ID cannot be found.""" + with mock.patch("datadog.dogstatsd.container.open", mock.mock_open(read_data="0::/")) as mock_open: + with mock.patch("os.stat", mock.MagicMock(return_value=mock.Mock(st_ino=1234))): + reader = Cgroup() + assert reader.container_id == "in-1234" + mock_open.assert_called_once_with("/proc/self/cgroup", mode="r") + + cgroupv1_priority = """ +12:cpu,cpuacct:/ +11:hugetlb:/ +10:devices:/ +9:rdma:/ +8:net_cls,net_prio:/ +7:memory:/ +6:cpuset:/ +5:pids:/ +4:freezer:/ +3:perf_event:/ +2:blkio:/ +1:name=systemd:/ +0::/ +""" + + paths_checked = [] + + def inode_stat_mock(path): + paths_checked.append(path) + + # The cgroupv1 controller is mounted on inode 0. This will cause a fallback to the cgroupv2 controller. 
+ if path == "/sys/fs/cgroup/memory/": + return mock.Mock(st_ino=0) + elif path == "/sys/fs/cgroup/": + return mock.Mock(st_ino=1234) + + with mock.patch("datadog.dogstatsd.container.open", mock.mock_open(read_data=cgroupv1_priority)) as mock_open: + with mock.patch("os.stat", mock.MagicMock(side_effect=inode_stat_mock)): + reader = Cgroup() + assert reader.container_id == "in-1234" + assert paths_checked[-2:] == [ + "/sys/fs/cgroup/memory/", + "/sys/fs/cgroup/" + ] + mock_open.assert_called_once_with("/proc/self/cgroup", mode="r") diff --git a/tests/unit/dogstatsd/test_metrics.py b/tests/unit/dogstatsd/test_metrics.py new file mode 100644 index 000000000..18b705b4a --- /dev/null +++ b/tests/unit/dogstatsd/test_metrics.py @@ -0,0 +1,73 @@ +import unittest + +from datadog.dogstatsd.metrics import CountMetric, GaugeMetric, SetMetric + +class TestMetrics(unittest.TestCase): + def test_new_count_metric(self): + c = CountMetric("test", 21, ["tag1", "tag2"], 1, 1713804588) + self.assertEqual(c.value, 21) + self.assertEqual(c.name, "test") + self.assertEqual(c.tags, ["tag1", "tag2"]) + self.assertEqual(c.rate, 1.0) + self.assertEqual(c.timestamp, 1713804588) + # Testing for default timestamp may be unecessary + c_default_timestamp = CountMetric("test", 21, ["tag1", "tag2"], 1) + self.assertEqual(c_default_timestamp.value, 21) + self.assertEqual(c_default_timestamp.name, "test") + self.assertEqual(c_default_timestamp.tags, ["tag1", "tag2"]) + self.assertEqual(c_default_timestamp.rate, 1.0) + self.assertEqual(c_default_timestamp.timestamp, 0) + + def test_count_metric_aggregate(self): + c = CountMetric("test", 10, ["tag1", "tag2"], 1, 1713804588) + c.aggregate(20) + self.assertEqual(c.value, 30) + self.assertEqual(c.name, "test") + self.assertEqual(c.tags, ["tag1", "tag2"]) + self.assertEqual(c.rate, 1.0) + self.assertEqual(c.timestamp, 1713804588) + + def test_new_gauge_metric(self): + g = GaugeMetric("test", 10, ["tag1", "tag2"], 1, 1713804588) + self.assertEqual(g.value, 
10) + self.assertEqual(g.name, "test") + self.assertEqual(g.tags, ["tag1", "tag2"]) + self.assertEqual(g.rate, 1) + self.assertEqual(g.timestamp, 1713804588) + + g_default_timestamp = GaugeMetric("test", 10, ["tag1", "tag2"], 1) + self.assertEqual(g_default_timestamp.value, 10) + self.assertEqual(g_default_timestamp.name, "test") + self.assertEqual(g_default_timestamp.tags, ["tag1", "tag2"]) + self.assertEqual(g_default_timestamp.rate, 1) + self.assertEqual(g_default_timestamp.timestamp, 0) + + def test_gauge_metric_aggregate(self): + g = GaugeMetric("test", 10, ["tag1", "tag2"], 1, 1713804588) + g.aggregate(20) + self.assertEqual(g.value, 20) + self.assertEqual(g.name, "test") + self.assertEqual(g.tags, ["tag1", "tag2"]) + self.assertEqual(g.rate, 1.0) + self.assertEqual(g.timestamp, 1713804588) + + def test_new_set_metric(self): + s = SetMetric("test", "value1", ["tag1", "tag2"], 1) + self.assertEqual(s.data, {"value1"}) + self.assertEqual(s.name, "test") + self.assertEqual(s.tags, ["tag1", "tag2"]) + self.assertEqual(s.rate, 1) + self.assertEqual(s.timestamp, 0) + + def test_set_metric_aggregate(self): + s = SetMetric("test", "value1", ["tag1", "tag2"], 1) + s.aggregate("value2") + s.aggregate("value2") + self.assertEqual(s.data, {"value1", "value2"}) + self.assertEqual(s.name, "test") + self.assertEqual(s.tags, ["tag1", "tag2"]) + self.assertEqual(s.rate, 1) + self.assertEqual(s.timestamp, 0) + +if __name__ == '__main__': + unittest.main() diff --git a/tests/unit/dogstatsd/test_statsd.py b/tests/unit/dogstatsd/test_statsd.py index 57954b55c..b9a24cfc1 100644 --- a/tests/unit/dogstatsd/test_statsd.py +++ b/tests/unit/dogstatsd/test_statsd.py @@ -1,131 +1,563 @@ # -*- coding: utf-8 -*- +# pylint: disable=line-too-long,too-many-public-methods + +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc """ Tests for dogstatsd.py """ - +# Standard libraries from collections import deque -import six +from contextlib import closing +from threading import Thread +import errno +import os +import shutil import socket +import tempfile +import threading import time +import unittest +import warnings -from nose import tools as t +# Third-party libraries +import mock +from mock import call, Mock, mock_open, patch +import pytest -from datadog.dogstatsd.base import DogStatsd +# Datadog libraries from datadog import initialize, statsd +from datadog import __version__ as version +from datadog.dogstatsd.base import DEFAULT_BUFFERING_FLUSH_INTERVAL, DogStatsd, MIN_SEND_BUFFER_SIZE, UDP_OPTIMAL_PAYLOAD_LENGTH, UDS_OPTIMAL_PAYLOAD_LENGTH +from datadog.dogstatsd.context import TimedContextManagerDecorator +from datadog.util.compat import is_higher_py35, is_p3k +from tests.util.contextmanagers import preserve_environment_variable, EnvVars +from tests.unit.dogstatsd.fixtures import load_fixtures class FakeSocket(object): """ A fake socket for testing. 
""" - def __init__(self): + FLUSH_GRACE_PERIOD = 0.2 + + def __init__(self, flush_interval=DEFAULT_BUFFERING_FLUSH_INTERVAL): self.payloads = deque() + self._flush_interval = flush_interval + self._flush_wait = False + self.timeout = () # unit tuple = settimeout was not called + def send(self, payload): - assert type(payload) == six.binary_type + if is_p3k(): + assert isinstance(payload, bytes) + else: + assert isinstance(payload, str) + self.payloads.append(payload) - def recv(self): - try: - return self.payloads.popleft().decode('utf-8') - except IndexError: + def recv(self, count=1, reset_wait=False, no_wait=False): + # Initial receive should wait for the flush thread timeout unless we + # specifically want either a follow-up wait or no waiting at all + if not self._flush_wait or reset_wait: + if not no_wait: + time.sleep(self._flush_interval+self.FLUSH_GRACE_PERIOD) + self._flush_wait = True + + if count > len(self.payloads): return None + out = [] + for _ in range(count): + out.append(self.payloads.popleft().decode('utf-8')) + return '\n'.join(out) + + def close(self): + pass + def __repr__(self): return str(self.payloads) + def settimeout(self, timeout): + self.timeout = timeout class BrokenSocket(FakeSocket): + def __init__(self, error_number=None): + super(BrokenSocket, self).__init__() + + self.error_number = error_number def send(self, payload): - raise socket.error("Socket error") + error = socket.error("Socket error [Errno {}]".format(self.error_number)) + if self.error_number: + error.errno = self.error_number + + raise error -class TestDogStatsd(object): +class OverflownSocket(BrokenSocket): + + def __init__(self): + super(OverflownSocket, self).__init__(errno.EAGAIN) + + +def telemetry_metrics(metrics=1, events=0, service_checks=0, bytes_sent=0, bytes_dropped_writer=0, packets_sent=1, packets_dropped_writer=0, transport="udp", tags="", bytes_dropped_queue=0, packets_dropped_queue=0): + tags = "," + tags if tags else "" + + return "\n".join([ + 
"datadog.dogstatsd.client.metrics:{}|c|#client:py,client_version:{},client_transport:{}{}".format(metrics, version, transport, tags), + "datadog.dogstatsd.client.events:{}|c|#client:py,client_version:{},client_transport:{}{}".format(events, version, transport, tags), + "datadog.dogstatsd.client.service_checks:{}|c|#client:py,client_version:{},client_transport:{}{}".format(service_checks, version, transport, tags), + "datadog.dogstatsd.client.bytes_sent:{}|c|#client:py,client_version:{},client_transport:{}{}".format(bytes_sent, version, transport, tags), + "datadog.dogstatsd.client.bytes_dropped:{}|c|#client:py,client_version:{},client_transport:{}{}".format(bytes_dropped_queue + bytes_dropped_writer, version, transport, tags), + "datadog.dogstatsd.client.bytes_dropped_queue:{}|c|#client:py,client_version:{},client_transport:{}{}".format(bytes_dropped_queue, version, transport, tags), + "datadog.dogstatsd.client.bytes_dropped_writer:{}|c|#client:py,client_version:{},client_transport:{}{}".format(bytes_dropped_writer, version, transport, tags), + "datadog.dogstatsd.client.packets_sent:{}|c|#client:py,client_version:{},client_transport:{}{}".format(packets_sent, version, transport, tags), + "datadog.dogstatsd.client.packets_dropped:{}|c|#client:py,client_version:{},client_transport:{}{}".format(packets_dropped_queue + packets_dropped_writer, version, transport, tags), + "datadog.dogstatsd.client.packets_dropped_queue:{}|c|#client:py,client_version:{},client_transport:{}{}".format(packets_dropped_queue, version, transport, tags), + "datadog.dogstatsd.client.packets_dropped_writer:{}|c|#client:py,client_version:{},client_transport:{}{}".format(packets_dropped_writer, version, transport, tags), + ]) + "\n" + + +class TestDogStatsd(unittest.TestCase): + METRIC_TYPE_MAP = { + 'gauge': { 'id': 'g' }, + 'timing': { 'id': 'ms' }, + } def setUp(self): - self.statsd = DogStatsd() + """ + Set up a default Dogstatsd instance and mock the proc filesystem. 
+ """ + # + self.statsd = DogStatsd(telemetry_min_flush_interval=0) self.statsd.socket = FakeSocket() + self.statsd._reset_telemetry() + + # Mock the proc filesystem + route_data = load_fixtures('route') + self._procfs_mock = patch('datadog.util.compat.builtins.open', mock_open()) + self._procfs_mock.start().return_value.readlines.return_value = route_data.split("\n") + + def tearDown(self): + """ + Unmock the proc filesystem. + """ + self._procfs_mock.stop() + + def assert_equal_telemetry(self, expected_payload, actual_payload, telemetry=None, **kwargs): + if telemetry is None: + telemetry = telemetry_metrics(bytes_sent=len(expected_payload), **kwargs) + + if expected_payload: + expected_payload = "\n".join([expected_payload, telemetry]) + else: + expected_payload = telemetry + + self.maxDiff = None + return self.assertEqual(expected_payload, actual_payload) + + def send_and_assert( + self, + dogstatsd, + expected_metrics, + last_telemetry_size=0, + buffered=False, + ): + """ + Send and then asserts that a chain of metrics arrive in the right order + and with expected telemetry values. 
+ """ - def recv(self): - return self.statsd.socket.recv() + expected_messages = [] + for metric_type, metric_name, metric_value in expected_metrics: + # Construct the expected message data + metric_type_id = TestDogStatsd.METRIC_TYPE_MAP[metric_type]['id'] + expected_messages.append( + "{}:{}|{}\n".format(metric_name, metric_value, metric_type_id) + ) + + # Send the value + getattr(dogstatsd, metric_type)(metric_name, metric_value) + + # Sanity check + if buffered: + # Ensure that packets didn't arrive immediately if we are expecting + # buffering behavior + self.assertIsNone(dogstatsd.socket.recv(2, no_wait=True)) + + metrics = 1 + if buffered: + metrics = len(expected_messages) + + if buffered: + expected_messages = [ ''.join(expected_messages) ] + + for message in expected_messages: + packets_sent = 1 + # For all ono-initial packets, our current telemetry stats will + # contain the metadata for the last telemetry packet as well. + if last_telemetry_size > 0: + packets_sent += 1 + + expected_metrics=telemetry_metrics( + metrics=metrics, + packets_sent=packets_sent, + bytes_sent=len(message) + last_telemetry_size + ) + self.assert_equal_telemetry( + message, + dogstatsd.socket.recv(2, no_wait=not buffered, reset_wait=True), + telemetry=expected_metrics, + ) + last_telemetry_size = len(expected_metrics) + + return last_telemetry_size + + def assert_almost_equal(self, val1, val2, delta): + """ + Calculates a delta between first and second value and ensures + that this difference falls within the delta range + """ + return self.assertTrue( + 0 <= abs(val1 - val2) <= delta, + "Absolute difference of {} and {} ({}) is not within {}".format( + val1, + val2, + abs(val1-val2), + delta, + ), + ) + + def recv(self, *args, **kwargs): + return self.statsd.socket.recv(*args, **kwargs) def test_initialization(self): + """ + `initialize` overrides `statsd` default instance attributes. 
+ """ options = { 'statsd_host': "myhost", 'statsd_port': 1234 } - t.assert_equal(statsd.host, "localhost") - t.assert_equal(statsd.port, 8125) + # Default values + self.assertEqual(statsd.host, "localhost") + self.assertEqual(statsd.port, 8125) + + # After initialization initialize(**options) - t.assert_equal(statsd.host, "myhost") - t.assert_equal(statsd.port, 1234) + self.assertEqual(statsd.host, "myhost") + self.assertEqual(statsd.port, 1234) + + # Add namespace + options['statsd_namespace'] = "mynamespace" + initialize(**options) + self.assertEqual(statsd.host, "myhost") + self.assertEqual(statsd.port, 1234) + self.assertEqual(statsd.namespace, "mynamespace") + + # Set `statsd` host to the system's default route + initialize(statsd_use_default_route=True, **options) + self.assertEqual(statsd.host, "172.17.0.1") + self.assertEqual(statsd.port, 1234) + + # Add UNIX socket + options['statsd_socket_path'] = '/var/run/dogstatsd.sock' + initialize(**options) + self.assertEqual(statsd.socket_path, options['statsd_socket_path']) + self.assertIsNone(statsd.host) + self.assertIsNone(statsd.port) + + def test_dogstatsd_initialization_with_env_vars(self): + """ + Dogstatsd can retrieve its config from env vars when + not provided in constructor. + """ + # Setup + with preserve_environment_variable('DD_AGENT_HOST'): + os.environ['DD_AGENT_HOST'] = 'myenvvarhost' + with preserve_environment_variable('DD_DOGSTATSD_PORT'): + os.environ['DD_DOGSTATSD_PORT'] = '4321' + dogstatsd = DogStatsd() + + # Assert + self.assertEqual(dogstatsd.host, "myenvvarhost") + self.assertEqual(dogstatsd.port, 4321) + + def test_initialization_closes_socket(self): + statsd.socket = FakeSocket() + self.assertIsNotNone(statsd.socket) + initialize() + self.assertIsNone(statsd.socket) + + def test_default_route(self): + """ + Dogstatsd host can be dynamically set to the default route. 
+ """ + self.assertEqual( + DogStatsd(use_default_route=True).host, + "172.17.0.1" + ) def test_set(self): self.statsd.set('set', 123) - assert self.recv() == 'set:123|s' + self.assert_equal_telemetry('set:123|s\n', self.recv(2)) + + def test_report(self): + self.statsd._report('report', 'g', 123.4, tags=None, sample_rate=None) + self.assert_equal_telemetry('report:123.4|g\n', self.recv(2)) + + def test_report_metric_with_unsupported_ts(self): + self.statsd._reset_telemetry() + self.statsd._report('report', 'h', 123.5, tags=None, sample_rate=None, timestamp=100) + self.assert_equal_telemetry('report:123.5|h\n', self.recv(2)) + + self.statsd._reset_telemetry() + self.statsd._report('set', 's', 123, tags=None, sample_rate=None, timestamp=100) + self.assert_equal_telemetry('set:123|s\n', self.recv(2)) def test_gauge(self): self.statsd.gauge('gauge', 123.4) - assert self.recv() == 'gauge:123.4|g' + self.assert_equal_telemetry('gauge:123.4|g\n', self.recv(2)) + + def test_gauge_with_ts(self): + self.statsd.gauge_with_timestamp("gauge", 123.4, timestamp=1066) + self.assert_equal_telemetry("gauge:123.4|g|T1066\n", self.recv(2)) + + def test_gauge_with_invalid_ts_should_be_ignored(self): + self.statsd.gauge_with_timestamp("gauge", 123.4, timestamp=-500) + self.assert_equal_telemetry("gauge:123.4|g\n", self.recv(2)) def test_counter(self): self.statsd.increment('page.views') - t.assert_equal('page.views:1|c', self.recv()) + self.statsd.flush() + self.assert_equal_telemetry('page.views:1|c\n', self.recv(2)) + self.statsd._reset_telemetry() self.statsd.increment('page.views', 11) - t.assert_equal('page.views:11|c', self.recv()) + self.statsd.flush() + self.assert_equal_telemetry('page.views:11|c\n', self.recv(2)) + self.statsd._reset_telemetry() self.statsd.decrement('page.views') - t.assert_equal('page.views:-1|c', self.recv()) + self.statsd.flush() + self.assert_equal_telemetry('page.views:-1|c\n', self.recv(2)) + self.statsd._reset_telemetry() 
self.statsd.decrement('page.views', 12) - t.assert_equal('page.views:-12|c', self.recv()) + self.statsd.flush() + self.assert_equal_telemetry('page.views:-12|c\n', self.recv(2)) + + def test_count(self): + self.statsd.count('page.views', 11) + self.statsd.flush() + self.assert_equal_telemetry('page.views:11|c\n', self.recv(2)) + + def test_count_with_ts(self): + self.statsd.count_with_timestamp("page.views", 1, timestamp=1066) + self.statsd.flush() + self.assert_equal_telemetry("page.views:1|c|T1066\n", self.recv(2)) + + self.statsd._reset_telemetry() + self.statsd.count_with_timestamp("page.views", 11, timestamp=2121) + self.statsd.flush() + self.assert_equal_telemetry("page.views:11|c|T2121\n", self.recv(2)) + + def test_count_with_invalid_ts_should_be_ignored(self): + self.statsd.count_with_timestamp("page.views", 1, timestamp=-1066) + self.statsd.flush() + self.assert_equal_telemetry("page.views:1|c\n", self.recv(2)) def test_histogram(self): self.statsd.histogram('histo', 123.4) - t.assert_equal('histo:123.4|h', self.recv()) + self.assert_equal_telemetry('histo:123.4|h\n', self.recv(2)) + + def test_pipe_in_tags(self): + self.statsd.gauge('gt', 123.4, tags=['pipe|in:tag', 'red']) + self.assert_equal_telemetry('gt:123.4|g|#pipe_in:tag,red\n', self.recv(2)) def test_tagged_gauge(self): self.statsd.gauge('gt', 123.4, tags=['country:china', 'age:45', 'blue']) - t.assert_equal('gt:123.4|g|#country:china,age:45,blue', self.recv()) + self.assert_equal_telemetry('gt:123.4|g|#country:china,age:45,blue\n', self.recv(2)) def test_tagged_counter(self): - self.statsd.increment('ct', tags=['country:canada', 'red']) - t.assert_equal('ct:1|c|#country:canada,red', self.recv()) + self.statsd.increment('ct', tags=[u'country:españa', 'red']) + self.assert_equal_telemetry(u'ct:1|c|#country:españa,red\n', self.recv(2)) def test_tagged_histogram(self): self.statsd.histogram('h', 1, tags=['red']) - t.assert_equal('h:1|h|#red', self.recv()) + 
self.assert_equal_telemetry('h:1|h|#red\n', self.recv(2)) def test_sample_rate(self): + # Disabling telemetry since sample_rate imply randomness + self.statsd._telemetry = False + self.statsd.increment('c', sample_rate=0) - assert not self.recv() - for i in range(10000): + self.assertFalse(self.recv()) + + for _ in range(10000): self.statsd.increment('sampled_counter', sample_rate=0.3) - self.assert_almost_equal(3000, len(self.statsd.socket.payloads), 150) - t.assert_equal('sampled_counter:1|c|@0.3', self.recv()) + + self.statsd.flush() + + total_metrics = 0 + payload = self.recv() + while payload: + metrics = payload.rstrip('\n').split('\n') + for metric in metrics: + self.assertEqual('sampled_counter:1|c|@0.3', metric) + total_metrics += len(metrics) + payload = self.recv() + + self.assert_almost_equal(3000, total_metrics, 150) + + def test_default_sample_rate(self): + # Disabling telemetry since sample_rate imply randomness + self.statsd._telemetry = False + + self.statsd.default_sample_rate = 0.3 + for _ in range(10000): + self.statsd.increment('sampled_counter') + + total_metrics = 0 + payload = self.recv() + while payload: + metrics = payload.rstrip('\n').split('\n') + for metric in metrics: + self.assertEqual('sampled_counter:1|c|@0.3', metric) + + total_metrics += len(metrics) + payload = self.recv() + + self.assert_almost_equal(3000, total_metrics, 150) def test_tags_and_samples(self): - for i in range(100): + # Disabling telemetry since sample_rate imply randomness + self.statsd._telemetry = False + + for _ in range(100): self.statsd.gauge('gst', 23, tags=["sampled"], sample_rate=0.9) - def test_tags_and_samples(self): - for i in range(100): - self.statsd.gauge('gst', 23, tags=["sampled"], sample_rate=0.9) - t.assert_equal('gst:23|g|@0.9|#sampled') + self.assertEqual('gst:23|g|@0.9|#sampled', self.recv().split('\n')[0]) def test_timing(self): self.statsd.timing('t', 123) - t.assert_equal('t:123|ms', self.recv()) + self.assert_equal_telemetry('t:123|ms\n', 
self.recv(2)) def test_event(self): + self.statsd.event( + 'Title', + u'L1\nL2', + priority='low', + date_happened=1375296969, + ) + event2 = u'_e{5,6}:Title|L1\\nL2|d:1375296969|p:low\n' + self.assert_equal_telemetry( + event2, + self.recv(2), + telemetry=telemetry_metrics( + metrics=0, + events=1, + bytes_sent=len(event2), + ), + ) + + self.statsd._reset_telemetry() + + self.statsd.event('Title', u'♬ †øU †øU ¥ºu T0µ ♪', + aggregation_key='key', tags=['t1', 't2:v2']) + event3 = u'_e{5,32}:Title|♬ †øU †øU ¥ºu T0µ ♪|k:key|#t1,t2:v2\n' + self.assert_equal_telemetry( + event3, + self.recv(2, reset_wait=True), + telemetry=telemetry_metrics( + metrics=0, + events=1, + bytes_sent=len(event3), + ), + ) + + def test_unicode_event(self): + self.statsd.event( + 'my.prefix.Delivery - Daily Settlement Summary Report Delivery — Invoice Cloud succeeded', + 'Delivered — destination.csv') + event = u'_e{89,29}:my.prefix.Delivery - Daily Settlement Summary Report Delivery — Invoice Cloud succeeded|' + \ + u'Delivered — destination.csv\n' + self.assert_equal_telemetry( + event, + self.recv(2), + telemetry=telemetry_metrics( + metrics=0, + events=1, + bytes_sent=len(event), + ), + ) + + self.statsd._reset_telemetry() + + # Positional arg names should match threadstats + def test_event_matching_signature(self): + self.statsd.event(title="foo", message="bar1") + event = u'_e{3,4}:foo|bar1\n' + self.assert_equal_telemetry( + event, + self.recv(2), + telemetry=telemetry_metrics( + metrics=0, + events=1, + bytes_sent=len(event), + ), + ) + + self.statsd._reset_telemetry() + + def test_event_constant_tags(self): + self.statsd.constant_tags = ['bar:baz', 'foo'] self.statsd.event('Title', u'L1\nL2', priority='low', date_happened=1375296969) - t.assert_equal(u'_e{5,6}:Title|L1\\nL2|d:1375296969|p:low', self.recv()) + event = u'_e{5,6}:Title|L1\\nL2|d:1375296969|p:low|#bar:baz,foo\n' + self.assert_equal_telemetry( + event, + self.recv(2), + telemetry=telemetry_metrics( + metrics=0, + events=1, 
+ tags="bar:baz,foo", + bytes_sent=len(event), + ), + ) + + self.statsd._reset_telemetry() self.statsd.event('Title', u'♬ †øU †øU ¥ºu T0µ ♪', aggregation_key='key', tags=['t1', 't2:v2']) - t.assert_equal(u'_e{5,19}:Title|♬ †øU †øU ¥ºu T0µ ♪|k:key|#t1,t2:v2', self.recv()) + event = u'_e{5,32}:Title|♬ †øU †øU ¥ºu T0µ ♪|k:key|#t1,t2:v2,bar:baz,foo\n' + self.assert_equal_telemetry( + event, + self.recv(2, reset_wait=True), + telemetry=telemetry_metrics( + metrics=0, + events=1, + tags="bar:baz,foo", + bytes_sent=len(event), + ), + ) + + def test_event_payload_error(self): + def func(): + # define an event payload that is > 8 * 1024 + message = ["l" for i in range(8 * 1024)] + message = "".join(message) + payload = {"title": "title", "message": message} + + self.statsd.event(**payload) + + # check that the method fails when the payload is too large + with pytest.raises(ValueError): + func() + + # check that the method does not fail with a small payload + self.statsd.event("title", "message") def test_service_check(self): now = int(time.time()) @@ -133,56 +565,425 @@ def test_service_check(self): 'my_check.name', self.statsd.WARNING, tags=['key1:val1', 'key2:val2'], timestamp=now, hostname='i-abcd1234', message=u"♬ †øU \n†øU ¥ºu|m: T0µ ♪") - t.assert_equal( - u'_sc|my_check.name|{0}|d:{1}|h:i-abcd1234|#key1:val1,key2:val2|m:{2}' - .format(self.statsd.WARNING, now, u"♬ †øU \\n†øU ¥ºu|m\: T0µ ♪"), self.recv()) + check = u'_sc|my_check.name|{0}|d:{1}|h:i-abcd1234|#key1:val1,key2:val2|m:{2}'.format(self.statsd.WARNING, now, u'♬ †øU \\n†øU ¥ºu|m\\: T0µ ♪\n') + self.assert_equal_telemetry( + check, + self.recv(2), + telemetry=telemetry_metrics( + metrics=0, + service_checks=1, + bytes_sent=len(check), + ), + ) - @staticmethod - def assert_almost_equal(a, b, delta): - assert 0 <= abs(a - b) <= delta, "%s - %s not within %s" % (a, b, delta) + def test_service_check_constant_tags(self): + self.statsd.constant_tags = ['bar:baz', 'foo'] + now = int(time.time()) + 
self.statsd.service_check( + 'my_check.name', self.statsd.WARNING, + timestamp=now, + hostname='i-abcd1234', message=u"♬ †øU \n†øU ¥ºu|m: T0µ ♪") + check = u'_sc|my_check.name|{0}|d:{1}|h:i-abcd1234|#bar:baz,foo|m:{2}'.format(self.statsd.WARNING, now, u"♬ †øU \\n†øU ¥ºu|m\\: T0µ ♪\n") + self.assert_equal_telemetry( + check, + self.recv(2, True), + telemetry=telemetry_metrics( + metrics=0, + service_checks=1, + tags="bar:baz,foo", + bytes_sent=len(check), + ), + ) + + self.statsd._reset_telemetry() + + self.statsd.service_check( + 'my_check.name', self.statsd.WARNING, + tags=['key1:val1', 'key2:val2'], timestamp=now, + hostname='i-abcd1234', message=u"♬ †øU \n†øU ¥ºu|m: T0µ ♪") + check = u'_sc|my_check.name|{0}|d:{1}|h:i-abcd1234|#key1:val1,key2:val2,bar:baz,foo|m:{2}'.format(self.statsd.WARNING, now, u"♬ †øU \\n†øU ¥ºu|m\\: T0µ ♪\n") + self.assert_equal_telemetry( + check, + self.recv(2, True), + telemetry=telemetry_metrics( + metrics=0, + service_checks=1, + tags="bar:baz,foo", + bytes_sent=len(check), + ), + ) + + def test_metric_namespace(self): + """ + Namespace prefixes all metric names. 
+ """ + self.statsd.namespace = "foo" + self.statsd.gauge('gauge', 123.4) + self.assert_equal_telemetry('foo.gauge:123.4|g\n', self.recv(2)) + + # Test Client level content tags + def test_gauge_constant_tags(self): + self.statsd.constant_tags = ['bar:baz', 'foo'] + self.statsd.gauge('gauge', 123.4) + metric = 'gauge:123.4|g|#bar:baz,foo\n' + self.assert_equal_telemetry(metric, self.recv(2), telemetry=telemetry_metrics(tags="bar:baz,foo", bytes_sent=len(metric))) + + def test_counter_constant_tag_with_metric_level_tags(self): + self.statsd.constant_tags = ['bar:baz', 'foo'] + self.statsd.increment('page.views', tags=['extra']) + metric = 'page.views:1|c|#extra,bar:baz,foo\n' + self.assert_equal_telemetry(metric, self.recv(2), telemetry=telemetry_metrics(tags="bar:baz,foo", bytes_sent=len(metric))) + + def test_gauge_constant_tags_with_metric_level_tags_twice(self): + metric_level_tag = ['foo:bar'] + self.statsd.constant_tags = ['bar:baz'] + self.statsd.gauge('gauge', 123.4, tags=metric_level_tag) + metric = 'gauge:123.4|g|#foo:bar,bar:baz\n' + self.assert_equal_telemetry( + metric, + self.recv(2), + telemetry=telemetry_metrics( + tags="bar:baz", + bytes_sent=len(metric), + ), + ) + + self.statsd._reset_telemetry() + + # sending metrics multiple times with same metric-level tags + # should not duplicate the tags being sent + self.statsd.gauge('gauge', 123.4, tags=metric_level_tag) + metric = 'gauge:123.4|g|#foo:bar,bar:baz\n' + self.assert_equal_telemetry( + metric, + self.recv(2, reset_wait=True), + telemetry=telemetry_metrics( + tags="bar:baz", + bytes_sent=len(metric), + ), + ) def test_socket_error(self): self.statsd.socket = BrokenSocket() - self.statsd.gauge('no error', 1) - assert True, 'success' + with mock.patch("datadog.dogstatsd.base.log") as mock_log: + self.statsd.gauge('no error', 1) + self.statsd.flush() - def test_timed(self): + mock_log.error.assert_not_called() + mock_log.warning.assert_called_once_with( + "Error submitting packet: %s, dropping the 
packet and closing the socket", + mock.ANY, + ) + + def test_socket_overflown(self): + self.statsd.socket = OverflownSocket() + with mock.patch("datadog.dogstatsd.base.log") as mock_log: + self.statsd.gauge('no error', 1) + self.statsd.flush() + + mock_log.error.assert_not_called() + calls = [call("Socket send would block: %s, dropping the packet", mock.ANY)] + mock_log.debug.assert_has_calls(calls * 2) + def test_socket_message_too_long(self): + self.statsd.socket = BrokenSocket(error_number=errno.EMSGSIZE) + with mock.patch("datadog.dogstatsd.base.log") as mock_log: + self.statsd.gauge('no error', 1) + self.statsd.flush() + + mock_log.error.assert_not_called() + calls = [ + call( + "Packet size too big (size: %d): %s, dropping the packet", + mock.ANY, + mock.ANY, + ), + ] + mock_log.debug.assert_has_calls(calls * 2) + + def test_socket_no_buffer_space(self): + self.statsd.socket = BrokenSocket(error_number=errno.ENOBUFS) + with mock.patch("datadog.dogstatsd.base.log") as mock_log: + self.statsd.gauge('no error', 1) + self.statsd.flush() + + mock_log.error.assert_not_called() + calls = [call("Socket buffer full: %s, dropping the packet", mock.ANY)] + mock_log.debug.assert_has_calls(calls * 2) + + @patch('socket.socket') + def test_uds_socket_ensures_min_receive_buffer(self, mock_socket_create): + mock_socket = mock_socket_create.return_value + mock_socket.setblocking.return_value = None + mock_socket.connect.return_value = None + mock_socket.getsockopt.return_value = MIN_SEND_BUFFER_SIZE / 2 + + datadog = DogStatsd(socket_path="/fake/uds/socket/path") + datadog.gauge('some value', 1) + datadog.flush() + + # Sanity check + mock_socket_create.assert_called_once_with(socket.AF_UNIX, socket.SOCK_DGRAM) + + mock_socket.setsockopt.assert_called_once_with( + socket.SOL_SOCKET, + socket.SO_SNDBUF, + MIN_SEND_BUFFER_SIZE, + ) + + @patch('socket.socket') + def test_udp_socket_ensures_min_receive_buffer(self, mock_socket_create): + mock_socket = 
mock_socket_create.return_value + mock_socket.setblocking.return_value = None + mock_socket.connect.return_value = None + mock_socket.getsockopt.return_value = MIN_SEND_BUFFER_SIZE / 2 + + datadog = DogStatsd() + datadog.gauge('some value', 1) + datadog.flush() + + # Sanity check + mock_socket_create.assert_called_once_with(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP) + + mock_socket.setsockopt.assert_called_once_with( + socket.SOL_SOCKET, + socket.SO_SNDBUF, + MIN_SEND_BUFFER_SIZE, + ) + + def test_socket_path_updates_telemetry(self): + self.statsd.gauge("foo", 1) + self.assert_equal_telemetry("foo:1|g\n", self.recv(2), transport="udp") + self.statsd.socket_path = "/fake/path" + self.statsd._reset_telemetry() + self.statsd.gauge("foo", 2) + self.assert_equal_telemetry("foo:2|g\n", self.recv(2), transport="uds") + + def test_distributed(self): + """ + Measure the distribution of a function's run time using distribution custom metric. + """ + # In seconds + @self.statsd.distributed('distributed.test') + def func(arg1, arg2, kwarg1=1, kwarg2=1): + """docstring""" + time.sleep(0.1) + return (arg1, arg2, kwarg1, kwarg2) + + self.assertEqual('func', func.__name__) + self.assertEqual('docstring', func.__doc__) + + result = func(1, 2, kwarg2=3) + # Assert it handles args and kwargs correctly. 
+ self.assertEqual(result, (1, 2, 1, 3)) + + packet = self.recv(2).split("\n")[0] # ignore telemetry packet + name_value, type_ = packet.split('|') + name, value = name_value.split(':') + + self.assertEqual('d', type_) + self.assertEqual('distributed.test', name) + self.assert_almost_equal(0.1, float(value), 0.09) + + # Repeat, force timer value in milliseconds + @self.statsd.distributed('distributed.test', use_ms=True) + def func(arg1, arg2, kwarg1=1, kwarg2=1): + """docstring""" + time.sleep(0.5) + return (arg1, arg2, kwarg1, kwarg2) + + func(1, 2, kwarg2=3) + + # Ignore telemetry packet + packet = self.recv(2, reset_wait=True).split("\n")[0] + name_value, type_ = packet.split('|') + name, value = name_value.split(':') + + self.assertEqual('d', type_) + self.assertEqual('distributed.test', name) + self.assert_almost_equal(500, float(value), 100) + + def test_timed(self): + """ + Measure the distribution of a function's run time. + """ + # In seconds @self.statsd.timed('timed.test') - def func(a, b, c=1, d=1): + def func(arg1, arg2, kwarg1=1, kwarg2=1): """docstring""" time.sleep(0.5) - return (a, b, c, d) + return (arg1, arg2, kwarg1, kwarg2) - t.assert_equal('func', func.__name__) - t.assert_equal('docstring', func.__doc__) + self.assertEqual('func', func.__name__) + self.assertEqual('docstring', func.__doc__) - result = func(1, 2, d=3) + result = func(1, 2, kwarg2=3) # Assert it handles args and kwargs correctly. 
- t.assert_equal(result, (1, 2, 1, 3)) + self.assertEqual(result, (1, 2, 1, 3)) + + packet = self.recv(2).split("\n")[0] # ignore telemetry packet + name_value, type_ = packet.split('|') + name, value = name_value.split(':') + + self.assertEqual('ms', type_) + self.assertEqual('timed.test', name) + self.assert_almost_equal(0.5, float(value), 0.1) + + # Repeat, force timer value in milliseconds + @self.statsd.timed('timed.test', use_ms=True) + def func(arg1, arg2, kwarg1=1, kwarg2=1): + """docstring""" + time.sleep(0.5) + return (arg1, arg2, kwarg1, kwarg2) + + func(1, 2, kwarg2=3) + self.statsd.flush() + + # Ignore telemetry packet + packet = self.recv(2).split("\n")[0] + name_value, type_ = packet.split('|') + name, value = name_value.split(':') + + self.assertEqual('ms', type_) + self.assertEqual('timed.test', name) + self.assert_almost_equal(500, float(value), 100) + + def test_timed_in_ms(self): + """ + Timed value is reported in ms when statsd.use_ms is True. + """ + # Arm statsd to use_ms + self.statsd.use_ms = True + + # Sample a function run time + @self.statsd.timed('timed.test') + def func(arg1, arg2, kwarg1=1, kwarg2=1): + """docstring""" + time.sleep(0.5) + return (arg1, arg2, kwarg1, kwarg2) + + func(1, 2, kwarg2=3) + + # Assess the packet + packet = self.recv(2).split("\n")[0] # ignore telemetry packet + name_value, type_ = packet.split('|') + name, value = name_value.split(':') + + self.assertEqual('ms', type_) + self.assertEqual('timed.test', name) + self.assert_almost_equal(500, float(value), 100) + + # Repeat, force timer value in seconds + @self.statsd.timed('timed.test', use_ms=False) + def func(arg1, arg2, kwarg1=1, kwarg2=1): + """docstring""" + time.sleep(0.5) + return (arg1, arg2, kwarg1, kwarg2) + + func(1, 2, kwarg2=3) + self.statsd.flush() packet = self.recv() + name_value, type_ = packet.rstrip('\n').split('|') + name, value = name_value.split(':') + + self.assertEqual('ms', type_) + self.assertEqual('timed.test', name) + 
self.assert_almost_equal(0.5, float(value), 0.1) + + def test_timed_no_metric(self, ): + """ + Test using a decorator without providing a metric. + """ + + @self.statsd.timed() + def func(arg1, arg2, kwarg1=1, kwarg2=1): + """docstring""" + time.sleep(0.5) + return (arg1, arg2, kwarg1, kwarg2) + + self.assertEqual('func', func.__name__) + self.assertEqual('docstring', func.__doc__) + + result = func(1, 2, kwarg2=3) + # Assert it handles args and kwargs correctly. + self.assertEqual(result, (1, 2, 1, 3)) + + packet = self.recv(2).split("\n")[0] # ignore telemetry packet + name_value, type_ = packet.split('|') + name, value = name_value.split(':') + + self.assertEqual('ms', type_) + self.assertEqual('tests.unit.dogstatsd.test_statsd.func', name) + self.assert_almost_equal(0.5, float(value), 0.1) + + @unittest.skipIf(not is_higher_py35(), reason="Coroutines are supported on Python 3.5 or higher.") + def test_timed_coroutine(self): + """ + Measure the distribution of a coroutine function's run time. + + Warning: Python > 3.5 only. + """ + import asyncio + + source = """ +@self.statsd.timed('timed.test') +async def print_foo(): + "docstring" + import time + time.sleep(0.5) + print("foo") + """ + exec(source, {}, locals()) + + loop = asyncio.get_event_loop() + loop.run_until_complete(locals()['print_foo']()) + loop.close() + + # Assert + packet = self.recv(2).split("\n")[0] # ignore telemetry packet name_value, type_ = packet.split('|') name, value = name_value.split(':') - t.assert_equal('ms', type_) - t.assert_equal('timed.test', name) + self.assertEqual('ms', type_) + self.assertEqual('timed.test', name) self.assert_almost_equal(0.5, float(value), 0.1) def test_timed_context(self): - with self.statsd.timed('timed_context.test'): + """ + Measure the distribution of a context's run time. 
+ """ + # In seconds + with self.statsd.timed('timed_context.test') as timer: + self.assertTrue(isinstance(timer, TimedContextManagerDecorator)) time.sleep(0.5) - packet = self.recv() + packet = self.recv(2).split("\n")[0] # ignore telemetry packet name_value, type_ = packet.split('|') name, value = name_value.split(':') - t.assert_equal('ms', type_) - t.assert_equal('timed_context.test', name) + self.assertEqual('ms', type_) + self.assertEqual('timed_context.test', name) self.assert_almost_equal(0.5, float(value), 0.1) + self.assert_almost_equal(0.5, timer.elapsed, 0.1) + + # In milliseconds + with self.statsd.timed('timed_context.test', use_ms=True) as timer: + time.sleep(0.5) + + packet = self.recv(2, reset_wait=True).split("\n")[0] # ignore telemetry packet + name_value, type_ = packet.split('|') + name, value = name_value.split(':') + + self.assertEqual('ms', type_) + self.assertEqual('timed_context.test', name) + self.assert_almost_equal(500, float(value), 100) + self.assert_almost_equal(500, timer.elapsed, 100) def test_timed_context_exception(self): - """Test that an exception bubbles out of the context manager.""" + """ + Exception bubbles out of the `timed` context manager. + """ class ContextException(Exception): pass @@ -192,55 +993,671 @@ def func(self): raise ContextException() # Ensure the exception was raised. - t.assert_raises(ContextException, func, self) + with pytest.raises(ContextException): + func(self) # Ensure the timing was recorded. 
- packet = self.recv() + packet = self.recv(2).split("\n")[0] # ignore telemetry packet name_value, type_ = packet.split('|') name, value = name_value.split(':') - t.assert_equal('ms', type_) - t.assert_equal('timed_context.test.exception', name) + self.assertEqual('ms', type_) + self.assertEqual('timed_context.test.exception', name) self.assert_almost_equal(0.5, float(value), 0.1) - def test_batched(self): + def test_timed_context_no_metric_exception(self): + """Test that an exception occurs if using a context manager without a metric.""" + + def func(self): + with self.statsd.timed(): + time.sleep(0.5) + + # Ensure the exception was raised. + with pytest.raises(TypeError): + func(self) + + # Ensure the timing was recorded. + packet = self.statsd.socket.recv() + self.assertIsNone(packet) + + def test_timed_start_stop_calls(self): + # In seconds + timer = self.statsd.timed('timed_context.test') + timer.start() + time.sleep(0.5) + timer.stop() + + packet = self.recv(2).split("\n")[0] # ignore telemetry packet + name_value, type_ = packet.split('|') + name, value = name_value.split(':') + + self.assertEqual('ms', type_) + self.assertEqual('timed_context.test', name) + self.assert_almost_equal(0.5, float(value), 0.1) + + # In milliseconds + timer = self.statsd.timed('timed_context.test', use_ms=True) + timer.start() + time.sleep(0.5) + timer.stop() + + packet = self.recv(2, reset_wait=True).split("\n")[0] # ignore telemetry packet + name_value, type_ = packet.split('|') + name, value = name_value.split(':') + + self.assertEqual('ms', type_) + self.assertEqual('timed_context.test', name) + self.assert_almost_equal(500, float(value), 100) + + def test_batching(self): + self.statsd.open_buffer() + self.statsd.gauge('page.views', 123) + self.statsd.timing('timer', 123) + self.statsd.close_buffer() + expected = 'page.views:123|g\ntimer:123|ms\n' + self.assert_equal_telemetry( + expected, + self.recv(2), + telemetry=telemetry_metrics(metrics=2, bytes_sent=len(expected)) + ) 
+ + def test_flush(self): + dogstatsd = DogStatsd(disable_buffering=False, telemetry_min_flush_interval=0) + fake_socket = FakeSocket() + dogstatsd.socket = fake_socket + + dogstatsd.increment('page.views') + self.assertIsNone(fake_socket.recv(no_wait=True)) + dogstatsd.flush() + self.assert_equal_telemetry('page.views:1|c\n', fake_socket.recv(2)) + + def test_flush_interval(self): + dogstatsd = DogStatsd(disable_buffering=False, flush_interval=1, telemetry_min_flush_interval=0) + fake_socket = FakeSocket() + dogstatsd.socket = fake_socket + + dogstatsd.increment('page.views') + self.assertIsNone(fake_socket.recv(no_wait=True)) + + time.sleep(0.3) + self.assertIsNone(fake_socket.recv(no_wait=True)) + + time.sleep(1) + self.assert_equal_telemetry( + 'page.views:1|c\n', + fake_socket.recv(2, no_wait=True) + ) + + def test_aggregation_buffering_simultaneously(self): + dogstatsd = DogStatsd(disable_buffering=False, disable_aggregation=False, telemetry_min_flush_interval=0) + fake_socket = FakeSocket() + dogstatsd.socket = fake_socket + for _ in range(10): + dogstatsd.increment('test.aggregation_and_buffering') + self.assertIsNone(fake_socket.recv(no_wait=True)) + dogstatsd.flush_aggregated_metrics() + dogstatsd.flush() + self.assert_equal_telemetry('test.aggregation_and_buffering:10|c\n', fake_socket.recv(2)) + + def test_aggregation_buffering_simultaneously_with_interval(self): + dogstatsd = DogStatsd(disable_buffering=False, disable_aggregation=False, flush_interval=1, telemetry_min_flush_interval=0) + fake_socket = FakeSocket() + dogstatsd.socket = fake_socket + for _ in range(10): + dogstatsd.increment('test.aggregation_and_buffering_with_interval') + self.assertIsNone(fake_socket.recv(no_wait=True)) + + time.sleep(0.3) + self.assertIsNone(fake_socket.recv(no_wait=True)) + + time.sleep(1) + self.assert_equal_telemetry( + 'test.aggregation_and_buffering_with_interval:10|c\n', + fake_socket.recv(2, no_wait=True) + ) + + def test_disable_buffering(self): + dogstatsd = 
DogStatsd(disable_buffering=True, telemetry_min_flush_interval=0) + fake_socket = FakeSocket() + dogstatsd.socket = fake_socket + + dogstatsd.increment('page.views') + self.assert_equal_telemetry( + 'page.views:1|c\n', + fake_socket.recv(2, no_wait=True) + ) + + def test_flush_disable(self): + dogstatsd = DogStatsd( + disable_buffering=False, + flush_interval=0, + telemetry_min_flush_interval=0 + ) + fake_socket = FakeSocket() + dogstatsd.socket = fake_socket + + dogstatsd.increment('page.views') + self.assertIsNone(fake_socket.recv(no_wait=True)) + + time.sleep(DEFAULT_BUFFERING_FLUSH_INTERVAL) + self.assertIsNone(fake_socket.recv(no_wait=True)) + + time.sleep(0.3) + self.assertIsNone(fake_socket.recv(no_wait=True)) + + @unittest.skip("Buffering has been disabled again so the deprecation is not valid") + @patch("warnings.warn") + def test_manual_buffer_ops_deprecation(self, mock_warn): + self.assertFalse(mock_warn.called) + + self.statsd.open_buffer() + self.assertTrue(mock_warn.called) + self.assertEqual(mock_warn.call_count, 1) + + self.statsd.close_buffer() + self.assertEqual(mock_warn.call_count, 2) + + def test_batching_sequential(self): + self.statsd.open_buffer() + self.statsd.gauge('discarded.data', 123) + self.statsd.close_buffer() + self.statsd.open_buffer() self.statsd.gauge('page.views', 123) self.statsd.timing('timer', 123) self.statsd.close_buffer() - t.assert_equal('page.views:123|g\ntimer:123|ms', self.recv()) + expected1 = 'discarded.data:123|g\n' + expected_metrics1=telemetry_metrics(metrics=1, bytes_sent=len(expected1)) + self.assert_equal_telemetry( + expected1, + self.recv(2), + telemetry=expected_metrics1) + + expected2 = 'page.views:123|g\ntimer:123|ms\n' + self.assert_equal_telemetry( + expected2, + self.recv(2), + telemetry=telemetry_metrics( + metrics=2, + packets_sent=2, + bytes_sent=len(expected2 + expected_metrics1) + ) + ) + + def test_batching_runtime_changes(self): + dogstatsd = DogStatsd( + disable_buffering=True, + 
telemetry_min_flush_interval=0 + ) + dogstatsd.socket = FakeSocket() + + # Send some unbuffered metrics and verify we got it immediately + last_telemetry_size = self.send_and_assert( + dogstatsd, + [ + ('gauge', 'rt.gauge', 123), + ('timing', 'rt.timer', 123), + ], + ) + + # Disable buffering (noop expected) and validate + dogstatsd.disable_buffering = True + last_telemetry_size = self.send_and_assert( + dogstatsd, + [ + ('gauge', 'rt.gauge2', 321), + ('timing', 'rt.timer2', 321), + ], + last_telemetry_size = last_telemetry_size, + ) + + # Enable buffering and validate + dogstatsd.disable_buffering = False + last_telemetry_size = self.send_and_assert( + dogstatsd, + [ + ('gauge', 'buffered.gauge', 12345), + ('timing', 'buffered.timer', 12345), + ], + last_telemetry_size = last_telemetry_size, + buffered=True, + ) + + # Enable buffering again (another noop change expected) + dogstatsd.disable_buffering = False + last_telemetry_size = self.send_and_assert( + dogstatsd, + [ + ('gauge', 'buffered.gauge2', 321), + ('timing', 'buffered.timer2', 321), + ], + last_telemetry_size = last_telemetry_size, + buffered=True, + ) + + # Flip the toggle to unbuffered functionality one more time and verify + dogstatsd.disable_buffering = True + last_telemetry_size = self.send_and_assert( + dogstatsd, + [ + ('gauge', 'rt.gauge3', 333), + ('timing', 'rt.timer3', 333), + ], + last_telemetry_size = last_telemetry_size, + ) + + def test_threaded_batching(self): + num_threads = 4 + threads = [] + + dogstatsd = DogStatsd(telemetry_min_flush_interval=0) + fake_socket = FakeSocket() + dogstatsd.socket = fake_socket + + def batch_metrics(index, dsd): + time.sleep(0.3 * index) + + dsd.open_buffer() + + time.sleep(0.1) + dsd.gauge('page.%d.views' % index, 123) + + time.sleep(0.1) + dsd.timing('timer.%d' % index, 123) + + time.sleep(0.5) + dsd.close_buffer() + + for idx in range(num_threads): + thread = Thread( + name="{}_sender_thread_{}".format(self.__class__.__name__, idx), + 
target=batch_metrics, + args=(idx, dogstatsd) + ) + thread.daemon = True + + threads.append(thread) + + for thread in threads: + thread.start() + + time.sleep(5) + + for thread in threads: + if thread.is_alive(): + thread.join(0.1) + + previous_telemetry_packet_size = 0 + thread_idx = 0 + + while thread_idx < num_threads: + first_message = "page.{}.views:123|g\n".format(thread_idx) + first_message_len = len(first_message) + second_message = "timer.{}:123|ms\n".format(thread_idx) + second_message_len = len(second_message) + + received_payload = fake_socket.recv(1) + + # Base assumption is that we got both messages but + # we may get metrics split depending on when the flush thread triggers + if received_payload == first_message: + message = first_message + packet_size = first_message_len + num_metrics = 1 + elif received_payload == second_message: + message = second_message + packet_size = second_message_len + num_metrics = 1 + thread_idx += 1 + else: + message = first_message + second_message + packet_size = len(message) + num_metrics = 2 + thread_idx += 1 + + self.assertEqual(received_payload, message) + + packet_sent = 2 + if previous_telemetry_packet_size == 0: + packet_sent = 1 + + bytes_sent = previous_telemetry_packet_size + packet_size + telemetry = telemetry_metrics( + metrics=num_metrics, + bytes_sent=bytes_sent, + packets_sent=packet_sent, + ) + self.assertEqual(telemetry, fake_socket.recv(1)) + + previous_telemetry_packet_size = len(telemetry) + + def test_telemetry(self): + self.statsd.metrics_count = 1 + self.statsd.events_count = 2 + self.statsd.service_checks_count = 3 + self.statsd.bytes_sent = 4 + self.statsd.bytes_dropped_writer = 5 + self.statsd.packets_sent = 6 + self.statsd.packets_dropped_writer = 7 + self.statsd.bytes_dropped_queue = 8 + self.statsd.packets_dropped_queue = 9 + + self.statsd.open_buffer() + self.statsd.gauge('page.views', 123) + self.statsd.close_buffer() + + payload = 'page.views:123|g\n' + telemetry = 
telemetry_metrics(metrics=2, events=2, service_checks=3, bytes_sent=4 + len(payload), + bytes_dropped_writer=5, packets_sent=7, packets_dropped_writer=7, bytes_dropped_queue=8, packets_dropped_queue=9) + + self.assert_equal_telemetry(payload, self.recv(2), telemetry=telemetry) + + self.assertEqual(0, self.statsd.metrics_count) + self.assertEqual(0, self.statsd.events_count) + self.assertEqual(0, self.statsd.service_checks_count) + self.assertEqual(len(telemetry), self.statsd.bytes_sent) + self.assertEqual(0, self.statsd.bytes_dropped_writer) + self.assertEqual(1, self.statsd.packets_sent) + self.assertEqual(0, self.statsd.packets_dropped_writer) + self.assertEqual(0, self.statsd.bytes_dropped_queue) + self.assertEqual(0, self.statsd.packets_dropped_queue) + + def test_telemetry_flush_interval(self): + dogstatsd = DogStatsd(disable_buffering=False) + fake_socket = FakeSocket() + dogstatsd.socket = fake_socket + + # Set the last flush time in the future to be sure we won't flush + dogstatsd._last_flush_time = time.time() + dogstatsd._telemetry_flush_interval + dogstatsd.gauge('gauge', 123.4) + + metric = 'gauge:123.4|g\n' + self.assertEqual(metric, fake_socket.recv()) + + time1 = time.time() + # Setting the last flush time in the past to trigger a telemetry flush + dogstatsd._last_flush_time = time1 - dogstatsd._telemetry_flush_interval -1 + dogstatsd.gauge('gauge', 123.4) + self.assert_equal_telemetry( + metric, + fake_socket.recv(2, reset_wait=True), + telemetry=telemetry_metrics( + metrics=2, + bytes_sent=2*len(metric), + packets_sent=2, + ), + ) + + # assert that _last_flush_time has been updated + self.assertTrue(time1 < dogstatsd._last_flush_time) + + def test_telemetry_flush_interval_alternate_destination(self): + dogstatsd = DogStatsd(telemetry_host='foo') + fake_socket = FakeSocket() + dogstatsd.socket = fake_socket + fake_telemetry_socket = FakeSocket() + dogstatsd.telemetry_socket = fake_telemetry_socket + + self.assertIsNotNone(dogstatsd.telemetry_host) + 
self.assertIsNotNone(dogstatsd.telemetry_port) + self.assertTrue(dogstatsd._dedicated_telemetry_destination()) + + # set the last flush time in the future to be sure we won't flush + dogstatsd._last_flush_time = time.time() + dogstatsd._telemetry_flush_interval + dogstatsd.gauge('gauge', 123.4) + + self.assertEqual('gauge:123.4|g\n', fake_socket.recv()) + + time1 = time.time() + # setting the last flush time in the past to trigger a telemetry flush + dogstatsd._last_flush_time = time1 - dogstatsd._telemetry_flush_interval - 1 + dogstatsd.gauge('gauge', 123.4) + + self.assertEqual('gauge:123.4|g\n', fake_socket.recv(reset_wait=True)) + self.assert_equal_telemetry( + '', + fake_telemetry_socket.recv(), + telemetry=telemetry_metrics( + metrics=2, + bytes_sent=14*2, + packets_sent=2, + ), + ) + + # assert that _last_flush_time has been updated + self.assertTrue(time1 < dogstatsd._last_flush_time) + + def test_telemetry_flush_interval_batch(self): + dogstatsd = DogStatsd(disable_buffering=False) + + fake_socket = FakeSocket() + dogstatsd.socket = fake_socket + + dogstatsd.open_buffer() + dogstatsd.gauge('gauge1', 1) + dogstatsd.gauge('gauge2', 2) + + time1 = time.time() + # setting the last flush time in the past to trigger a telemetry flush + dogstatsd._last_flush_time = time1 - statsd._telemetry_flush_interval -1 + dogstatsd.close_buffer() + + metric = 'gauge1:1|g\ngauge2:2|g\n' + self.assert_equal_telemetry(metric, fake_socket.recv(2), telemetry=telemetry_metrics(metrics=2, bytes_sent=len(metric))) + # assert that _last_flush_time has been updated + self.assertTrue(time1 < dogstatsd._last_flush_time) + + def test_dedicated_udp_telemetry_dest(self): + listener_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + listener_sock.bind(('localhost', 0)) + + def wait_for_data(): + global udp_thread_telemetry_data + udp_thread_telemetry_data = listener_sock.recvfrom(UDP_OPTIMAL_PAYLOAD_LENGTH)[0].decode('utf-8') + + with closing(listener_sock): + port = 
listener_sock.getsockname()[1] + + dogstatsd = DogStatsd( + host="localhost", + port=12345, + telemetry_min_flush_interval=0, + telemetry_host="localhost", + telemetry_port=port, + ) + + server = threading.Thread(target=wait_for_data) + server.start() + + dogstatsd.increment('abc') + + server.join(3) + + expected_telemetry = telemetry_metrics(metrics=1, packets_sent=1, bytes_sent=8) + self.assertEqual(udp_thread_telemetry_data, expected_telemetry) + + def test_dedicated_udp6_telemetry_dest(self): + listener_sock = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM) + listener_sock.bind(('localhost', 0)) + + def wait_for_data(): + global udp_thread_telemetry_data + udp_thread_telemetry_data = listener_sock.recvfrom(UDP_OPTIMAL_PAYLOAD_LENGTH)[0].decode('utf-8') + + with closing(listener_sock): + port = listener_sock.getsockname()[1] + + dogstatsd = DogStatsd( + host="localhost", + port=12345, + telemetry_min_flush_interval=0, + telemetry_host="::1", # use explicit address, localhost may resolve to v4. 
+ telemetry_port=port, + ) + + server = threading.Thread(target=wait_for_data) + server.start() + + dogstatsd.increment('abc') + + server.join(3) + + expected_telemetry = telemetry_metrics(metrics=1, packets_sent=1, bytes_sent=8) + self.assertEqual(udp_thread_telemetry_data, expected_telemetry) + + def test_dedicated_uds_telemetry_dest(self): + tempdir = tempfile.mkdtemp() + socket_path = os.path.join(tempdir, 'socket.sock') + + listener_sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) + listener_sock.bind(socket_path) + + def wait_for_data(): + global uds_thread_telemetry_data + uds_thread_telemetry_data = listener_sock.recvfrom(UDS_OPTIMAL_PAYLOAD_LENGTH)[0].decode('utf-8') + + with closing(listener_sock): + dogstatsd = DogStatsd( + host="localhost", + port=12345, + telemetry_min_flush_interval=0, + telemetry_socket_path=socket_path, + ) + + server = threading.Thread(target=wait_for_data) + server.start() + + dogstatsd.increment('def') + + server.join(3) + + expected_telemetry = telemetry_metrics(metrics=1, packets_sent=1, bytes_sent=8) + self.assertEqual(uds_thread_telemetry_data, expected_telemetry) + + shutil.rmtree(tempdir) def test_context_manager(self): fake_socket = FakeSocket() - with DogStatsd() as statsd: - statsd.socket = fake_socket - statsd.gauge('page.views', 123) - statsd.timing('timer', 123) + with DogStatsd(telemetry_min_flush_interval=0) as dogstatsd: + dogstatsd.socket = fake_socket + dogstatsd.gauge('page.views', 123) + dogstatsd.timing('timer', 123) + dogstatsd.increment('my_counter', 3) + + metric1 = "page.views:123|g" + metric2 = "timer:123|ms" + metric3 = "my_counter:3|c" + + metrics = '\n'.join([metric1, metric2, metric3]) + "\n" + self.assertEqual(metrics, fake_socket.recv(no_wait=True)) + + metrics_packet = telemetry_metrics( + metrics=3, + bytes_sent=len(metrics), + packets_sent=1, + ) + self.assertEqual(metrics_packet, fake_socket.recv(no_wait=True)) + + def test_context_manager_restores_enabled_buffering_state(self): + 
fake_socket = FakeSocket() + dogstatsd = DogStatsd(telemetry_min_flush_interval=0, disable_buffering=False) + dogstatsd.socket = fake_socket - t.assert_equal('page.views:123|g\ntimer:123|ms', fake_socket.recv()) + with dogstatsd: + dogstatsd.gauge('page.views', 123) + dogstatsd.timing('timer', 123) + + dogstatsd.gauge('newpage.views', 123) + dogstatsd.timing('newtimer', 123) + + metric1 = "page.views:123|g" + metric2 = "timer:123|ms" + metric3 = "newpage.views:123|g" + metric4 = "newtimer:123|ms" + + metrics1 = '\n'.join([metric1, metric2]) + "\n" + self.assertEqual(metrics1, fake_socket.recv(no_wait=True)) + + metrics_packet1 = telemetry_metrics(metrics=2, bytes_sent=len(metrics1), packets_sent=1) + self.assertEqual(metrics_packet1, fake_socket.recv(no_wait=True)) + + metrics2 = '\n'.join([metric3, metric4]) + "\n" + metrics_packet2 = telemetry_metrics(metrics=2, bytes_sent=len(metrics_packet1 + metrics2), packets_sent=2) + self.assertEqual(metrics2, fake_socket.recv(reset_wait=True)) + self.assertEqual(metrics_packet2, fake_socket.recv()) + + def test_context_manager_restores_disabled_buffering_state(self): + fake_socket = FakeSocket() + dogstatsd = DogStatsd(telemetry_min_flush_interval=0, disable_buffering=True) + dogstatsd.socket = fake_socket + + with dogstatsd: + dogstatsd.gauge('page.views', 123) + dogstatsd.timing('timer', 123) + + dogstatsd.gauge('newpage.views', 123) + dogstatsd.timing('newtimer', 123) + + metric1 = "page.views:123|g" + metric2 = "timer:123|ms" + metric3 = "newpage.views:123|g" + metric4 = "newtimer:123|ms" + + metrics1 = '\n'.join([metric1, metric2]) + "\n" + self.assertEqual(metrics1, fake_socket.recv(no_wait=True)) + + metrics_packet1 = telemetry_metrics(metrics=2, bytes_sent=len(metrics1), packets_sent=1) + self.assertEqual(metrics_packet1, fake_socket.recv(no_wait=True)) + + metrics2 = '\n'.join([metric3]) + "\n" + metrics_packet2 = telemetry_metrics(metrics=1, bytes_sent=len(metrics_packet1 + metrics2), packets_sent=2) + 
self.assertEqual(metrics2, fake_socket.recv()) + self.assertEqual(metrics_packet2, fake_socket.recv(no_wait=True)) + + metrics3 = '\n'.join([metric4]) + "\n" + metrics_packet3 = telemetry_metrics(metrics=1, bytes_sent=len(metrics_packet2 + metrics3), packets_sent=2) + self.assertEqual(metrics3, fake_socket.recv()) + self.assertEqual(metrics_packet3, fake_socket.recv(no_wait=True)) def test_batched_buffer_autoflush(self): fake_socket = FakeSocket() - with DogStatsd() as statsd: - statsd.socket = fake_socket - for i in range(51): - statsd.increment('mycounter') - t.assert_equal('\n'.join(['mycounter:1|c' for i in range(50)]), fake_socket.recv()) + bytes_sent = 0 + with DogStatsd(telemetry_min_flush_interval=0, disable_buffering=False) as dogstatsd: + dogstatsd.socket = fake_socket + + self.assertEqual(dogstatsd._max_payload_size, UDP_OPTIMAL_PAYLOAD_LENGTH) + + single_metric = 'mycounter:1|c\n' + metrics_per_packet = dogstatsd._max_payload_size // len(single_metric) + for _ in range(metrics_per_packet + 1): + dogstatsd.increment('mycounter') + payload = ''.join([single_metric for _ in range(metrics_per_packet)]) - t.assert_equal('mycounter:1|c', fake_socket.recv()) + telemetry = telemetry_metrics( + metrics=metrics_per_packet+1, + bytes_sent=len(payload), + ) + bytes_sent += len(payload) + len(telemetry) + self.assertEqual(payload, fake_socket.recv()) + self.assertEqual(telemetry, fake_socket.recv()) + + self.assertEqual(single_metric, fake_socket.recv()) + + telemetry = telemetry_metrics(metrics=0, packets_sent=2, bytes_sent=len(single_metric) + len(telemetry)) + self.assertEqual(telemetry, fake_socket.recv()) def test_module_level_instance(self): - t.assert_true(isinstance(statsd, DogStatsd)) + self.assertTrue(isinstance(statsd, DogStatsd)) def test_instantiating_does_not_connect(self): dogpound = DogStatsd() - t.assert_equal(None, dogpound.socket) + self.assertIsNone(dogpound.socket) def test_accessing_socket_opens_socket(self): dogpound = DogStatsd() try: - 
t.assert_not_equal(None, dogpound.get_socket()) + self.assertIsNotNone(dogpound.get_socket()) finally: dogpound.socket.close() @@ -248,13 +1665,407 @@ def test_accessing_socket_multiple_times_returns_same_socket(self): dogpound = DogStatsd() fresh_socket = FakeSocket() dogpound.socket = fresh_socket - t.assert_equal(fresh_socket, dogpound.get_socket()) - t.assert_not_equal(FakeSocket(), dogpound.get_socket()) + self.assertEqual(fresh_socket, dogpound.get_socket()) + self.assertNotEqual(FakeSocket(), dogpound.get_socket()) + + def test_tags_from_environment(self): + with preserve_environment_variable('DATADOG_TAGS'): + os.environ['DATADOG_TAGS'] = 'country:china,age:45,blue' + dogstatsd = DogStatsd(telemetry_min_flush_interval=0) + dogstatsd.socket = FakeSocket() + dogstatsd.gauge('gt', 123.4) + metric = 'gt:123.4|g|#country:china,age:45,blue\n' + self.assertEqual(metric, dogstatsd.socket.recv()) + self.assertEqual(telemetry_metrics(tags="country:china,age:45,blue", bytes_sent=len(metric)), dogstatsd.socket.recv()) + + def test_tags_from_environment_and_constant(self): + with preserve_environment_variable('DATADOG_TAGS'): + os.environ['DATADOG_TAGS'] = 'country:china,age:45,blue' + dogstatsd = DogStatsd(constant_tags=['country:canada', 'red'], telemetry_min_flush_interval=0) + dogstatsd.socket = FakeSocket() + dogstatsd.gauge('gt', 123.4) + tags = "country:canada,red,country:china,age:45,blue" + metric = 'gt:123.4|g|#' + tags + '\n' + self.assertEqual(metric, dogstatsd.socket.recv()) + self.assertEqual(telemetry_metrics(tags=tags, bytes_sent=len(metric)), dogstatsd.socket.recv()) + + def test_entity_id_and_container_id(self): + with preserve_environment_variable('DD_ENTITY_ID'): + os.environ['DD_ENTITY_ID'] = '04652bb7-19b7-11e9-9cc6-42010a9c016d' + dogstatsd = DogStatsd(telemetry_min_flush_interval=0) + dogstatsd.socket = FakeSocket() + dogstatsd._container_id = "ci-fake-container-id" + + dogstatsd.increment("page.views") + dogstatsd.flush() + tags = 
"dd.internal.entity_id:04652bb7-19b7-11e9-9cc6-42010a9c016d" + metric = 'page.views:1|c|#' + tags + '|c:ci-fake-container-id\n' + self.assertEqual(metric, dogstatsd.socket.recv()) + self.assertEqual(telemetry_metrics(tags=tags, bytes_sent=len(metric)), dogstatsd.socket.recv()) + + def test_entity_id_and_container_id_and_external_env(self): + with preserve_environment_variable('DD_ENTITY_ID'), preserve_environment_variable('DD_EXTERNAL_ENV'): + os.environ['DD_ENTITY_ID'] = '04652bb7-19b7-11e9-9cc6-42010a9c016d' + os.environ['DD_EXTERNAL_ENV'] = 'it-false,cn-container-name,pu-04652bb7-19b7-11e9-9cc6-42010a9c016d' + dogstatsd = DogStatsd(telemetry_min_flush_interval=0) + dogstatsd.socket = FakeSocket() + dogstatsd._container_id = "ci-fake-container-id" + + dogstatsd.increment("page.views") + dogstatsd.flush() + tags = "dd.internal.entity_id:04652bb7-19b7-11e9-9cc6-42010a9c016d" + metric = 'page.views:1|c|#' + tags + '|c:ci-fake-container-id' + '|e:it-false,cn-container-name,pu-04652bb7-19b7-11e9-9cc6-42010a9c016d' + '\n' + self.assertEqual(metric, dogstatsd.socket.recv()) + self.assertEqual(telemetry_metrics(tags=tags, bytes_sent=len(metric)), dogstatsd.socket.recv()) + + def test_entity_tag_from_environment(self): + with preserve_environment_variable('DD_ENTITY_ID'): + os.environ['DD_ENTITY_ID'] = '04652bb7-19b7-11e9-9cc6-42010a9c016d' + dogstatsd = DogStatsd(telemetry_min_flush_interval=0) + dogstatsd.socket = FakeSocket() + dogstatsd.gauge('gt', 123.4) + metric = 'gt:123.4|g|#dd.internal.entity_id:04652bb7-19b7-11e9-9cc6-42010a9c016d\n' + self.assertEqual(metric, dogstatsd.socket.recv()) + self.assertEqual( + telemetry_metrics(tags="dd.internal.entity_id:04652bb7-19b7-11e9-9cc6-42010a9c016d", bytes_sent=len(metric)), + dogstatsd.socket.recv()) + + def test_entity_tag_from_environment_and_constant(self): + with preserve_environment_variable('DD_ENTITY_ID'): + os.environ['DD_ENTITY_ID'] = '04652bb7-19b7-11e9-9cc6-42010a9c016d' + dogstatsd = 
DogStatsd(constant_tags=['country:canada', 'red'], telemetry_min_flush_interval=0) + dogstatsd.socket = FakeSocket() + dogstatsd.gauge('gt', 123.4) + metric = 'gt:123.4|g|#country:canada,red,dd.internal.entity_id:04652bb7-19b7-11e9-9cc6-42010a9c016d\n' + self.assertEqual(metric, dogstatsd.socket.recv()) + self.assertEqual( + telemetry_metrics(tags="country:canada,red,dd.internal.entity_id:04652bb7-19b7-11e9-9cc6-42010a9c016d", + bytes_sent=len(metric)), + dogstatsd.socket.recv() + ) + + def test_entity_tag_and_tags_from_environment_and_constant(self): + with preserve_environment_variable('DATADOG_TAGS'): + os.environ['DATADOG_TAGS'] = 'country:china,age:45,blue' + with preserve_environment_variable('DD_ENTITY_ID'): + os.environ['DD_ENTITY_ID'] = '04652bb7-19b7-11e9-9cc6-42010a9c016d' + dogstatsd = DogStatsd(constant_tags=['country:canada', 'red'], telemetry_min_flush_interval=0) + dogstatsd.socket = FakeSocket() + dogstatsd.gauge('gt', 123.4) + tags = "country:canada,red,country:china,age:45,blue,dd.internal.entity_id:04652bb7-19b7-11e9-9cc6-42010a9c016d" + metric = 'gt:123.4|g|#' + tags + '\n' + self.assertEqual(metric, dogstatsd.socket.recv()) + self.assertEqual(telemetry_metrics(tags=tags, bytes_sent=len(metric)), dogstatsd.socket.recv()) + + def test_dogstatsd_initialization_with_dd_env_service_version(self): + """ + Dogstatsd should automatically use DD_ENV, DD_SERVICE, and DD_VERSION (if present) + to set {env, service, version} as global tags for all metrics emitted. + """ + cases = [ + # Test various permutations of setting DD_* env vars, as well as other global tag configuration. + # An empty string signifies that the env var either isn't set or that it is explicitly set to empty string. 
+ ('', '', '', '', [], []), + ('prod', '', '', '', [], ['env:prod']), + ('prod', 'dog', '', '', [], ['env:prod', 'service:dog']), + ('prod', 'dog', 'abc123', '', [], ['env:prod', 'service:dog', 'version:abc123']), + ('prod', 'dog', 'abc123', 'env:prod,type:app', [], ['env:prod', 'env:prod', 'service:dog', 'type:app', 'version:abc123']), + ('prod', 'dog', 'abc123', 'env:prod2,type:app', [], ['env:prod', 'env:prod2', 'service:dog', 'type:app', 'version:abc123']), + ('prod', 'dog', 'abc123', '', ['env:prod', 'type:app'], ['env:prod', 'env:prod', 'service:dog', 'type:app', 'version:abc123']), + ('prod', 'dog', 'abc123', '', ['env:prod2', 'type:app'], ['env:prod', 'env:prod2', 'service:dog', 'type:app', 'version:abc123']), + ('prod', 'dog', 'abc123', 'env:prod3,custom_tag:cat', ['env:prod2', 'type:app'], ['custom_tag:cat', 'env:prod', 'env:prod2', 'env:prod3', 'service:dog', 'type:app', 'version:abc123']), + ] + for case in cases: + dd_env, dd_service, dd_version, datadog_tags, constant_tags, global_tags = case + with EnvVars( + env_vars={ + 'DATADOG_TAGS': datadog_tags, + 'DD_ENV': dd_env, + 'DD_SERVICE': dd_service, + 'DD_VERSION': dd_version, + } + ): + dogstatsd = DogStatsd(constant_tags=constant_tags, telemetry_min_flush_interval=0) + dogstatsd.socket = FakeSocket() + + # Guarantee consistent ordering, regardless of insertion order. + dogstatsd.constant_tags.sort() + self.assertEqual(global_tags, dogstatsd.constant_tags) + + # Make call with no tags passed; only the globally configured tags will be used. + global_tags_str = ','.join([t for t in global_tags]) + dogstatsd.gauge('gt', 123.4) + dogstatsd.flush() + + # Protect against the no tags case. 
+ metric = 'gt:123.4|g|#{}\n'.format(global_tags_str) if global_tags_str else 'gt:123.4|g\n' + self.assertEqual(metric, dogstatsd.socket.recv()) + self.assertEqual( + telemetry_metrics( + tags=global_tags_str, + bytes_sent=len(metric) + ), + dogstatsd.socket.recv(), + ) + dogstatsd._reset_telemetry() + + # Make another call with local tags passed. + passed_tags = ['env:prod', 'version:def456', 'custom_tag:toad'] + all_tags_str = ','.join([t for t in passed_tags + global_tags]) + dogstatsd.gauge('gt', 123.4, tags=passed_tags) + dogstatsd.flush() + + metric = 'gt:123.4|g|#{}\n'.format(all_tags_str) + self.assertEqual(metric, dogstatsd.socket.recv()) + self.assertEqual( + telemetry_metrics( + tags=global_tags_str, + bytes_sent=len(metric), + ), + dogstatsd.socket.recv(), + ) + + def test_default_max_udp_packet_size(self): + dogstatsd = DogStatsd(disable_buffering=False, flush_interval=10000, disable_telemetry=True) + dogstatsd.socket = FakeSocket() + + for _ in range(10000): + dogstatsd.increment('val') + + payload = dogstatsd.socket.recv() + self.assertIsNotNone(payload) + while payload is not None: + payload_size = len(payload) + self.assertLessEqual(payload_size, UDP_OPTIMAL_PAYLOAD_LENGTH) + self.assertGreater(payload_size, UDP_OPTIMAL_PAYLOAD_LENGTH - 100) + + payload = dogstatsd.socket.recv() + + def test_default_max_uds_packet_size(self): + dogstatsd = DogStatsd( + disable_buffering=False, + socket_path="fake", + flush_interval=10000, + disable_telemetry=True, + ) + dogstatsd.socket = FakeSocket() + + for _ in range(10000): + dogstatsd.increment('val') + + payload = dogstatsd.socket.recv() + self.assertIsNotNone(payload) + while payload is not None: + payload_size = len(payload) + self.assertLessEqual(payload_size, UDS_OPTIMAL_PAYLOAD_LENGTH) + self.assertGreater(payload_size, UDS_OPTIMAL_PAYLOAD_LENGTH - 100) + + payload = dogstatsd.socket.recv() + + def test_custom_max_packet_size(self): + dogstatsd = DogStatsd( + disable_buffering=False, + 
max_buffer_len=4000, + flush_interval=10000, + disable_telemetry=True, + ) + dogstatsd.socket = FakeSocket() + + for _ in range(10000): + dogstatsd.increment('val') + + payload = dogstatsd.socket.recv() + self.assertIsNotNone(payload) + while payload is not None: + payload_size = len(payload) + self.assertLessEqual(payload_size, 4000) + self.assertGreater(payload_size, 3900) + + payload = dogstatsd.socket.recv() + + def test_gauge_does_not_send_none(self): + self.statsd.gauge('metric', None) + self.assertIsNone(self.recv()) + + def test_increment_does_not_send_none(self): + self.statsd.increment('metric', None) + self.assertIsNone(self.recv()) + + def test_decrement_does_not_send_none(self): + self.statsd.decrement('metric', None) + self.assertIsNone(self.recv()) + + def test_timing_does_not_send_none(self): + self.statsd.timing('metric', None) + self.assertIsNone(self.recv()) + + def test_histogram_does_not_send_none(self): + self.statsd.histogram('metric', None) + self.assertIsNone(self.recv()) + + def test_set_with_container_field(self): + self.statsd._container_id = "ci-fake-container-id" + self.statsd.set("set", 123) + self.assert_equal_telemetry("set:123|s|c:ci-fake-container-id\n", self.recv(2)) + self.statsd._container_id = None + + def test_gauge_with_container_field(self): + self.statsd._container_id = "ci-fake-container-id" + self.statsd.gauge("gauge", 123.4) + self.assert_equal_telemetry("gauge:123.4|g|c:ci-fake-container-id\n", self.recv(2)) + self.statsd._container_id = None + + def test_counter_with_container_field(self): + self.statsd._container_id = "ci-fake-container-id" + + self.statsd.increment("page.views") + self.statsd.flush() + self.assert_equal_telemetry("page.views:1|c|c:ci-fake-container-id\n", self.recv(2)) + + self.statsd._reset_telemetry() + self.statsd.increment("page.views", 11) + self.statsd.flush() + self.assert_equal_telemetry("page.views:11|c|c:ci-fake-container-id\n", self.recv(2)) + + self.statsd._reset_telemetry() + 
self.statsd.decrement("page.views") + self.statsd.flush() + self.assert_equal_telemetry("page.views:-1|c|c:ci-fake-container-id\n", self.recv(2)) + + self.statsd._reset_telemetry() + self.statsd.decrement("page.views", 12) + self.statsd.flush() + self.assert_equal_telemetry("page.views:-12|c|c:ci-fake-container-id\n", self.recv(2)) + + self.statsd._container_id = None + + def test_histogram_with_container_field(self): + self.statsd._container_id = "ci-fake-container-id" + self.statsd.histogram("histo", 123.4) + self.assert_equal_telemetry("histo:123.4|h|c:ci-fake-container-id\n", self.recv(2)) + self.statsd._container_id = None + + def test_timing_with_container_field(self): + self.statsd._container_id = "ci-fake-container-id" + self.statsd.timing("t", 123) + self.assert_equal_telemetry("t:123|ms|c:ci-fake-container-id\n", self.recv(2)) + self.statsd._container_id = None + + def test_event_with_container_field(self): + self.statsd._container_id = "ci-fake-container-id" + self.statsd.event( + "Title", + "L1\nL2", + priority="low", + date_happened=1375296969, + ) + event2 = u"_e{5,6}:Title|L1\\nL2|d:1375296969|p:low|c:ci-fake-container-id\n" + self.assert_equal_telemetry( + event2, + self.recv(2), + telemetry=telemetry_metrics( + metrics=0, + events=1, + bytes_sent=len(event2), + ), + ) + + self.statsd._reset_telemetry() + + self.statsd.event("Title", u"♬ †øU †øU ¥ºu T0µ ♪", aggregation_key="key", tags=["t1", "t2:v2"]) + event3 = u"_e{5,32}:Title|♬ †øU †øU ¥ºu T0µ ♪|k:key|#t1,t2:v2|c:ci-fake-container-id\n" + self.assert_equal_telemetry( + event3, + self.recv(2, reset_wait=True), + telemetry=telemetry_metrics( + metrics=0, + events=1, + bytes_sent=len(event3), + ), + ) + self.statsd._container_id = None + + def test_service_check_with_container_field(self): + self.statsd._container_id = "ci-fake-container-id" + now = int(time.time()) + self.statsd.service_check( + "my_check.name", + self.statsd.WARNING, + tags=["key1:val1", "key2:val2"], + timestamp=now, + 
hostname=u"i-abcd1234", + message=u"♬ †øU \n†øU ¥ºu|m: T0µ ♪", + ) + check = u'_sc|my_check.name|{0}|d:{1}|h:i-abcd1234|#key1:val1,key2:val2|m:{2}|c:ci-fake-container-id\n'.format( + self.statsd.WARNING, now, u'♬ †øU \\n†øU ¥ºu|m\\: T0µ ♪' + ) + self.assert_equal_telemetry( + check, + self.recv(2), + telemetry=telemetry_metrics( + metrics=0, + service_checks=1, + bytes_sent=len(check), + ), + ) + self.statsd._container_id = None + + def test_sender_mode(self): + statsd = DogStatsd(disable_background_sender=True) + self.assertIsNone(statsd._queue) + + statsd.enable_background_sender() + self.assertIsNotNone(statsd._queue) + + statsd = DogStatsd(disable_background_sender=False) + self.assertIsNotNone(statsd._queue) + + def test_sender_calls_task_done(self): + statsd = DogStatsd(disable_background_sender=False) + statsd.socket = OverflownSocket() + statsd.increment("test.metric") + statsd.wait_for_pending() + + def test_sender_queue_no_timeout(self): + statsd = DogStatsd(disable_background_sender=False, sender_queue_timeout=None) + + def test_set_socket_timeout(self): + statsd = DogStatsd(disable_background_sender=False) + statsd.socket = FakeSocket() + statsd.set_socket_timeout(1) + self.assertEqual(statsd.socket.timeout, 1) + self.assertEqual(statsd.socket_timeout, 1) + + def test_telemetry_api(self): + statsd = DogStatsd(disable_background_sender=False) + + self.assertEqual(statsd.metrics_count, 0) + self.assertEqual(statsd.events_count, 0) + self.assertEqual(statsd.service_checks_count, 0) + self.assertEqual(statsd.bytes_sent, 0) + self.assertEqual(statsd.bytes_dropped, 0) + self.assertEqual(statsd.bytes_dropped_queue, 0) + self.assertEqual(statsd.bytes_dropped_writer, 0) + self.assertEqual(statsd.packets_sent, 0) + self.assertEqual(statsd.packets_dropped, 0) + self.assertEqual(statsd.packets_dropped_queue, 0) + self.assertEqual(statsd.packets_dropped_writer, 0) + def test_max_payload_size(self): + statsd = DogStatsd(socket_path=None, port=8125) + 
self.assertEqual(statsd._max_payload_size, UDP_OPTIMAL_PAYLOAD_LENGTH) + statsd.socket_path = "/foo" + self.assertEqual(statsd._max_payload_size, UDS_OPTIMAL_PAYLOAD_LENGTH) -if __name__ == '__main__': - statsd = statsd - while True: - statsd.gauge('test.gauge', 1) - statsd.increment('test.count', 2) - time.sleep(0.05) + def test_post_fork_locks(self): + def inner(): + statsd = DogStatsd(socket_path=None, port=8125) + # Statsd should survive this sequence of events + statsd.pre_fork() + statsd.get_socket() + statsd.post_fork_parent() + t = Thread(target=inner) + t.daemon = True + t.start() + t.join(timeout=5) + self.assertFalse(t.is_alive()) diff --git a/tests/unit/dogwrap/__init__.py b/tests/unit/dogwrap/__init__.py new file mode 100644 index 000000000..b3017a1db --- /dev/null +++ b/tests/unit/dogwrap/__init__.py @@ -0,0 +1,3 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc diff --git a/tests/unit/dogwrap/fixtures/proc_out.txt b/tests/unit/dogwrap/fixtures/proc_out.txt new file mode 100644 index 000000000..feb4f82ef --- /dev/null +++ b/tests/unit/dogwrap/fixtures/proc_out.txt @@ -0,0 +1,5 @@ +Starting Unicode Test +From ruby: Michélle +From ruby: helløøééé + Ið pri qūāeqūe periculīs sælutǽtus +Completed DD Unicode Test diff --git a/tests/unit/dogwrap/test_dogwrap.py b/tests/unit/dogwrap/test_dogwrap.py new file mode 100644 index 000000000..e0c72569b --- /dev/null +++ b/tests/unit/dogwrap/test_dogwrap.py @@ -0,0 +1,254 @@ +# coding: utf8 + +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +import unittest +import mock +import os +import tempfile + +from datadog.dogshell.wrap import OutputReader, build_event_body, parse_options, execute, Timeout, poll_proc +from datadog.util.compat import is_p3k + + +HERE = os.path.dirname(os.path.abspath(__file__)) + + +class TestDogwrap(unittest.TestCase): + def test_output_reader(self): + with open(os.path.join(HERE, "fixtures", "proc_out.txt"), 'rb') as cmd_out: + content = cmd_out.read() + + with tempfile.TemporaryFile() as fwd_out: + reader = OutputReader(open(os.path.join(HERE, "fixtures", "proc_out.txt"), 'rb'), fwd_out) + reader.start() + reader.join() + self.assertIsInstance(reader.content, bytes) + self.assertEqual(reader.content, content) + fwd_out.seek(0, 0) + self.assertEqual(reader.content, fwd_out.read()) + + def test_build_event_body(self): + # Only cmd is already unicode, the rest is decoded in the function + cmd = u"yö dudes" + returncode = 0 + stdout = b"s\xc3\xb9p\xaa" + stderr = b"d\xc3\xa0wg\xaa" + notifications = b"@m\xc3\xa9\xaa" + expected_body = u"%%%\n" \ + u"**>>>> CMD <<<<**\n```\nyö dudes \n```\n" \ + u"**>>>> EXIT CODE <<<<**\n\n 0\n\n\n" \ + u"**>>>> STDOUT <<<<**\n```\nsùp\ufffd \n```\n" \ + u"**>>>> STDERR <<<<**\n```\ndàwg\ufffd \n```\n" \ + u"**>>>> NOTIFICATIONS <<<<**\n\n @mé\ufffd\n" \ + u"%%%\n" + + event_body = build_event_body(cmd, returncode, stdout, stderr, notifications) + self.assertEqual(expected_body, event_body) + + # notifications can be unicode already in py3, make sure we don't try decoding + notifications = notifications.decode("utf-8", "replace") + event_body = build_event_body(cmd, returncode, stdout, stderr, notifications) + self.assertEqual(expected_body, event_body) + + def test_parse_options(self): + options, cmd = parse_options([]) + self.assertEqual(cmd, '') + + # The output of parse_args is already unicode in python 3, so don't encode the input + if is_p3k(): + arg = u'helløøééé' + else: + arg = 
u'helløøééé'.encode('utf-8') + + options, cmd = parse_options(['-n', 'name', '-k', 'key', '-m', 'all', '-p', 'low', '-t', '123', + '--sigterm_timeout', '456', '--sigkill_timeout', '789', + '--proc_poll_interval', '1.5', '--notify_success', 'success', + '--notify_error', 'error', '-b', '--tags', 'k1:v1,k2:v2', + 'echo', arg]) + self.assertEqual(cmd, u'echo helløøééé') + self.assertEqual(options.name, 'name') + self.assertEqual(options.api_key, 'key') + self.assertEqual(options.submit_mode, 'all') + self.assertEqual(options.priority, 'low') + self.assertEqual(options.timeout, 123) + self.assertEqual(options.sigterm_timeout, 456) + self.assertEqual(options.sigkill_timeout, 789) + self.assertEqual(options.proc_poll_interval, 1.5) + self.assertEqual(options.notify_success, 'success') + self.assertEqual(options.notify_error, 'error') + self.assertTrue(options.buffer_outs) + self.assertEqual(options.tags, 'k1:v1,k2:v2') + + with self.assertRaises(SystemExit): + parse_options(['-m', 'invalid']) + + with self.assertRaises(SystemExit): + parse_options(['-p', 'invalid']) + + with self.assertRaises(SystemExit): + parse_options(['-t', 'invalid']) + + with self.assertRaises(SystemExit): + parse_options(['--sigterm_timeout', 'invalid']) + + with self.assertRaises(SystemExit): + parse_options(['--sigkill_timeout', 'invalid']) + + with self.assertRaises(SystemExit): + parse_options(['--proc_poll_interval', 'invalid']) + + with mock.patch.dict(os.environ, values={"DD_API_KEY": "the_key"}, clear=True): + options, _ = parse_options([]) + self.assertEqual(options.api_key, "the_key") + + def test_poll_proc(self): + mock_proc = mock.Mock() + mock_proc.poll.side_effect = [None, 0] + + return_value = poll_proc(mock_proc, 0.1, 1) + self.assertEqual(return_value, 0) + self.assertEqual(mock_proc.poll.call_count, 2) + + def test_poll_timeout(self): + mock_proc = mock.Mock() + mock_proc.poll.side_effect = [None, None, None] + + with self.assertRaises(Timeout): + poll_proc(mock_proc, 0.1, 0.2) + 
+ @mock.patch('datadog.dogshell.wrap.poll_proc') + @mock.patch('subprocess.Popen') + def test_execute(self, mock_popen, mock_poll): + mock_proc = mock.Mock() + mock_proc.stdout.readline.side_effect = [b'out1\n', b''] + mock_proc.stderr.readline.side_effect = [b'err1\n', b''] + mock_popen.return_value = mock_proc + mock_poll.return_value = 0 + + return_code, stdout, stderr, duration = execute('foo', 10, 20, 30, 1, False) + self.assertEqual(return_code, 0) + self.assertEqual(stdout, b'out1\n') + self.assertEqual(stderr, b'err1\n') + + mock_popen.assert_called_once() + mock_poll.assert_called_once_with(mock_proc, 1, 10) + mock_proc.terminate.assert_not_called() + mock_proc.kill.assert_not_called() + + @mock.patch('datadog.dogshell.wrap.poll_proc') + @mock.patch('subprocess.Popen') + def test_execute_exit_code(self, mock_popen, mock_poll): + mock_proc = mock.Mock() + mock_proc.stdout.readline.side_effect = [b'out1\n', b'out2\n', b''] + mock_proc.stderr.readline.side_effect = [b'err1\n', b''] + mock_popen.return_value = mock_proc + mock_poll.return_value = 14 + + return_code, stdout, stderr, duration = execute('foo', 10, 20, 30, 1, False) + self.assertEqual(return_code, 14) + self.assertEqual(stdout, b'out1\nout2\n') + self.assertEqual(stderr, b'err1\n') + + mock_popen.assert_called_once() + mock_poll.assert_called_once_with(mock_proc, 1, 10) + mock_proc.terminate.assert_not_called() + mock_proc.kill.assert_not_called() + + @mock.patch('datadog.dogshell.wrap.poll_proc') + @mock.patch('subprocess.Popen') + def test_execute_cmd_timeout(self, mock_popen, mock_poll): + mock_proc = mock.Mock() + mock_proc.stdout.readline.side_effect = [b'out1\n', b'out2\n', b''] + mock_proc.stderr.readline.side_effect = [b'err1\n', b''] + mock_popen.return_value = mock_proc + mock_poll.side_effect = [Timeout, 1] + + return_code, stdout, stderr, duration = execute('foo', 10, 20, 30, 1, False) + self.assertEqual(return_code, Timeout) + self.assertEqual(stdout, b'out1\nout2\n') + 
self.assertEqual(stderr, b'err1\n') + + mock_popen.assert_called_once() + mock_poll.assert_has_calls([ + mock.call(mock_proc, 1, 10), + mock.call(mock_proc, 1, 20) + ]) + mock_proc.terminate.assert_called_once() + mock_proc.kill.assert_not_called() + + @mock.patch('datadog.dogshell.wrap.poll_proc') + @mock.patch('subprocess.Popen') + def test_execute_sigterm_timeout(self, mock_popen, mock_poll): + mock_proc = mock.Mock() + mock_proc.stdout.readline.side_effect = [b'out1\n', b'out2\n', b''] + mock_proc.stderr.readline.side_effect = [b'err1\n', b''] + mock_popen.return_value = mock_proc + mock_poll.side_effect = [Timeout, Timeout, 2] + + return_code, stdout, stderr, duration = execute('foo', 10, 20, 30, 1, False) + self.assertEqual(return_code, Timeout) + self.assertEqual(stdout, b'out1\nout2\n') + self.assertEqual(stderr, b'err1\n') + + mock_popen.assert_called_once() + mock_poll.assert_has_calls([ + mock.call(mock_proc, 1, 10), + mock.call(mock_proc, 1, 20), + mock.call(mock_proc, 1, 30) + ]) + mock_proc.terminate.assert_called_once() + mock_proc.kill.assert_called_once() + + @mock.patch('datadog.dogshell.wrap.poll_proc') + @mock.patch('subprocess.Popen') + def test_execute_sigkill_timeout(self, mock_popen, mock_poll): + mock_proc = mock.Mock() + mock_proc.stdout.readline.side_effect = [b'out1\n', b'out2\n', b''] + mock_proc.stderr.readline.side_effect = [b'err1\n', b''] + mock_popen.return_value = mock_proc + mock_poll.side_effect = [Timeout, Timeout, Timeout] + + return_code, stdout, stderr, duration = execute('foo', 10, 20, 30, 1, False) + self.assertEqual(return_code, Timeout) + self.assertEqual(stdout, b'out1\nout2\n') + self.assertEqual(stderr, b'err1\n') + + mock_popen.assert_called_once() + mock_poll.assert_has_calls([ + mock.call(mock_proc, 1, 10), + mock.call(mock_proc, 1, 20), + mock.call(mock_proc, 1, 30) + ]) + mock_proc.terminate.assert_called_once() + mock_proc.kill.assert_called_once() + + @mock.patch('datadog.dogshell.wrap.poll_proc') + 
@mock.patch('subprocess.Popen') + def test_execute_oserror(self, mock_popen, mock_poll): + mock_proc = mock.Mock() + mock_proc.stdout.readline.side_effect = [b'out1\n', b'out2\n', b''] + mock_proc.stderr.readline.side_effect = [b'err1\n', b''] + mock_popen.return_value = mock_proc + mock_poll.side_effect = [Timeout, Timeout] + mock_proc.kill.side_effect = OSError(3, 'No process') + return_code, stdout, stderr, duration = execute('foo', 10, 20, 30, 1, False) + self.assertEqual(return_code, Timeout) + self.assertEqual(stdout, b'out1\nout2\n') + self.assertEqual(stderr, b'err1\n') + + mock_popen.assert_called_once() + mock_poll.assert_has_calls([ + mock.call(mock_proc, 1, 10), + mock.call(mock_proc, 1, 20) + ]) + mock_proc.terminate.assert_called_once() + mock_proc.kill.assert_called_once() + + @mock.patch('subprocess.Popen') + def test_execute_popen_fail(self, mock_popen): + mock_popen.side_effect = ValueError('Bad things') + + with self.assertRaises(ValueError): + execute('sleep 1', 10, 1, 1, 1, False) diff --git a/tests/unit/threadstats/__init__.py b/tests/unit/threadstats/__init__.py index e69de29bb..b3017a1db 100644 --- a/tests/unit/threadstats/__init__.py +++ b/tests/unit/threadstats/__init__.py @@ -0,0 +1,3 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc diff --git a/tests/unit/threadstats/test_threadstats.py b/tests/unit/threadstats/test_threadstats.py index 567abd95f..403e2decd 100644 --- a/tests/unit/threadstats/test_threadstats.py +++ b/tests/unit/threadstats/test_threadstats.py @@ -1,34 +1,44 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc """ Tests for the ThreadStats class, using HTTP mode """ +# stdlib import logging +import os import random import time -import threading +import unittest -import nose.tools as nt -from nose.plugins.skip import SkipTest - -from datadog import ThreadStats -from datadog.api.exceptions import ApiNotInitialized +# 3p +from mock import patch +# datadog +from datadog import ThreadStats, lambda_metric, datadog_lambda_wrapper +from datadog.threadstats.aws_lambda import _get_lambda_stats +from tests.util.contextmanagers import preserve_environment_variable, EnvVars # Silence the logger. logger = logging.getLogger('dd.datadogpy') logger.setLevel(logging.ERROR) -# -# Test fixtures. -# - class MemoryReporter(object): - """ A reporting class that reports to memory for testing. """ + """ + A reporting class that reports to memory for testing. + """ def __init__(self): + self.distributions = [] self.metrics = [] self.events = [] + self.dist_flush_counter = 0 + + def flush_distributions(self, distributions): + self.distributions += distributions + self.dist_flush_counter = self.dist_flush_counter + 1 def flush_metrics(self, metrics): self.metrics += metrics @@ -37,14 +47,20 @@ def flush_events(self, events): self.events += events -# -# Unit tests. -# -class TestUnitThreadStats(object): - """ Unit tests for the dog stats api. """ +class TestUnitThreadStats(unittest.TestCase): + """ + Unit tests for ThreadStats. + """ + def setUp(self): + """ + Set a mocked reporter. + """ + self.reporter = MemoryReporter() def sort_metrics(self, metrics): - """ Sort metrics by timestamp of first point and then name """ + """ + Sort metrics by timestamp of first point and then name. 
+ """ def sort(metric): tags = metric['tags'] or [] host = metric['host'] or '' @@ -52,6 +68,51 @@ def sort(metric): metric['points'][0][1]) return sorted(metrics, key=sort) + def assertMetric(self, name=None, value=None, tags=None, count=None): + """ + Helper, to make assertions on metrics. + """ + matching_metrics = [] + + for metric in self.reporter.metrics: + if name and name != metric['metric']: + continue + if value and value != metric['points'][0][1]: + continue + if tags and tags != metric['tags']: + continue + matching_metrics.append(metric) + + if count: + self.assertEqual( + len(matching_metrics), count, + u"Candidate size assertion failure: expected {expected}, found {count}. " + u"Metric name={name}, value={value}, tags={tags}.".format( + expected=count, count=len(matching_metrics), + name=name, value=value, tags=tags + ) + ) + else: + self.assertTrue( + len(matching_metrics) > 0, + u"Candidate size assertion failure: no matching metric found. " + u"Metric name={name}, value={value}, tags={tags}.".format( + name=name, value=value, tags=tags + ) + ) + + def test_init(self): + # Test compress_payload setting + t = ThreadStats(compress_payload=True) + t.start() + assert t.reporter.compress_payload is True + t.stop() + # Default value + t = ThreadStats() + t.start() + assert t.reporter.compress_payload is False + t.stop() + def test_timed_decorator(self): dog = ThreadStats() dog.start(roll_up_interval=1, flush_in_thread=False) @@ -62,27 +123,67 @@ def func(a, b, c=1, d=1): """docstring""" return (a, b, c, d) - nt.assert_equal(func.__name__, 'func') - nt.assert_equal(func.__doc__, 'docstring') + assert func.__name__ == 'func' + assert func.__doc__ == 'docstring' result = func(1, 2, d=3) # Assert it handles args and kwargs correctly. - nt.assert_equal(result, (1, 2, 1, 3)) + assert result == (1, 2, 1, 3) time.sleep(1) # Argh. I hate this. 
dog.flush() metrics = self.sort_metrics(reporter.metrics) - nt.assert_equal(len(metrics), 8) + assert len(metrics) == 8 (_, _, _, _, avg, count, max_, min_) = metrics - nt.assert_equal(avg['metric'], 'timed.test.avg') - nt.assert_equal(count['metric'], 'timed.test.count') - nt.assert_equal(max_['metric'], 'timed.test.max') - nt.assert_equal(min_['metric'], 'timed.test.min') + assert avg['metric'] == 'timed.test.avg' + assert count['metric'] == 'timed.test.count' + assert max_['metric'] == 'timed.test.max' + assert min_['metric'] == 'timed.test.min' def test_event(self): dog = ThreadStats() dog.start(roll_up_interval=10, flush_in_thread=False) reporter = dog.reporter = MemoryReporter() + # Add two events + event1_title = "Event 1 title" + event2_title = "Event 1 title" + event1_text = "Event 1 text" + event2_text = "Event 2 text" + dog.event(event1_title, event1_text) + # Positional arg names should match statsd + dog.event(title=event2_title, message=event2_text) + + # Flush and test + dog.flush() + event1, event2 = reporter.events + assert event1['title'] == event1_title + assert event1['text'] == event1_text + assert event2['title'] == event2_title + assert event2['text'] == event2_text + + # Test more parameters + reporter.events = [] + event1_priority = "low" + event1_date_happened = 1375296969 + event1_tag = "Event 2 tag" + dog.event(event1_title, event1_text, priority=event1_priority, + date_happened=event1_date_happened, tags=[event1_tag]) + + # Flush and test + dog.flush() + event, = reporter.events + assert event['title'] == event1_title + assert event['text'] == event1_text + assert event['priority'] == event1_priority + assert event['date_happened'] == event1_date_happened + assert event['tags'] == [event1_tag] + + def test_event_constant_tags(self): + constant_tag = 'type:constant' + dog = ThreadStats(constant_tags=[constant_tag]) + dog.start(roll_up_interval=10, flush_in_thread=False) + reporter = dog.reporter = MemoryReporter() + # Add two events 
event1_title = "Event 1 title" event2_title = "Event 1 title" @@ -94,25 +195,30 @@ def test_event(self): # Flush and test dog.flush() event1, event2 = reporter.events - nt.assert_equal(event1['title'], event1_title) - nt.assert_equal(event1['text'], event1_text) - nt.assert_equal(event2['title'], event2_title) - nt.assert_equal(event2['text'], event2_text) + assert event1['title'] == event1_title + assert event1['text'] == event1_text + assert event1['tags'] == [constant_tag] + assert event2['title'] == event2_title + assert event2['text'] == event2_text + assert event2['text'] == event2_text + assert event2['tags'] == [constant_tag] # Test more parameters reporter.events = [] event1_priority = "low" event1_date_happened = 1375296969 + event1_tag = "Event 2 tag" dog.event(event1_title, event1_text, priority=event1_priority, - date_happened=event1_date_happened) + date_happened=event1_date_happened, tags=[event1_tag]) # Flush and test dog.flush() event, = reporter.events - nt.assert_equal(event['title'], event1_title) - nt.assert_equal(event['text'], event1_text) - nt.assert_equal(event['priority'], event1_priority) - nt.assert_equal(event['date_happened'], event1_date_happened) + assert event['title'] == event1_title + assert event['text'] == event1_text + assert event['priority'] == event1_priority + assert event['date_happened'] == event1_date_happened + assert event['tags'] == [event1_tag, constant_tag] def test_histogram(self): dog = ThreadStats() @@ -136,55 +242,55 @@ def test_histogram(self): # Flush and ensure they roll up properly. dog.flush(120.0) metrics = self.sort_metrics(reporter.metrics) - nt.assert_equal(len(metrics), 24) + assert len(metrics) == 24 # Test histograms elsewhere. 
(h1751, h1851, h1951, h1991, h1avg1, h1cnt1, h1max1, h1min1, _, _, _, _, h2avg1, h2cnt1, h2max1, h2min1, h1752, _, _, h1992, h1avg2, h1cnt2, h1max2, h1min2) = metrics - nt.assert_equal(h1avg1['metric'], 'histogram.1.avg') - nt.assert_equal(h1avg1['points'][0][0], 100.0) - nt.assert_equal(h1avg1['points'][0][1], 35) - nt.assert_equal(h1cnt1['metric'], 'histogram.1.count') - nt.assert_equal(h1cnt1['points'][0][0], 100.0) - nt.assert_equal(h1cnt1['points'][0][1], 4) - nt.assert_equal(h1min1['metric'], 'histogram.1.min') - nt.assert_equal(h1min1['points'][0][1], 20) - nt.assert_equal(h1max1['metric'], 'histogram.1.max') - nt.assert_equal(h1max1['points'][0][1], 50) - nt.assert_equal(h1751['metric'], 'histogram.1.75percentile') - nt.assert_equal(h1751['points'][0][1], 40) - nt.assert_equal(h1991['metric'], 'histogram.1.99percentile') - nt.assert_equal(h1991['points'][0][1], 50) - - nt.assert_equal(h1avg2['metric'], 'histogram.1.avg') - nt.assert_equal(h1avg2['points'][0][0], 110.0) - nt.assert_equal(h1avg2['points'][0][1], 40) - nt.assert_equal(h1cnt2['metric'], 'histogram.1.count') - nt.assert_equal(h1cnt2['points'][0][0], 110.0) - nt.assert_equal(h1cnt2['points'][0][1], 3) - nt.assert_equal(h1752['metric'], 'histogram.1.75percentile') - nt.assert_equal(h1752['points'][0][0], 110.0) - nt.assert_equal(h1752['points'][0][1], 40.0) - nt.assert_equal(h1992['metric'], 'histogram.1.99percentile') - nt.assert_equal(h1992['points'][0][0], 110.0) - nt.assert_equal(h1992['points'][0][1], 50.0) - - nt.assert_equal(h2avg1['metric'], 'histogram.2.avg') - nt.assert_equal(h2avg1['points'][0][0], 100.0) - nt.assert_equal(h2avg1['points'][0][1], 40) - nt.assert_equal(h2cnt1['metric'], 'histogram.2.count') - nt.assert_equal(h2cnt1['points'][0][0], 100.0) - nt.assert_equal(h2cnt1['points'][0][1], 1) + assert h1avg1['metric'] == 'histogram.1.avg' + assert h1avg1['points'][0][0] == 100.0 + assert h1avg1['points'][0][1] == 35 + assert h1cnt1['metric'] == 'histogram.1.count' + assert 
h1cnt1['points'][0][0] == 100.0 + assert h1cnt1['points'][0][1] == 0.4 + assert h1min1['metric'] == 'histogram.1.min' + assert h1min1['points'][0][1] == 20 + assert h1max1['metric'] == 'histogram.1.max' + assert h1max1['points'][0][1] == 50 + assert h1751['metric'] == 'histogram.1.75percentile' + assert h1751['points'][0][1] == 40 + assert h1991['metric'] == 'histogram.1.99percentile' + assert h1991['points'][0][1] == 50 + + assert h1avg2['metric'] == 'histogram.1.avg' + assert h1avg2['points'][0][0] == 110.0 + assert h1avg2['points'][0][1] == 40 + assert h1cnt2['metric'] == 'histogram.1.count' + assert h1cnt2['points'][0][0] == 110.0 + assert h1cnt2['points'][0][1] == 0.3 + assert h1752['metric'] == 'histogram.1.75percentile' + assert h1752['points'][0][0] == 110.0 + assert h1752['points'][0][1] == 40.0 + assert h1992['metric'] == 'histogram.1.99percentile' + assert h1992['points'][0][0] == 110.0 + assert h1992['points'][0][1] == 50.0 + + assert h2avg1['metric'] == 'histogram.2.avg' + assert h2avg1['points'][0][0] == 100.0 + assert h2avg1['points'][0][1] == 40 + assert h2cnt1['metric'] == 'histogram.2.count' + assert h2cnt1['points'][0][0] == 100.0 + assert h2cnt1['points'][0][1] == 0.1 # Flush again ensure they're gone. dog.reporter.metrics = [] dog.flush(140.0) - nt.assert_equal(len(dog.reporter.metrics), 8) + assert len(dog.reporter.metrics) == 8 dog.reporter.metrics = [] dog.flush(200.0) - nt.assert_equal(len(dog.reporter.metrics), 0) + assert len(dog.reporter.metrics) == 0 def test_histogram_percentiles(self): dog = ThreadStats() @@ -203,10 +309,10 @@ def test_histogram_percentiles(self): def assert_almost_equal(i, j, e=1): # Floating point math? 
assert abs(i - j) <= e, "%s %s %s" % (i, j, e) - nt.assert_equal(len(metrics), 8) + assert len(metrics) == 8 p75, p85, p95, p99, _, _, _, _ = self.sort_metrics(metrics) - nt.assert_equal(p75['metric'], 'percentiles.75percentile') - nt.assert_equal(p75['points'][0][0], 1000.0) + assert p75['metric'] == 'percentiles.75percentile' + assert p75['points'][0][0] == 1000.0 assert_almost_equal(p75['points'][0][1], 75, 8) assert_almost_equal(p85['points'][0][1], 85, 8) assert_almost_equal(p95['points'][0][1], 95, 8) @@ -226,23 +332,61 @@ def test_gauge(self): # Assert they've been properly flushed. metrics = self.sort_metrics(reporter.metrics) - nt.assert_equal(len(metrics), 2) + assert len(metrics) == 2 (first, second) = metrics - nt.assert_equal(first['metric'], 'test.gauge.1') - nt.assert_equal(first['points'][0][0], 100.0) - nt.assert_equal(first['points'][0][1], 22) - nt.assert_equal(second['metric'], 'test.gauge.2') + assert first['metric'] == 'test.gauge.1' + assert first['points'][0][0] == 100.0 + assert first['points'][0][1] == 22 + assert second['metric'] == 'test.gauge.2' # Flush again and make sure we're progressing. reporter.metrics = [] dog.flush(130.0) - nt.assert_equal(len(reporter.metrics), 1) + assert len(reporter.metrics) == 1 # Finally, make sure we've flushed all metrics. reporter.metrics = [] dog.flush(150.0) - nt.assert_equal(len(reporter.metrics), 0) + assert len(reporter.metrics) == 0 + + def test_set(self): + # Create some fake metrics. + dog = ThreadStats() + dog.start(roll_up_interval=10, flush_in_thread=False) + reporter = dog.reporter = MemoryReporter() + + dog.set('test.set.1', "a string", 100.0) + dog.set('test.set.1', frozenset(), 105.0) + dog.set('test.set.2', 30, 115.0) + dog.set('test.set.3', 30, 125.0) + dog.flush(120.0) + + # Assert they've been properly flushed. 
+ metrics = self.sort_metrics(reporter.metrics) + assert len(metrics) == 2 + + (first, second) = metrics + assert first['metric'] == 'test.set.1' + assert first['points'][0][0] == 100.0 + assert first['points'][0][1] == 2 + assert second['metric'] == 'test.set.2' + assert second['points'][0][0] == 110.0 + assert second['points'][0][1] == 1 + + # Flush again and make sure we're progressing. + reporter.metrics = [] + dog.flush(130.0) + metrics = self.sort_metrics(reporter.metrics) + assert len(metrics) == 1 + assert metrics[0]['metric'] == 'test.set.3' + assert metrics[0]['points'][0][0] == 120.0 + assert metrics[0]['points'][0][1] == 1 + + # Finally, make sure we've flushed all metrics. + reporter.metrics = [] + dog.flush(150.0) + assert len(reporter.metrics) == 0 def test_counter(self): # Create some fake metrics. @@ -258,12 +402,12 @@ def test_counter(self): # Assert they've been properly flushed. metrics = self.sort_metrics(reporter.metrics) - nt.assert_equal(len(metrics), 2) + assert len(metrics) == 2 (first, second) = metrics - nt.assert_equal(first['metric'], 'test.counter.1') - nt.assert_equal(first['points'][0][0], 1000.0) - nt.assert_equal(first['points'][0][1], 3) - nt.assert_equal(second['metric'], 'test.counter.2') + assert first['metric'] == 'test.counter.1' + assert first['points'][0][0] == 1000.0 + assert first['points'][0][1] == 0.3 + assert second['metric'] == 'test.counter.2' # Test decrement dog.increment('test.counter.1', value=10, timestamp=1000.0) @@ -272,22 +416,54 @@ def test_counter(self): dog.flush(1021.0) metrics = self.sort_metrics(reporter.metrics) - nt.assert_equal(len(metrics), 1) + assert len(metrics) == 1 first, = metrics - nt.assert_equal(first['metric'], 'test.counter.1') - nt.assert_equal(first['points'][0][0], 1000.0) - nt.assert_equal(first['points'][0][1], 8) - nt.assert_equal(second['metric'], 'test.counter.2') + assert first['metric'] == 'test.counter.1' + assert first['points'][0][0] == 1000.0 + assert first['points'][0][1] 
== 0.8 + assert second['metric'] == 'test.counter.2' # Flush again and make sure we're progressing. reporter.metrics = [] dog.flush(1030.0) - nt.assert_equal(len(reporter.metrics), 1) + assert len(reporter.metrics) == 1 # Finally, make sure we've flushed all metrics. reporter.metrics = [] dog.flush(1050.0) - nt.assert_equal(len(reporter.metrics), 0) + assert len(reporter.metrics) == 0 + + def test_distribution(self): + # Create some fake metrics. + dog = ThreadStats() + dog.start(roll_up_interval=10, flush_in_thread=False) + reporter = dog.reporter = MemoryReporter() + + dog.distribution('test.dist.1', 20, 100.0) + dog.distribution('test.dist.1', 22, 105.0) + dog.distribution('test.dist.2', 30, 115.0) + dog.distribution('test.dist.3', 30, 125.0) + dog.flush(120.0) + + # Assert they've been properly flushed. + dists = self.sort_metrics(reporter.distributions) + assert len(dists) == 2 + + (first, second) = dists + assert first['metric'] == 'test.dist.1' + assert first['points'][0][0] == 100.0 + assert first['points'][0][1] == [20, 22] + assert second['metric'] == 'test.dist.2' + + # Flush again and make sure we're progressing. + reporter.distributions = [] + dog.flush(130.0) + assert len(reporter.distributions) == 1 + + # Finally, make sure we've flushed all metrics. 
+ reporter.distributions = [] + dog.flush(150.0) + assert len(reporter.distributions) == 0 def test_default_host_and_device(self): dog = ThreadStats() @@ -306,8 +482,8 @@ def test_custom_host_and_device(self): dog.gauge('my.gauge', 1, 100.0, host='host') dog.flush(1000) metric = reporter.metrics[0] - nt.assert_equal(metric['device'], 'dev') - nt.assert_equal(metric['host'], 'host') + assert metric['device'] == 'dev' + assert metric['host'] == 'host' def test_tags(self): dog = ThreadStats() @@ -326,35 +502,101 @@ def test_tags(self): dog.flush(200.0) metrics = self.sort_metrics(reporter.metrics) - nt.assert_equal(len(metrics), 6) + assert len(metrics) == 6 [c1, c2, c3, g1, g2, g3] = metrics - (nt.assert_equal(c['metric'], 'counter') for c in [c1, c2, c3]) - nt.assert_equal(c1['tags'], None) - nt.assert_equal(c1['points'][0][1], 1) - nt.assert_equal(c2['tags'], ['env:production', 'db']) - nt.assert_equal(c2['points'][0][1], 1) - nt.assert_equal(c3['tags'], ['env:staging']) - nt.assert_equal(c3['points'][0][1], 1) - - (nt.assert_equal(c['metric'], 'gauge') for c in [g1, g2, g3]) - nt.assert_equal(g1['tags'], None) - nt.assert_equal(g1['points'][0][1], 10) - nt.assert_equal(g2['tags'], ['env:production', 'db']) - nt.assert_equal(g2['points'][0][1], 15) - nt.assert_equal(g3['tags'], ['env:staging']) - nt.assert_equal(g3['points'][0][1], 20) + assert c1['metric'] == 'counter' + assert c2['metric'] == 'counter' + assert c3['metric'] == 'counter' + assert c1['tags'] is None + assert c1['points'][0][1] == 0.1 + assert c2['tags'] == ['env:production', 'db'] + assert c2['points'][0][1] == 0.1 + assert c3['tags'] == ['env:staging'] + assert c3['points'][0][1] == 0.1 + + assert g1['metric'] == 'gauge' + assert g2['metric'] == 'gauge' + assert g3['metric'] == 'gauge' + assert g1['tags'] is None + assert g1['points'][0][1] == 10 + assert g2['tags'] == ['env:production', 'db'] + assert g2['points'][0][1] == 15 + assert g3['tags'] == ['env:staging'] + assert g3['points'][0][1] == 
20 + + def test_constant_tags(self): + """ + Constant tags are attached to all metrics. + """ + dog = ThreadStats(constant_tags=["type:constant"]) + dog.start(roll_up_interval=1, flush_in_thread=False) + dog.reporter = self.reporter + + # Post the same metric with different tags. + dog.gauge("gauge", 10, timestamp=100.0) + dog.gauge("gauge", 15, timestamp=100.0, tags=["env:production", 'db']) + dog.gauge("gauge", 20, timestamp=100.0, tags=["env:staging"]) + + dog.increment("counter", timestamp=100.0) + dog.increment("counter", timestamp=100.0, tags=["env:production", 'db']) + dog.increment("counter", timestamp=100.0, tags=["env:staging"]) + + dog.flush(200.0) + + # Assertions on all metrics + self.assertMetric(count=6) + + # Assertions on gauges + self.assertMetric(name='gauge', value=10, tags=["type:constant"], count=1) + self.assertMetric(name="gauge", value=15, + tags=["env:production", "db", "type:constant"], count=1) # noqa + self.assertMetric(name="gauge", value=20, tags=["env:staging", "type:constant"], count=1) + + # Assertions on counters + self.assertMetric(name="counter", value=1, tags=["type:constant"], count=1) + self.assertMetric(name="counter", value=1, + tags=["env:production", "db", "type:constant"], count=1) # noqa + self.assertMetric(name="counter", value=1, tags=["env:staging", "type:constant"], count=1) # Ensure histograms work as well. @dog.timed('timed', tags=['version:1']) - def test(): + def do_nothing(): + """ + A function that does nothing, but being timed. 
+ """ pass - test() + + with patch("datadog.threadstats.base.time", return_value=300): + do_nothing() + dog.histogram('timed', 20, timestamp=300.0, tags=['db', 'version:2']) - reporter.metrics = [] - dog.flush(400) - for metric in reporter.metrics: - assert metric['tags'] # this is enough + + self.reporter.metrics = [] + dog.flush(400.0) + + # Histograms, and related metric types, produce 8 different metrics + self.assertMetric(tags=["version:1", "type:constant"], count=8) + self.assertMetric(tags=["db", "version:2", "type:constant"], count=8) + + def test_metric_namespace(self): + """ + Namespace prefixes all metric names. + """ + # Set up ThreadStats with a namespace + dog = ThreadStats(namespace="foo") + dog.start(roll_up_interval=1, flush_in_thread=False) + dog.reporter = self.reporter + + # Send a few metrics + dog.gauge("gauge", 20, timestamp=100.0) + dog.increment("counter", timestamp=100.0) + dog.flush(200.0) + + # Metric names are prefixed with the namespace + self.assertMetric(count=2) + self.assertMetric(name="foo.gauge", count=1) + self.assertMetric(name="foo.counter", count=1) def test_host(self): dog = ThreadStats() @@ -376,27 +618,31 @@ def test_host(self): dog.flush(200.0) metrics = self.sort_metrics(reporter.metrics) - nt.assert_equal(len(metrics), 6) + assert len(metrics) == 6 [c1, c2, c3, g1, g2, g3] = metrics - (nt.assert_equal(c['metric'], 'counter') for c in [c1, c2, c3]) - nt.assert_equal(c1['host'], None) - nt.assert_equal(c1['tags'], None) - nt.assert_equal(c1['points'][0][1], 2) - nt.assert_equal(c2['host'], 'test') - nt.assert_equal(c2['tags'], None) - nt.assert_equal(c2['points'][0][1], 1) - nt.assert_equal(c3['host'], 'test') - nt.assert_equal(c3['tags'], ['tag']) - nt.assert_equal(c3['points'][0][1], 2) - - (nt.assert_equal(g['metric'], 'gauge') for g in [g1, g2, g3]) - nt.assert_equal(g1['host'], None) - nt.assert_equal(g1['points'][0][1], 10) - nt.assert_equal(g2['host'], '') - nt.assert_equal(g2['points'][0][1], 12) - 
nt.assert_equal(g3['host'], 'test') - nt.assert_equal(g3['points'][0][1], 15) + assert c1['metric'] == 'counter' + assert c2['metric'] == 'counter' + assert c3['metric'] == 'counter' + assert c1['host'] is None + assert c1['tags'] is None + assert c1['points'][0][1] == 0.2 + assert c2['host'] == 'test' + assert c2['tags'] is None + assert c2['points'][0][1] == 0.1 + assert c3['host'] == 'test' + assert c3['tags'] == ['tag'] + assert c3['points'][0][1] == 0.2 + + assert g1['metric'] == 'gauge' + assert g2['metric'] == 'gauge' + assert g3['metric'] == 'gauge' + assert g1['host'] is None + assert g1['points'][0][1] == 10 + assert g2['host'] == '' + assert g2['points'][0][1] == 12 + assert g3['host'] == 'test' + assert g3['points'][0][1] == 15 # Ensure histograms work as well. @dog.timed('timed', host='test') @@ -411,8 +657,8 @@ def test(): def test_disabled_mode(self): dog = ThreadStats() - reporter = dog.reporter = MemoryReporter() dog.start(disabled=True, flush_interval=1, roll_up_interval=1) + reporter = dog.reporter = MemoryReporter() dog.gauge('testing', 1, timestamp=1000) dog.gauge('testing', 2, timestamp=1000) dog.flush(2000.0) @@ -421,6 +667,7 @@ def test_disabled_mode(self): def test_stop(self): dog = ThreadStats() dog.start(flush_interval=1, roll_up_interval=1) + dog.reporter = MemoryReporter() for i in range(10): dog.gauge('metric', i) time.sleep(2) @@ -434,3 +681,183 @@ def test_stop(self): dog.gauge('metric', i) time.sleep(2) assert dog.flush_count in [flush_count, flush_count + 1] + + def test_tags_from_environment(self): + test_tags = ['country:china', 'age:45', 'blue'] + with preserve_environment_variable('DATADOG_TAGS'): + os.environ['DATADOG_TAGS'] = ','.join(test_tags) + dog = ThreadStats() + dog.start(roll_up_interval=10, flush_in_thread=False) + reporter = dog.reporter = MemoryReporter() + + # Add two events + event1_title = "Event 1 title" + event2_title = "Event 1 title" + event1_text = "Event 1 text" + event2_text = "Event 2 text" + 
dog.event(event1_title, event1_text) + dog.event(event2_title, event2_text) + + # Flush and test + dog.flush() + event1, event2 = reporter.events + assert event1['title'] == event1_title + assert event1['text'] == event1_text + assert event1['tags'] == test_tags + assert event2['title'] == event2_title + assert event2['text'] == event2_text + assert event2['text'] == event2_text + assert event2['tags'] == test_tags + + # Test more parameters + reporter.events = [] + event1_priority = "low" + event1_date_happened = 1375296969 + event1_tag = "Event 2 tag" + dog.event(event1_title, event1_text, priority=event1_priority, + date_happened=event1_date_happened, tags=[event1_tag]) + + # Flush and test + dog.flush() + event, = reporter.events + assert event['title'] == event1_title + assert event['text'] == event1_text + assert event['priority'] == event1_priority + assert event['date_happened'] == event1_date_happened + assert event['tags'] == [event1_tag] + test_tags + dog.start(flush_interval=1, roll_up_interval=1) + + def test_tags_from_environment_and_constant(self): + test_tags = ['country:china', 'age:45', 'blue'] + constant_tags = ['country:canada', 'red'] + with preserve_environment_variable('DATADOG_TAGS'): + os.environ['DATADOG_TAGS'] = ','.join(test_tags) + dog = ThreadStats(constant_tags=constant_tags) + dog.start(roll_up_interval=10, flush_in_thread=False) + reporter = dog.reporter = MemoryReporter() + + # Add two events + event1_title = "Event 1 title" + event2_title = "Event 1 title" + event1_text = "Event 1 text" + event2_text = "Event 2 text" + dog.event(event1_title, event1_text) + dog.event(event2_title, event2_text) + + # Flush and test + dog.flush() + event1, event2 = reporter.events + assert event1['title'] == event1_title + assert event1['text'] == event1_text + assert event1['tags'] == constant_tags + test_tags + assert event2['title'] == event2_title + assert event2['text'] == event2_text + assert event2['text'] == event2_text + assert 
event2['tags'] == constant_tags + test_tags + + # Test more parameters + reporter.events = [] + event1_priority = "low" + event1_date_happened = 1375296969 + event1_tag = "Event 2 tag" + dog.event(event1_title, event1_text, priority=event1_priority, + date_happened=event1_date_happened, tags=[event1_tag]) + + # Flush and test + dog.flush() + event, = reporter.events + assert event['title'] == event1_title + assert event['text'] == event1_text + assert event['priority'] == event1_priority + assert event['date_happened'] == event1_date_happened + assert event['tags'] == [event1_tag] + constant_tags + test_tags + dog.start(flush_interval=1, roll_up_interval=1) + + def test_tags_from_environment_env_service_version(self): + test_tags = set(['env:staging', 'service:food', 'version:1.2.3']) + with EnvVars( + env_vars={ + "DD_ENV": "staging", + "DD_VERSION": "1.2.3", + "DD_SERVICE": "food", + } + ): + dog = ThreadStats() + dog.start(roll_up_interval=10, flush_in_thread=False) + reporter = dog.reporter = MemoryReporter() + + # Add two events + event1_title = "Event 1 title" + event1_text = "Event 1 text" + dog.event(event1_title, event1_text) + + # Flush and test + dog.flush() + [event1] = reporter.events + assert event1['title'] == event1_title + assert event1['text'] == event1_text + assert set(event1['tags']) == test_tags + + def test_metric_type(self): + """ + Checks the submitted metric's metric type. 
+ """ + # Set up ThreadStats with a namespace + dog = ThreadStats(namespace="foo") + dog.start(roll_up_interval=1, flush_in_thread=False) + reporter = dog.reporter = self.reporter + + # Send a few metrics + dog.gauge("gauge", 20, timestamp=100.0) + dog.increment("counter", timestamp=100.0) + dog.histogram('histogram.1', 20, 100.0) + dog.flush(200.0) + + (first, second, p75, p85, p95, p99, avg, cnt, max_, min_) = self.sort_metrics(reporter.metrics) + + # Assert Metric type + assert first['type'] == 'rate' + assert second['type'] == 'gauge' + assert p75['type'] == 'gauge' + assert p85['type'] == 'gauge' + assert p95['type'] == 'gauge' + assert p99['type'] == 'gauge' + assert avg['type'] == 'gauge' + assert cnt['type'] == 'rate' + assert max_['type'] == 'gauge' + assert min_['type'] == 'gauge' + + # Test lambda_wrapper (uses ThreadStats under the hood) + def test_basic_lambda_decorator(self): + + @datadog_lambda_wrapper + def basic_wrapped_function(): + lambda_metric("lambda.somemetric", 100) + + _get_lambda_stats().reporter = self.reporter + basic_wrapped_function() + + assert _get_lambda_stats().reporter.dist_flush_counter == 1 + dists = self.sort_metrics(_get_lambda_stats().reporter.distributions) + assert len(dists) == 1 + + def test_embedded_lambda_decorator(self): + """ + Test that the lambda decorator flushes metrics correctly and only once + """ + + @datadog_lambda_wrapper + def wrapped_function_1(): + lambda_metric("lambda.dist.1", 10) + + @datadog_lambda_wrapper + def wrapped_function_2(): + wrapped_function_1() + lambda_metric("lambda.dist.2", 30) + + _get_lambda_stats().reporter = self.reporter + wrapped_function_2() + assert _get_lambda_stats().reporter.dist_flush_counter == 1 + + dists = self.sort_metrics(_get_lambda_stats().reporter.distributions) + assert len(dists) == 2 diff --git a/tests/unit/util/__init__.py b/tests/unit/util/__init__.py new file mode 100644 index 000000000..b3017a1db --- /dev/null +++ b/tests/unit/util/__init__.py @@ -0,0 +1,3 @@ 
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc diff --git a/tests/unit/util/test_cli.py b/tests/unit/util/test_cli.py new file mode 100644 index 000000000..84cc359e8 --- /dev/null +++ b/tests/unit/util/test_cli.py @@ -0,0 +1,214 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from argparse import ArgumentTypeError +from freezegun import freeze_time +import datetime +import unittest + +from datadog.util.cli import ( + comma_list, + comma_set, + comma_list_or_empty, + list_of_ints, + list_of_ints_and_strs, + set_of_ints, + DateParsingError, + _midnight, + parse_date_as_epoch_timestamp, + parse_date, +) +from datadog.util.compat import is_pypy +from datadog.util.format import force_to_epoch_seconds + + +class TestCLI(unittest.TestCase): + def test_comma_list(self): + invalid_cases = [None, ""] + for invalid_case in invalid_cases: + with self.assertRaises(ArgumentTypeError): + comma_list(invalid_case) + + valid_cases = ( + (["foo"], "foo", None), + (["foo", "bar"], "foo,bar", None), + ([1], "1", int), + ([1, 2], "1,2", int), + ) + for expected, list_str, item_func in valid_cases: + actual = comma_list(list_str, item_func) + self.assertListEqual(expected, actual) + + def test_comma_set(self): + invalid_cases = [None, ""] + for invalid_case in invalid_cases: + with self.assertRaises(ArgumentTypeError): + comma_set(invalid_case) + + valid_cases = ( + ({"foo"}, "foo", None), + ({"foo", "bar"}, "foo,bar", None), + ({1}, "1", int), + ({1}, "1,1,1", int), + ({1, 2}, "1,2,1", int), + ) + for expected, list_str, item_func in valid_cases: + actual = comma_set(list_str, item_func) + 
self.assertSetEqual(expected, actual) + + def test_comma_list_or_empty(self): + valid_cases = ( + ([], None, None), + ([], "", None), + (["foo"], "foo", None), + (["foo", "bar"], "foo,bar", None), + ) + for expected, list_str, item_func in valid_cases: + actual = comma_list_or_empty(list_str) + self.assertListEqual(expected, actual) + + def test_list_of_ints(self): + invalid_cases = [None, "", "foo", '["foo"]'] + for invalid_case in invalid_cases: + with self.assertRaises(ArgumentTypeError): + list_of_ints(invalid_case) + + valid_cases = (([1], "1"), ([1, 2], "1,2"), ([1], "[1]"), ([1, 2], "[1,2]")) + for expected, list_str in valid_cases: + actual = list_of_ints(list_str) + self.assertListEqual(expected, actual) + + def test_list_of_ints_and_strs(self): + invalid_cases = [None, ""] + for invalid_case in invalid_cases: + with self.assertRaises(ArgumentTypeError): + list_of_ints_and_strs(invalid_case) + + valid_cases = ( + (["foo"], "foo"), + (["foo", "bar"], "foo,bar"), + ([1], "1"), + ([1, 2], "1,2"), + (["foo", 2], "foo,2"), + ) + for expected, list_str in valid_cases: + actual = list_of_ints_and_strs(list_str) + self.assertListEqual(expected, actual) + + def test_set_of_ints(self): + invalid_cases = [None, "", "foo", '["foo"]'] + for invalid_case in invalid_cases: + with self.assertRaises(ArgumentTypeError): + set_of_ints(invalid_case) + + valid_cases = ( + ({1}, "1"), + ({1, 2}, "1,2"), + ({1}, "[1]"), + ({1}, "[1,1,1]"), + ({1, 2}, "[1,2,1]"), + ) + for expected, list_str in valid_cases: + actual = set_of_ints(list_str) + self.assertSetEqual(expected, actual) + + @freeze_time("2019-10-23 04:44:32", tz_offset=0) + def test_midnight(self): + d = _midnight() + self.assertEqual(2019, d.year) + self.assertEqual(10, d.month) + self.assertEqual(23, d.day) + self.assertEqual(0, d.hour) + self.assertEqual(0, d.minute) + self.assertEqual(0, d.second) + self.assertEqual(0, d.microsecond) + + @freeze_time("2019-10-23 04:44:32", tz_offset=0) + def test_parse_date(self): + 
test_date = datetime.datetime(2019, 10, 23, 4, 44, 32, 0) + cases = [ + (test_date, test_date), # already an instance, return + ("today", datetime.datetime(2019, 10, 23, 0, 0, 0)), + ("yesterday", datetime.datetime(2019, 10, 22, 0, 0, 0)), + ("tomorrow", datetime.datetime(2019, 10, 24, 0, 0, 0)), + ("2 days ago", datetime.datetime(2019, 10, 21, 4, 44, 32)), + ("2d ago", datetime.datetime(2019, 10, 21, 4, 44, 32)), + ("2 days ahead", datetime.datetime(2019, 10, 25, 4, 44, 32)), + ("2d ahead", datetime.datetime(2019, 10, 25, 4, 44, 32)), + ("now", datetime.datetime(2019, 10, 23, 4, 44, 32)), + ("2019-10-23 04:44:32.000000", test_date), + ("2019-10-23T04:44:32.000000", test_date), + ("2019-10-23 04:44:32", test_date), + ("2019-10-23T04:44:32", test_date), + ("2019-10-23 04:44", datetime.datetime(2019, 10, 23, 4, 44, 0, 0)), + ("2019-10-23-04", datetime.datetime(2019, 10, 23, 4, 0, 0, 0)), + ("2019-10-23", datetime.datetime(2019, 10, 23, 0, 0, 0, 0)), + ("2019-10", datetime.datetime(2019, 10, 1, 0, 0, 0, 0)), + ("2019", datetime.datetime(2019, 1, 1, 0, 0, 0, 0)), + ("2019-10", datetime.datetime(2019, 10, 1, 0, 0, 0, 0)), + ("1571805872", test_date), # seconds + ] + if not is_pypy(): + cases.append( + ("1571805872000", test_date) + ) # millis, pypy does not work (known) + + for i, (date_str, expected) in enumerate(cases): + actual = parse_date(date_str) + self.assertEqual( + expected, + actual, + "case {}: failed, date_str={} expected={} actual={}".format( + i, date_str, expected, actual + ), + ) + + # test invalid case + with self.assertRaises(DateParsingError): + parse_date("foo") + + @freeze_time("2019-10-23 04:44:32", tz_offset=0) + def test_parse_date_as_epoch_timestamp(self): + # this applies the same rules but always returns epoch seconds + test_date = datetime.datetime(2019, 10, 23, 4, 44, 32, 0) + cases = [ + (test_date, test_date), # already an instance, return + ("today", datetime.datetime(2019, 10, 23, 0, 0, 0)), + ("yesterday", datetime.datetime(2019, 10, 
22, 0, 0, 0)), + ("tomorrow", datetime.datetime(2019, 10, 24, 0, 0, 0)), + ("2 days ago", datetime.datetime(2019, 10, 21, 4, 44, 32)), + ("2d ago", datetime.datetime(2019, 10, 21, 4, 44, 32)), + ("2 days ahead", datetime.datetime(2019, 10, 25, 4, 44, 32)), + ("2d ahead", datetime.datetime(2019, 10, 25, 4, 44, 32)), + ("now", datetime.datetime(2019, 10, 23, 4, 44, 32)), + ("2019-10-23 04:44:32.000000", test_date), + ("2019-10-23T04:44:32.000000", test_date), + ("2019-10-23 04:44:32", test_date), + ("2019-10-23T04:44:32", test_date), + ("2019-10-23 04:44", datetime.datetime(2019, 10, 23, 4, 44, 0, 0)), + ("2019-10-23-04", datetime.datetime(2019, 10, 23, 4, 0, 0, 0)), + ("2019-10-23", datetime.datetime(2019, 10, 23, 0, 0, 0, 0)), + ("2019-10", datetime.datetime(2019, 10, 1, 0, 0, 0, 0)), + ("2019", datetime.datetime(2019, 1, 1, 0, 0, 0, 0)), + ("2019-10", datetime.datetime(2019, 10, 1, 0, 0, 0, 0)), + ("1571805872", test_date), # seconds + ] + if not is_pypy(): + cases.append( + ("1571805872000", test_date) + ) # millis, pypy does not work (known) + + for i, (date_str, expected) in enumerate(cases): + actual_timestamp = parse_date_as_epoch_timestamp(date_str) + expected_timestamp = force_to_epoch_seconds(expected) + self.assertEqual( + expected_timestamp, + actual_timestamp, + "case {}: failed, date_str={} expected={} actual={}".format( + i, date_str, expected_timestamp, actual_timestamp + ), + ) + + # test invalid case + with self.assertRaises(DateParsingError): + parse_date_as_epoch_timestamp("foo") diff --git a/tests/unit/util/test_compat.py b/tests/unit/util/test_compat.py new file mode 100644 index 000000000..9766c32c4 --- /dev/null +++ b/tests/unit/util/test_compat.py @@ -0,0 +1,85 @@ +# coding: utf8 +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +import logging +import pytest +import sys +import unittest + +from mock import patch + +from datadog.util.compat import conditional_lru_cache, is_higher_py32, is_p3k + +class TestConditionalLRUCache(unittest.TestCase): + def test_normal_usage(self): + @conditional_lru_cache + def test_function(some_string, num1, num2, num3): + return (some_string, num1 + num2 + num3) + + for idx in range(600): + self.assertEqual( + test_function("abc", idx, idx*2, idx *3), + ("abc", idx + idx * 2 + idx *3), + ) + + def test_var_args(self): + @conditional_lru_cache + def test_function(*args): + return sum(list(args)) + + args = [] + for idx in range(100): + args.append(idx) + self.assertEqual( + test_function(*args), + sum(args), + ) + + # pylint: disable=no-self-use + def test_debug_log(self): + test_object_logger = logging.getLogger('datadog.util') + with patch.object(test_object_logger, 'debug') as mock_debug: + @conditional_lru_cache + def test_function(): + pass + + test_function() + + if is_higher_py32(): + mock_debug.assert_called_once() + else: + mock_debug.assert_not_called() + +@pytest.mark.skipif(not is_p3k(), reason='Python 3 only') +def test_slow_imports(monkeypatch): + # We should lazy load certain modules to avoid slowing down the startup + # time when running in a serverless environment. This test will fail if + # any of those modules are imported during the import of datadogpy. + + blocklist = [ + 'configparser', + 'email.mime.application', + 'email.mime.multipart', + 'importlib.metadata', + 'importlib_metadata', + 'logging.handlers', + 'multiprocessing', + 'urllib.request', + ] + + class BlockListFinder: + def find_spec(self, fullname, *args): + for lib in blocklist: + if fullname == lib: + raise ImportError('module %s was imported!' 
% fullname) + return None + find_module = find_spec # Python 2 + + monkeypatch.setattr('sys.meta_path', [BlockListFinder()] + sys.meta_path) + + for mod in sys.modules.copy(): + if mod in blocklist or mod.startswith('datadog'): + del sys.modules[mod] + + import datadog diff --git a/tests/unit/util/test_format.py b/tests/unit/util/test_format.py new file mode 100644 index 000000000..dc4d6b62b --- /dev/null +++ b/tests/unit/util/test_format.py @@ -0,0 +1,54 @@ +# coding: utf8 +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +import unittest + +import pytest + +from datadog.util.format import construct_url, normalize_tags + + +class TestConstructURL: + expected = "https://api.datadoghq.com/api/v1/graph/snapshot" + test_data = [ + ("https://api.datadoghq.com", "v1", "graph/snapshot", expected), + ("https://api.datadoghq.com/", "v1", "graph/snapshot", expected), + ("https://api.datadoghq.com", "/v1", "graph/snapshot", expected), + ("https://api.datadoghq.com/", "/v1", "graph/snapshot", expected), + ("https://api.datadoghq.com", "v1/", "graph/snapshot", expected), + ("https://api.datadoghq.com/", "v1/", "graph/snapshot", expected), + ("https://api.datadoghq.com", "/v1/", "graph/snapshot", expected), + ("https://api.datadoghq.com/", "/v1/", "graph/snapshot", expected), + ("https://api.datadoghq.com", "v1", "/graph/snapshot", expected), + ("https://api.datadoghq.com/", "v1", "/graph/snapshot", expected), + ("https://api.datadoghq.com", "/v1", "/graph/snapshot", expected), + ("https://api.datadoghq.com/", "/v1", "/graph/snapshot", expected), + ("https://api.datadoghq.com", "v1/", "/graph/snapshot", expected), + ("https://api.datadoghq.com/", "v1/", "/graph/snapshot", expected), + ("https://api.datadoghq.com", "/v1/", "/graph/snapshot", expected), + ("https://api.datadoghq.com/", "/v1/", 
"/graph/snapshot", expected), + ] + + @pytest.mark.parametrize("host,api_version,path,expected", test_data) + def test_construct_url(self, host, api_version, path, expected): + assert construct_url(host, api_version, path) == expected + +class TestNormalizeTags: + """ + Test of the format's `normalize_tags` functionality + """ + test_data = [ + ([], []), + ([''],['']), + (['this is a tag'], ['this_is_a_tag']), + (['abc!@#$%^&*()0987654321{}}{'], ['abc__________0987654321____']), + (['abc!@#', '^%$#3456#'], ['abc___', '____3456_']), + (['mutliple', 'tags', 'included'], ['mutliple', 'tags', 'included']), + ([u'абвгдежзийкл', u'абв' , 'test123'], [u'абвгдежзийкл', u'абв' , 'test123']), + ([u'абвгд西😃ежзийкл', u'аб😃西в' , u'a😃😃b'], [u'абвгд西_ежзийкл', u'аб_西в', u'a__b']), + ] + + @pytest.mark.parametrize("original_tags,expected_tags", test_data) + def test_normalize_tags(self, original_tags, expected_tags): + assert normalize_tags(original_tags) == expected_tags diff --git a/tests/util/__init__.py b/tests/util/__init__.py index e69de29bb..b3017a1db 100644 --- a/tests/util/__init__.py +++ b/tests/util/__init__.py @@ -0,0 +1,3 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc diff --git a/tests/util/contextmanagers.py b/tests/util/contextmanagers.py new file mode 100644 index 000000000..7e788ce6b --- /dev/null +++ b/tests/util/contextmanagers.py @@ -0,0 +1,39 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +import os +from contextlib import contextmanager + + +@contextmanager +def preserve_environment_variable(env_name): + environ_api_param = os.environ.get(env_name) + try: + yield + finally: + if environ_api_param is not None: + os.environ[env_name] = environ_api_param + else: + del os.environ[env_name] + + +# Code copied from - https://github.com/DataDog/integrations-core/blob/de1b684e4e98d06a7b0da3249805de74bb877cea/datadog_checks_dev/datadog_checks/dev/structures.py#L24 +class EnvVars(dict): + def __init__(self, env_vars=None, ignore=None): + super(EnvVars, self).__init__(os.environ) + self.old_env = dict(self) + + if env_vars is not None: + self.update(env_vars) + + if ignore is not None: + for env_var in ignore: + self.pop(env_var, None) + + def __enter__(self): + os.environ.clear() + os.environ.update(self) + + def __exit__(self, exc_type, exc_value, traceback): + os.environ.clear() + os.environ.update(self.old_env) diff --git a/tests/util/fake_statsd_server.py b/tests/util/fake_statsd_server.py new file mode 100755 index 000000000..37e90cb94 --- /dev/null +++ b/tests/util/fake_statsd_server.py @@ -0,0 +1,215 @@ +#!/usr/bin/env python +# Unless explicitly stated otherwise all files in this repository are licensed +# under the BSD-3-Clause License. This product includes software developed at +# Datadog (https://www.datadoghq.com/). + +# Copyright 2021-Present Datadog, Inc + +import ctypes +import os +import shutil +import socket +import sys +import tempfile +import threading +import time +from multiprocessing import Array, Event, Process, Value + +# pylint: disable=too-many-instance-attributes,useless-object-inheritance +class FakeServer(object): + """ + Fake statsd server that can be used for testing/benchmarking. Implementation + Uses a separate process to run and manage the context to not poison the + benchmarking results. 
+ """ + + SOCKET_NAME = "fake_statsd_server_socket" + ALLOWED_TRANSPORTS = ["UDS", "UDP"] + MIN_RECV_BUFFER_SIZE = 32 * 1024 + + def __init__(self, transport="UDS", ignore_timeouts=True, debug=False): + if transport not in self.ALLOWED_TRANSPORTS: + raise ValueError( + "Transport {} is not a valid transport type. Only {} are allowed!".format( + transport, + self.ALLOWED_TRANSPORTS, + ) + ) + + self.transport = transport + self.ignore_timeouts = ignore_timeouts + self.debug = debug + + self.server_process = None + self.socket_dir = None + + # Inter-process coordination events + self.exit = Event() + self.ready = Event() + + # Shared-mem property value holders for inter-process communication + self._socket_path = Array(ctypes.c_char, 1024, lock=True) + self._port = Value(ctypes.c_long, 0, lock=True) + self._metric_counter_shmem_var = Value(ctypes.c_long, 0, lock=True) + self._payload_counter_shmem_var = Value(ctypes.c_long, 0, lock=True) + + def _run_server(self): + payload_counter = 0 + metric_counter = 0 + + if self.transport == "UDS": + self.socket_dir = tempfile.mkdtemp(prefix=self.__class__.__name__) + socket_path = os.path.join(self.socket_dir, self.SOCKET_NAME) + + if os.path.exists(socket_path): + os.unlink(socket_path) + + sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) + sock.settimeout(3) + + # Increase the receiving buffer size where needed (e.g. MacOS has 4k RX + # buffers which is half of the max packet size that the client will send. 
+ if os.name != 'nt': + recv_buff_size = sock.getsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF) + if recv_buff_size <= self.MIN_RECV_BUFFER_SIZE: + sock.setsockopt( + socket.SOL_SOCKET, + socket.SO_RCVBUF, + self.MIN_RECV_BUFFER_SIZE, + ) + + sock.bind(socket_path) + + if self.debug: + print("Listening via UDS on", socket_path) + + # We are using ctypes for shmem so we have to use a consistent + # datatype across Python versions + if sys.version_info[0] > 2: + self._socket_path.value = socket_path.encode("utf-8") + else: + self._socket_path.value = socket_path + + elif self.transport == "UDP": + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + sock.settimeout(3) + sock.bind(("", 0)) + + _, self._port.value = sock.getsockname() + + if self.debug: + print("Listening via UDP on port", self.port) + + # Run an async thread to update our shared-mem counters. We don't want to update + # the shared memory values when we get data due to performance reasons. + def _update_counters(): + while not self.exit.is_set(): + self._payload_counter_shmem_var.value = payload_counter + self._metric_counter_shmem_var.value = metric_counter + time.sleep(0.2) + + counter_update_timer = threading.Thread(target=_update_counters) + counter_update_timer.daemon = True + counter_update_timer.start() + + try: + self.ready.set() + + while not self.exit.is_set(): + try: + payload, _ = sock.recvfrom(8192) + except socket.timeout as ste: + if self.ignore_timeouts is True: + continue + + raise ste + + payload_counter += 1 + + offset = 0 + if payload[-1] == b"\n": + offset = -1 + + metric_counter += len(payload[:offset].split(b"\n")) + + if self.debug: + print( + "Got '{}' (pkts: {}, payloads: {}, metrics: {})".format( + payload.decode('utf-8'), + len(payload[:offset].split(b"\n")), + payload_counter, + metric_counter, + ) + ) + + except socket.timeout as ste: + if not self.exit.is_set(): + self.exit.set() + raise ste + finally: + counter_update_timer.join() + sock.close() + + def 
__enter__(self): + if self.server_process: + raise RuntimeError("Server already running") + + self.server_process = Process( + target=self._run_server, + name=FakeServer.__class__.__name__, + args=(), + ) + self.server_process.daemon = True + self.server_process.start() + + self.ready.wait(5) + + return self + + def __exit__(self, exception_type, exception_value, exception_traceback): + # Allow grace time to capture all metrics + time.sleep(2) + + self.exit.set() + self.server_process.join(10) + + if self.socket_dir: + shutil.rmtree(self.socket_dir, ignore_errors=True) + + if self.server_process.exitcode != 0: + raise RuntimeError("Server process did not exit successfully!") + + @property + def port(self): + return self._port.value + + @property + def socket_path(self): + return self._socket_path.value or None + + @property + def payloads_captured(self): + return self._payload_counter_shmem_var.value + + @property + def metrics_captured(self): + return self._metric_counter_shmem_var.value + + def __repr__(self): + return " 1: + options['transport'] = sys.argv[1].upper() + + with FakeServer(**options) as server: + while not server.exit.is_set(): + time.sleep(0.5) diff --git a/tests/util/snapshot_test_utils.py b/tests/util/snapshot_test_utils.py deleted file mode 100644 index 33cc20d92..000000000 --- a/tests/util/snapshot_test_utils.py +++ /dev/null @@ -1,43 +0,0 @@ -import io - -from PIL import Image -import nose.tools as nt - -from datadog.util.compat import url_lib - -# For Python3 compat -try: - xrange -except NameError: - xrange = range - - -def read_image_as_raster(img_url): - """ Reads image data from URL in raster format.""" - img = url_lib.urlopen(img_url) - image_file = io.BytesIO(img.read()) - img = Image.open(image_file) - w, h = img.size - pixels = img.load() - return [pixels[x, y] for x in range(w) for y in xrange(h)] - - -def assert_snap_not_blank(snapshot_url): - """ Asserts snapshot is not blank""" - pixels = read_image_as_raster(snapshot_url) - 
nt.ok_(pixels is not None - and isinstance(pixels, list) - and len(set(pixels)) > 2, - msg="Invalid or blank snapshot: {0}".format(snapshot_url)) - for pixel in set(pixels): - nt.ok_(isinstance(pixel, tuple), - msg="Invalid snapshot: {0}".format(snapshot_url)) - - -def assert_snap_has_no_events(snapshot_url): - """ Asserts snapshot has no events""" - pixels = read_image_as_raster(snapshot_url) - for color in set(pixels): - r, g, b, a = color # red, green, blue, alpha - nt.ok_(r != 255 or g != 230 and b != 230, - msg="Snapshot should not have events: {0}".format(snapshot_url)) diff --git a/tests/util/system_info_observer.py b/tests/util/system_info_observer.py new file mode 100644 index 000000000..ce77ae61f --- /dev/null +++ b/tests/util/system_info_observer.py @@ -0,0 +1,106 @@ +# Unless explicitly stated otherwise all files in this repository are licensed +# under the BSD-3-Clause License. This product includes software developed at +# Datadog (https://www.datadoghq.com/). + +# Copyright 2021-Present Datadog, Inc + +from threading import Event, Thread +import os +import time + +# pylint: disable=import-error +import psutil + + +# pylint: disable=useless-object-inheritance +class SysInfoObserver(object): + """ + SysInfoObserver collects timed CPU and memory usage stats in a separate + thread about the current process. 
+ """ + + def __init__(self, interval=0.1): + self._stats = [] + + self.interval = interval + + self.exit = None + self.initial_cpu_user = None + self.initial_cpu_system = None + self.observer_thread = None + self.proc_info = None + + def __enter__(self): + if self.observer_thread: + raise RuntimeError("Observer already running") + + self.exit = Event() + + pid = os.getpid() + self.proc_info = psutil.Process(pid) + + # Record baselines + self.initial_cpu_user = self.proc_info.cpu_times().user + self.initial_cpu_system = self.proc_info.cpu_times().system + self.initial_mem_rss = self.proc_info.memory_full_info().rss + self.initial_mem_vms = self.proc_info.memory_full_info().vms + + self.observer_thread = Thread( + name=self.__class__.__name__, + target=self.poll_system_info, + args=( + self.proc_info, + self.interval, + ), + ) + self.observer_thread.daemon = True + self.observer_thread.start() + + return self + + def __exit__(self, exception_type, exception_value, exception_traceback): + self.exit.set() + self.observer_thread.join() + + def poll_system_info(self, proc_info, interval): + while not self.exit.is_set(): + time.sleep(interval) + + mem_info = proc_info.memory_full_info() + datapoint = { + "interval": interval, + "mem.rss_diff_kb": (mem_info.rss - self.initial_mem_rss) / 1024, + "mem.vms_diff_kb": (mem_info.vms- self.initial_mem_vms) / 1024, + } + + self._stats.append(datapoint) + + @property + def stats(self): + # CPU data is cumulative + agg_stats = { + "cpu.user": self.proc_info.cpu_times().user - self.initial_cpu_user, + "cpu.system": self.proc_info.cpu_times().system - self.initial_cpu_system, + } + + if not self.exit.is_set(): + raise RuntimeError( + "You can only collect aggregated stats after context manager exits" + ) + + datapoints = len(self._stats) + for datapoint in self._stats: + for key, val in datapoint.items(): + if key.startswith("cpu"): + continue + + if key not in agg_stats: + agg_stats[key] = 0.0 + + agg_stats[key] += val + + for key, 
val in agg_stats.items(): + if not key.startswith("cpu"): + agg_stats[key] = val / datapoints + + return agg_stats diff --git a/tox.ini b/tox.ini index b48677bd7..15301e64b 100644 --- a/tox.ini +++ b/tox.ini @@ -1,29 +1,71 @@ [tox] +minversion = 3.7.0 +skip_missing_interpreters = true envlist = - py26, - py27, - pypy, - py33, - py34, - flake8 + py{27,37,38,py2.7,py3.8} + flake8 + integration + mypy + # black - see comments below [testenv] -commands = nosetests ./tests/unit/ +passenv = DD_TEST_CLIENT* +usedevelop = true +deps = + click + freezegun + mock + pytest + pytest-vcr + python-dateutil + vcrpy +commands = + !integration: pytest -v tests/unit {posargs} + integration: pytest -v tests/integration -m "not admin_needed" {posargs} + +[testenv:integration-admin] +passenv = DD_TEST_CLIENT* +usedevelop = true deps = - nose - six - mock -setenv = - PYTHONPATH = {toxinidir} + click + freezegun + mock + pytest + pytest-vcr + python-dateutil + vcrpy +commands = + pytest -v tests/integration -m "admin_needed" {posargs} [testenv:flake8] -commands = flake8 --max-line-length=100 ./datadog/ +skip_install = true deps = - flake8 + flake8==3.7.9 +commands = flake8 datadog + +# Black isn't safe to run while support is being maintained for python2.7, but +# can be re-enabled when support for 2.7 is dropped. +# +# [testenv:black] +# deps = +# black +# commands = black --line-length 120 {posargs} datadog + +[testenv:mypy] +# Mypy requires Python >= 3.5 and <=3.8 (but it can still type-check Python 2 +# code). +basepython = python3.8 +skip_install = true +deps = + mypy==0.770 +commands = + mypy --config-file mypy.ini datadog + mypy --config-file mypy.ini --py2 datadog + +[flake8] +max-line-length = 120 +ignore = E203,W503 -[testenv:doc] -basepython=python -changedir=doc/source -deps=sphinx -commands= - sphinx-build -c source -W -b html -d {envtmpdir}/doctrees . 
{envtmpdir}/html +[pytest] markers = admin_needed: marks tests that require the user associated with the application key to have admin rights, or that are destructive to the destination org. Do not run unless you know what you are doing. To run, use `tox -e integration-admin`.