Skip to content

Commit

Permalink
Merge branch 'master' into az/update-readme
Browse files Browse the repository at this point in the history
  • Loading branch information
yzhan289 authored Nov 3, 2021
2 parents c69fbc0 + 51d8964 commit 86ec332
Show file tree
Hide file tree
Showing 62 changed files with 1,264 additions and 903 deletions.
2 changes: 1 addition & 1 deletion .azure-pipelines/templates/install-deps.yml
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ steps:
- script: python -m pip install --disable-pip-version-check --upgrade pip setuptools wheel
displayName: 'Upgrade Python packaging tools'

- script: pip install --disable-pip-version-check git+git://github.com/ofek/codecov-python.git@datadog
- script: pip install --disable-pip-version-check git+https://github.com/ofek/codecov-python.git@datadog
displayName: 'Install Codecov'

- ${{ if eq(parameters.repo, 'core') }}:
Expand Down
1 change: 0 additions & 1 deletion .gitlab/validate-logs-intgs/validate_log_intgs.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,6 @@
'openshift': [ERR_UNEXPECTED_LOG_COLLECTION_CAT], # The agent collects logs from openshift environment but there is no pipeline
'pan_firewall': [ERR_NOT_DEFINED_WEB_UI], # The integration doesn't emit metric
'pivotal_pks': [ERR_UNEXPECTED_LOG_COLLECTION_CAT], # Using kubernetes pipeline
'win32_event_log': [ERR_UNEXPECTED_LOG_COLLECTION_CAT], # win32_event_log is about log collection but we don't collect win32_event_log logs
}


Expand Down
2 changes: 1 addition & 1 deletion .in-toto/tag.47c5a022.link
Original file line number Diff line number Diff line change
@@ -1 +1 @@
{"signatures":[{"keyid":"47c5a02273f1cf8104ee8d1a7a67dc43b24c1542","other_headers":"04000108001d16210447c5a02273f1cf8104ee8d1a7a67dc43b24c15420502616f3bb9","signature":"a8e9e60e3571b1ea78272bc95a470681db6d7876c07961cae5d5759dd2e86df1db4530be7afd211871bd59439c0e3723d683d14aac919996fc4e02a98aaf25ecacb97465b38521e90f0e7fa0a8800420b11ed9615e3fe2d54765854f179448b196025ba24a1c2e5b8b10764d31cc1bb3e03f9779f063f71932cccb42723a499b7e0ddb8936a5058c0a64245eb306735fd585b307fe22d4ebe443ac0739e66ad5177097195ccd670892ee9de014e44de2e2cdeaa7d7429a2c164a57dfdc38286327273a1b2fc48dd24808bcac55cd2ce0ef4b38b2823cc83ebcbaebac977c71448402446f991671cd81c8b7f4b7cec403e0f22552921d0653d7c2af9b0f85d32333db44519f59dccc9d071d3feb51e06af706afd143238f2a30545eb8b13015faeb2cc71d6666d050c8d345a4523d8870f4b40265329c7c4a3615641d768b1a6d524cac0c9417a98d054fa2e0c7c4491b63fbc159ca378f97ce6302c3ac7b351ec83638900a90fa20ca1a53e4f057577b1da56cf49e085193792da967d503fa70af6fc1d876bd4dbf485c02167e5de6266d9c778fb606ee54ab6bb905639d3f40389b8d72b9ac4f3097e7fd25c267b3ee64a34dc693dd3f49cd9784d86f6fc49d76c854705be5debbc3cfa684496b81417e89a273416a1791545009a88a681e69920dcf710f720733810f9a0be718bf458fa464f7f510572a1e746e821270480e"}],"signed":{"_type":"link","byproducts":{},"command":[],"environment":{},"materials":{},"name":"tag","products":{"datadog_checks_downloader/datadog_checks/__init__.py":{"sha256":"cd19a5ddb9a3b462fb28e9ef2e5afbec744a6a439a252fbda3638f8e1839b61d"},"datadog_checks_downloader/datadog_checks/downloader/__about__.py":{"sha256":"feabd7aeb139319055b220d1455dde7453cbc9ec5b4957a91c86a14afd6bf194"},"datadog_checks_downloader/datadog_checks/downloader/__init__.py":{"sha256":"7b06a5adef22cf70eec1ff5aeb5ea065980651095696e00aeb546825b4290a9f"},"datadog_checks_downloader/datadog_checks/downloader/__main__.py":{"sha256":"ca46cf78ef52a5a93e8607b6b897f5a66a3028ad072dd604fc4bb069346ef4ae"},"datadog_checks_downloader/datadog_checks/downloader/cli.py":{"sha256":"c4bc3048dd16ec6da09222d554494ba3ca4535dc4f9fba79596f49
cd307dca01"},"datadog_checks_downloader/datadog_checks/downloader/data/repo/metadata/current/.gitignore":{"sha256":"254cc5813dd82408e73c5f92947961c0cf06864829b04b9ad642b9d844f226e0"},"datadog_checks_downloader/datadog_checks/downloader/data/repo/metadata/current/root.json":{"sha256":"08a3936ac9da6be58327aab2c575db6835b1eda0cc3ba3584d9803a7bb1d68be"},"datadog_checks_downloader/datadog_checks/downloader/data/repo/metadata/previous/.gitignore":{"sha256":"58256f4948a0b6d787dcc977a33c69bd07f0e4057f9e38e2700e4740008fe612"},"datadog_checks_downloader/datadog_checks/downloader/data/repo/targets/.gitignore":{"sha256":"58256f4948a0b6d787dcc977a33c69bd07f0e4057f9e38e2700e4740008fe612"},"datadog_checks_downloader/datadog_checks/downloader/download.py":{"sha256":"2729f048066d3c591b4d61a467af53c396b69f98cd58c2f29bfccc05d52fca19"},"datadog_checks_downloader/datadog_checks/downloader/exceptions.py":{"sha256":"f2f2db2086be603b07392e23179852866481d1b19eae7403f6a2bce2a190b071"},"datadog_checks_downloader/datadog_checks/downloader/parameters.py":{"sha256":"9436f81d9cdc4b6b8366fd95413b3bafcc098a987587a431319c6801dc003612"},"datadog_checks_downloader/requirements.in":{"sha256":"7ae121962d97b5e53b2062d1ef68702633a35c11186d4987f14f586159381dda"},"datadog_checks_downloader/setup.py":{"sha256":"455e8054048ba0c97e01d90a30b3d05f2f88fcef74e703fbb3c8d176bb34de23"}}}}
{"signatures":[{"keyid":"47c5a02273f1cf8104ee8d1a7a67dc43b24c1542","other_headers":"04000108001d16210447c5a02273f1cf8104ee8d1a7a67dc43b24c15420502618169d5","signature":"3ac39793658ab335a5a35efb1f8fa7bd697e7d87e45caa8b2b590c8e76c122ada8a1643e86afb50348642370614293c6435867d4db87a9c1999e8dd192b1b72d632a82ce4a317135ccbc91a7a3debfbf66d83067e4dc3474f8c681a002eaa8d2adc303557abe924ab48b3c98fae234612a5720113bb55452f2b96367bed4ce3c06c2f33f705e2bfc893672fe197cac9b4beca6af83c9c5ade3d64fb7da2e40f060d9a817e3020fd2465035dcf85ec7c879c713ace4cbceb2e7f5c6ab5e71d4e2f330bdccb867780800e059b0974c1b5438254167920972220c76fef465058c571852655b9b5a2549b8880a7e6ee9e52948c69894ea52863d0baff2bafc6c133d8c2934b063e6246a7d2d58938402d06e88cbe134d989bb082001698c169804120e037d18c6bcd4997d8b841ac60c45c971cd010bfb74e73987b38696f4745b11e2a31ec28e532beabbd305fbc03d9d51da5e11527699bae1930cf624d0a996db3989014ce7fa635c580e834d586aeb5cb4ea188ef03d9bbc0b2827e6f62e9367b6d2197d604f7afd2dc963844671d589ac8b0fa5f1f0f6db7b28a668ac9d9b890a7663548535809a38cf3791f0bb7cc2a4370e53fe38c52dfe10955f3ba619e3fb96be3c355b37f090a053c0e2938e2c3da8593bcddd5613ad97f2b9f82f91fd6c72ba064d31b5d2ea89b2ca87fb1d265792385a51a37cbe45cc24a20a8a065d"}],"signed":{"_type":"link","byproducts":{},"command":[],"environment":{},"materials":{},"name":"tag","products":{"proxysql/datadog_checks/__init__.py":{"sha256":"23ba67000a27faa09e41c489b6993b2a5067b40f581452b8e399b8db0beb224e"},"proxysql/datadog_checks/proxysql/__about__.py":{"sha256":"05c5b1911b31dc760b6c6142d4f0fd244052ee2edd18a70c4d1af0239aaa0b24"},"proxysql/datadog_checks/proxysql/__init__.py":{"sha256":"537558d7fc1dc8a413cb6b1dcc300f3000905b8d5c917b0a438614d115eeff8f"},"proxysql/datadog_checks/proxysql/config_models/__init__.py":{"sha256":"7a5c3cc09fab7c1e75de218a0c73f295ceb685fa95ab3f47c44eda82ca646a1e"},"proxysql/datadog_checks/proxysql/config_models/defaults.py":{"sha256":"0682240008a83d8c4365ca47e73561728b9f51190886b1460e62376af2bfda9b"},"proxysql/datadog_checks/proxysql/config_models
/instance.py":{"sha256":"a4bd92b607e96227a7ffecc951d22f2fed89613e5122dada9401c161887fe0db"},"proxysql/datadog_checks/proxysql/config_models/shared.py":{"sha256":"65a852b77a60c267a4c4b61ee19ff822a4b33fcc8b8170d9aad25777fed10de7"},"proxysql/datadog_checks/proxysql/config_models/validators.py":{"sha256":"b2e1a32eb8591a9d541a935aa5c56f20fa7ebbc3de68cf24df3a650198f2712a"},"proxysql/datadog_checks/proxysql/data/conf.yaml.example":{"sha256":"9bf37619c1766ae10c6fb3e39958d4374add89bed479ca9983c60a71f609fa43"},"proxysql/datadog_checks/proxysql/proxysql.py":{"sha256":"5835eed49326600adc556ffd04283b2605308f6093a9e2b6b391a90c62475ceb"},"proxysql/datadog_checks/proxysql/queries.py":{"sha256":"5f5e38b8c78559896892bcf7f2a276f3574be7bf16856edf9ae5a59bb4adab7c"},"proxysql/datadog_checks/proxysql/ssl_utils.py":{"sha256":"c9b8d9f3c71e2f98b954abfb1068d9bb6057159d85f7ea0bad93c282cc45d8ae"},"proxysql/requirements.in":{"sha256":"8d53249d2417ea7b40f2d0eb503d73ac67779ced7084b295e4a81695ca6f2a51"},"proxysql/setup.py":{"sha256":"dc383a7a095902635aa72169ad31f42ba6d69731931e9d4f3eeed9af527ab780"}}}}
1 change: 1 addition & 0 deletions ceph/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ def dd_environment():
CheckDockerLogs(compose_file, 'spawning ceph --cluster ceph -w', wait=5),
CheckDockerLogs(compose_file, 'Running on http://0.0.0.0:5000/'),
],
attempts=2,
):
# Clean the disk space warning
run_command(
Expand Down
1 change: 1 addition & 0 deletions couchbase/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,7 @@ def dd_environment():
WaitFor(node_stats),
WaitFor(bucket_stats),
],
attempts=2,
):
yield DEFAULT_INSTANCE

Expand Down
1 change: 1 addition & 0 deletions datadog_checks_base/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -115,6 +115,7 @@ def uds_path():
'UDS_FILENAME': uds_filename,
},
conditions=[WaitFor(lambda: os.path.exists(uds_path))],
attempts=2,
):
yield uds_path

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,21 @@ def validate(self, check_name, decoded, fix):
self.fail(output)


class TileDescriptionValidator(BaseManifestValidator):
    """Validate that a V2 manifest's tile description fits within the UI length limit."""

    DESCRIPTION_PATH = '/tile/description'
    # Descriptions longer than this are cut off or shortened when rendered in the UI.
    MAX_DESCRIPTION_LENGTH = 70

    def validate(self, check_name, decoded, fix):
        # The description for V2 manifests should not be longer than 70 characters to avoid being
        # cut off or shortened on the UI
        tile_description = decoded.get_path(self.DESCRIPTION_PATH)
        if tile_description is None:
            # Avoid len(None) crashing when the manifest has no /tile/description entry.
            # NOTE(review): assumes a missing description is reported by another validator — confirm.
            return
        current_length = len(tile_description)
        if current_length > self.MAX_DESCRIPTION_LENGTH:
            # Build the message as one logical string. The original used a backslash
            # continuation inside the f-string, which embedded the next source line's
            # leading indentation into the emitted text.
            output = (
                f' The tile description is {current_length} characters long. '
                f'It should be no longer than {self.MAX_DESCRIPTION_LENGTH} characters.'
            )
            self.fail(output)


class SchemaValidator(BaseManifestValidator):
def validate(self, check_name, decoded, fix):
if not self.should_validate():
Expand Down Expand Up @@ -219,6 +234,7 @@ def get_v2_validators(ctx, is_extras, is_marketplace):
common.ImmutableAttributesValidator(version=V2),
common.LogsCategoryValidator(version=V2),
DisplayOnPublicValidator(version=V2),
TileDescriptionValidator(is_marketplace, is_extras, version=V2),
MediaGalleryValidator(is_marketplace, is_extras, version=V2),
# keep SchemaValidator last, and avoid running this validation if errors already found
SchemaValidator(ctx=ctx, version=V2, skip_if_errors=True),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@
"tile": {
"changelog": "CHANGELOG.md",
"configuration": "README.md#Setup",
"description": "Oracle relational database system designed for enterprise grid computing",
"description": "Oracle relational database system designed for enterprise grid",
"media": [],
"overview": "README.md#Overview",
"title": "Oracle",
Expand Down Expand Up @@ -145,6 +145,16 @@

IMMUTABLE_ATTRIBUTES_V2_MANIFEST = JSONDict({"manifest_version": "2.0.0"})

VALID_TILE_DESCRIPTION_V2_MANIFEST = JSONDict({"tile": {"description": "This is a valid length tile description!"}})

INVALID_TILE_DESCRIPTION_V2_MANIFEST = JSONDict(
{
"tile": {
"description": "This description is way too long to be valid! It will be cut off when displayed in the UI."
}
}
)


class MockedResponseInvalid:
status_code = 400
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -475,3 +475,23 @@ def test_manifest_v2_media_gallery_validator_incorrect_vimeo_id_type(_, setup_ro
# Assert test case
assert validator.result.failed, validator.result
assert not validator.result.fixed


def test_manifest_v2_tile_description_validator_pass(setup_route):
    """A tile description within the length limit must not be flagged."""
    manifest = input_constants.VALID_TILE_DESCRIPTION_V2_MANIFEST
    validator = v2_validators.TileDescriptionValidator(is_marketplace=True, version=V2, check_in_extras=True)

    validator.validate('active_directory', manifest, False)

    # Neither a failure nor a fix should have been recorded
    assert not validator.result.failed, validator.result
    assert not validator.result.fixed


def test_manifest_v2_tile_description_validator_invalid(setup_route):
    """A tile description over the length limit must be reported as a failure."""
    manifest = input_constants.INVALID_TILE_DESCRIPTION_V2_MANIFEST
    validator = v2_validators.TileDescriptionValidator(is_marketplace=True, version=V2, check_in_extras=True)

    validator.validate('active_directory', manifest, False)

    # A failure is recorded, but nothing is auto-fixed (fix=False was passed)
    assert validator.result.failed, validator.result
    assert not validator.result.fixed
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
'cluster_checks_updating_stats_duration_seconds': 'cluster_checks.updating_stats_duration_seconds',
'datadog_requests': 'datadog.requests',
'external_metrics': 'external_metrics',
'external_metrics_datadog_metrics': 'external_metrics.datadog_metrics',
'external_metrics_delay_seconds': 'external_metrics.delay_seconds',
'external_metrics_processed_value': 'external_metrics.processed_value',
'go_goroutines': 'go.goroutines',
Expand Down
1 change: 1 addition & 0 deletions datadog_cluster_agent/metadata.csv
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ datadog.cluster_agent.datadog.rate_limit_queries.remaining,gauge,,query,,Number
datadog.cluster_agent.datadog.rate_limit_queries.reset,gauge,,second,,Number of seconds before next reset applied to the Datadog API by endpoint,0,datadog_cluster_agent,rate limit queries reset
datadog.cluster_agent.datadog.requests,count,,request,,Requests made to Datadog by status,0,datadog_cluster_agent,datadog requests
datadog.cluster_agent.external_metrics,gauge,,,,Number of external metrics tagged,0,datadog_cluster_agent,external metrics
datadog.cluster_agent.external_metrics.datadog_metrics,gauge,,,,"The label valid is true if the DatadogMetric CR is valid, false otherwise",0,datadog_cluster_agent,external metrics datadog metrics
datadog.cluster_agent.external_metrics.delay_seconds,gauge,,second,,Freshness of the metric evaluated from querying Datadog,0,datadog_cluster_agent,external metrics delay
datadog.cluster_agent.external_metrics.processed_value,gauge,,,,Value processed from querying Datadog by metric,0,datadog_cluster_agent,external metrics processed
datadog.cluster_agent.go.goroutines,gauge,,,,Number of goroutines that currently exist,0,datadog_cluster_agent,go goroutines
Expand Down
4 changes: 4 additions & 0 deletions datadog_cluster_agent/tests/fixtures/metrics.txt
Original file line number Diff line number Diff line change
Expand Up @@ -86,6 +86,10 @@ datadog_requests{join_leader="true",status="success"} 13006
# HELP external_metrics Number of external metrics tagged.
# TYPE external_metrics gauge
external_metrics{join_leader="true"} 100
# HELP external_metrics_datadog_metrics The label valid is true if the DatadogMetric CR is valid, false otherwise
# TYPE external_metrics_datadog_metrics gauge
external_metrics_datadog_metrics{join_leader="true",name="dcaautogen-776576e7e4e1d57e7df8d30d63fdabe2c60edc",namespace="default",valid="false"} 1
external_metrics_datadog_metrics{join_leader="true",name="dcaautogen-f9a56c0456c0113a1d5d9f7673fcd09c5d6638",namespace="default",valid="true"} 1
# HELP external_metrics_delay_seconds freshness of the metric evaluated from querying Datadog
# TYPE external_metrics_delay_seconds gauge
external_metrics_delay_seconds{join_leader="true",metric="kubernetes.cpu.usage.total"} 41
Expand Down
1 change: 1 addition & 0 deletions datadog_cluster_agent/tests/test_datadog_cluster_agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@
'datadog.rate_limit_queries.reset',
'datadog.requests',
'external_metrics',
'external_metrics.datadog_metrics',
'external_metrics.delay_seconds',
'external_metrics.processed_value',
'go.goroutines',
Expand Down
6 changes: 5 additions & 1 deletion ecs_fargate/assets/configuration/spec.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -9,4 +9,8 @@ files:
- template: instances
options:
- template: instances/http
- template: instances/default
- template: instances/default
overrides:
min_collection_interval.display_default: 20
min_collection_interval.value.example: 20
min_collection_interval.enabled: 20
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ def instance_log_requests(field, value):


def instance_min_collection_interval(field, value):
    """Return the default for the `min_collection_interval` instance option.

    Both arguments are accepted for the config-model hook signature and are
    ignored; the default is the constant 20.
    """
    return 20


def instance_ntlm_domain(field, value):
Expand Down
6 changes: 6 additions & 0 deletions ecs_fargate/datadog_checks/ecs_fargate/data/conf.yaml.default
Original file line number Diff line number Diff line change
Expand Up @@ -319,3 +319,9 @@ instances:
## Whether or not to persist cookies and use connection pooling for increased performance.
#
# persist_connections: false

## @param min_collection_interval - number - optional - default: 20
## This changes the collection interval of the check. For more information, see:
## https://docs.datadoghq.com/developers/write_agent_check/#collection-interval
#
min_collection_interval: 20
4 changes: 2 additions & 2 deletions ecs_fargate/datadog_checks/ecs_fargate/data/conf.yaml.example
Original file line number Diff line number Diff line change
Expand Up @@ -350,11 +350,11 @@ instances:
#
# service: <SERVICE>

## @param min_collection_interval - number - optional - default: 15
## @param min_collection_interval - number - optional - default: 20
## This changes the collection interval of the check. For more information, see:
## https://docs.datadoghq.com/developers/write_agent_check/#collection-interval
#
# min_collection_interval: 15
min_collection_interval: 20

## @param empty_default_hostname - boolean - optional - default: false
## This forces the check to send metrics with no hostname.
Expand Down
22 changes: 16 additions & 6 deletions ecs_fargate/datadog_checks/ecs_fargate/ecs_fargate.py
Original file line number Diff line number Diff line change
Expand Up @@ -182,20 +182,30 @@ def submit_perf_metrics(self, container_tags, container_id, container_stats):
cpu_stats = container_stats.get('cpu_stats', {})
prev_cpu_stats = container_stats.get('precpu_stats', {})

value_system = cpu_stats.get('system_cpu_usage')
value_system = cpu_stats.get('cpu_usage', {}).get('usage_in_kernelmode')
if value_system is not None:
self.gauge('ecs.fargate.cpu.system', value_system, tags)
self.rate('ecs.fargate.cpu.system', value_system, tags)

value_user = cpu_stats.get('cpu_usage', {}).get('usage_in_usermode')
if value_user is not None:
self.rate('ecs.fargate.cpu.user', value_user, tags)

value_total = cpu_stats.get('cpu_usage', {}).get('total_usage')
if value_total is not None:
self.gauge('ecs.fargate.cpu.user', value_total, tags)
self.rate('ecs.fargate.cpu.usage', value_total, tags)

available_cpu = cpu_stats.get('system_cpu_usage')
preavailable_cpu = prev_cpu_stats.get('system_cpu_usage')
prevalue_total = prev_cpu_stats.get('cpu_usage', {}).get('total_usage')
prevalue_system = prev_cpu_stats.get('system_cpu_usage')

if prevalue_system is not None and prevalue_total is not None:
if (
available_cpu is not None
and preavailable_cpu is not None
and value_total is not None
and prevalue_total is not None
):
cpu_delta = float(value_total) - float(prevalue_total)
system_delta = float(value_system) - float(prevalue_system)
system_delta = float(available_cpu) - float(preavailable_cpu)
else:
cpu_delta = 0.0
system_delta = 0.0
Expand Down
5 changes: 3 additions & 2 deletions ecs_fargate/metadata.csv
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,9 @@ ecs.fargate.io.ops.write,rate,,,,# write operations to the disk,0,amazon_fargate
ecs.fargate.io.bytes.write,rate,,byte,,# bytes written to the disk,0,amazon_fargate,io write
ecs.fargate.io.ops.read,rate,,,,# read operation on the disk,0,amazon_fargate,io read count
ecs.fargate.io.bytes.read,rate,,byte,,# bytes read on the disk,0,amazon_fargate,io read
ecs.fargate.cpu.user,gauge,,nanosecond,,Total CPU time consumed per container,0,amazon_fargate,cpu user
ecs.fargate.cpu.system,gauge,,nanosecond,,Total CPU time consumed by the system,0,amazon_fargate,cpu system
ecs.fargate.cpu.user,rate,,nanosecond,,User CPU time,0,amazon_fargate,cpu user
ecs.fargate.cpu.system,rate,,nanosecond,,System CPU time,0,amazon_fargate,cpu system
ecs.fargate.cpu.usage,rate,,nanosecond,,Total CPU Usage,0,amazon_fargate,cpu total
ecs.fargate.cpu.limit,gauge,,percent,,Limit in percent of the CPU usage,0,amazon_fargate,cpu limit
ecs.fargate.cpu.percent,gauge,,percent,,Percentage of CPU used per container,0,amazon_fargate,cpu percent
ecs.fargate.mem.cache,gauge,,byte,,# of bytes of page cache memory,0,amazon_fargate,mem cache
Expand Down
10 changes: 10 additions & 0 deletions ecs_fargate/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
'ecs.fargate.io.bytes.read',
'ecs.fargate.cpu.user',
'ecs.fargate.cpu.system',
'ecs.fargate.cpu.usage',
'ecs.fargate.cpu.percent',
'ecs.fargate.mem.cache',
'ecs.fargate.mem.active_file',
Expand Down Expand Up @@ -60,6 +61,15 @@ def mocked_requests_get(*args, **kwargs):
return MockResponse(status_code=404)


def mocked_requests_get_sys_delta(*args, **kwargs):
    """Mocked requests.get: serve fixture files for the Fargate metadata endpoints.

    Returns the metadata fixture for URLs ending in "/metadata", the
    wrong-system-delta stats fixture for URLs ending in "/stats", and a 404
    response for anything else.
    """
    url = args[0]
    if url.endswith("/metadata"):
        fixture = 'metadata.json'
    elif url.endswith("/stats"):
        fixture = 'stats_wrong_system_delta.json'
    else:
        return MockResponse(status_code=404)
    return MockResponse(file_path=os.path.join(HERE, 'fixtures', fixture))


def mocked_get_tags(entity, _):
# Values taken from Agent6's TestParseMetadataV10 test
tag_store = {
Expand Down
Loading

0 comments on commit 86ec332

Please sign in to comment.