Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Update gmon to google-cloud-monitoring v2.x.x and above #623

Merged
merged 2 commits into from
Mar 21, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
46 changes: 20 additions & 26 deletions tools/gmon/gmon/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@

SAMPLE_METRIC_TYPE = "loadbalancing.googleapis.com/server/request_count"


def parse_args(args):
"""Parse CLI arguments.

Expand All @@ -41,8 +42,8 @@ def parse_args(args):
subparsers = parser.add_subparsers(title='Endpoints', dest='parser')

# Accounts parser
accounts = subparsers.add_parser(
'accounts', help='Cloud Operations Account operations')
accounts = subparsers.add_parser('accounts',
help='Cloud Operations Account operations')
accounts_sub = accounts.add_subparsers(dest='operation')
accounts_get = accounts_sub.add_parser(
'get', help='Get a Cloud Operations Account details')
Expand Down Expand Up @@ -105,7 +106,7 @@ def parse_args(args):
help='Window to query (in seconds)',
required=False,
default=60)
metrics_inspect.add_argument('--filters',
metrics_inspect.add_argument('--filters',
'-f',
nargs='+',
help='Filter on nested fields.',
Expand All @@ -129,7 +130,7 @@ def parse_args(args):
help='Number of results to show.',
required=False,
default=None)
metrics_list.add_argument('--filters',
metrics_list.add_argument('--filters',
'-f',
nargs='+',
help='Filter on nested fields.',
Expand Down Expand Up @@ -166,8 +167,7 @@ def parse_args(args):
# Services
sm_service = subparsers.add_parser(
'services', help='Cloud Monitoring Service Monitoring services')
sm_service_sub = sm_service.add_subparsers(
dest='operation')
sm_service_sub = sm_service.add_subparsers(dest='operation')
sm_service_get = sm_service_sub.add_parser(
'get', help='Get a Cloud Monitoring Service Monitoring service')
sm_service_create = sm_service_sub.add_parser(
Expand All @@ -180,29 +180,24 @@ def parse_args(args):
'list', help='List a Cloud Monitoring Service Monitoring service')

for p in [
sm_service_list, sm_service_get,
sm_service_create,
sm_service_update,
sm_service_delete
sm_service_list, sm_service_get, sm_service_create,
sm_service_update, sm_service_delete
]:
p.add_argument('--project',
'-p',
help='Cloud Monitoring host project id.',
required=True)
for p in [
sm_service_get, sm_service_create,
sm_service_delete
]:
for p in [sm_service_get, sm_service_create, sm_service_delete]:
p.add_argument('service_id', help='Cloud Monitoring service id')

sm_service_create.add_argument(
'--config', help='Path to service config.', required=True)
sm_service_create.add_argument('--config',
help='Path to service config.',
required=True)

# SLOs
sm_slo = subparsers.add_parser(
'slos', help='Cloud Monitoring Service Monitoring SLOs')
sm_slo_sub = sm_slo.add_subparsers(
dest='operation')
sm_slo_sub = sm_slo.add_subparsers(dest='operation')
sm_slo_get = sm_slo_sub.add_parser(
'get', help='Get a Cloud Monitoring Service Monitoring SLO')
sm_slo_create = sm_slo_sub.add_parser(
Expand All @@ -215,20 +210,15 @@ def parse_args(args):
'list', help='List Cloud Monitoring Service Monitoring SLOs')

for p in [
sm_slo_list, sm_slo_get,
sm_slo_update, sm_slo_create,
sm_slo_delete
sm_slo_list, sm_slo_get, sm_slo_update, sm_slo_create, sm_slo_delete
]:
p.add_argument('--project',
'-p',
help='Cloud Monitoring host project id.',
required=True)
p.add_argument('service_id', help='Cloud Monitoring service id')

for p in [
sm_slo_get, sm_slo_update,
sm_slo_delete
]:
for p in [sm_slo_get, sm_slo_update, sm_slo_delete]:
p.add_argument('slo_id', help='SLO id.')

sm_slo_create.add_argument('--config',
Expand All @@ -244,11 +234,13 @@ def parse_args(args):
}
return parsers, parser.parse_args(args)


def main():
    """gmon CLI entrypoint: parse argv and dispatch to the CLI handler."""
    argv = sys.argv[1:]
    parsers, parsed = parse_args(argv)
    cli(parsers, parsed)


def cli(parsers, args):
"""Main CLI function.

Expand Down Expand Up @@ -295,7 +287,7 @@ def cli(parsers, args):

elif command in ['inspect']:
response = method(metric_type, window=args.window, filters=filters)
filters = {} # already using API filters
filters = {} # already using API filters

elif command in ['delete_unused']:
response = method(pattern=args.regex, window=args.window)
Expand Down Expand Up @@ -336,6 +328,7 @@ def cli(parsers, args):
response = method(args.service_id)
return fmt_response(response, limit, fields, filters)


def parse_filters(filters=[]):
"""Function to parse `filters` CLI argument.

Expand Down Expand Up @@ -408,6 +401,7 @@ def fmt_response(response, limit, fields, filters={}):
responses.append(r)
return responses


def parse_fields(fields):
"""Parse `fields` CLI argument.

Expand Down
90 changes: 57 additions & 33 deletions tools/gmon/gmon/clients/monitoring.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,10 @@
import re
import time

from google.cloud.monitoring_v3 import (MetricServiceClient, enums, types)
from google.api import metric_pb2 as ga_metric
from google.api_core import exceptions
from google.cloud.monitoring_v3 import (MetricServiceClient, types,
TimeInterval, ListTimeSeriesRequest)

from .utils import decorate_with, to_json

Expand All @@ -38,10 +40,11 @@ class MetricsClient:
project_id (str): Cloud Monitoring host project id (workspace) to query
metrics from.
"""

def __init__(self, project_id):
    """Initialize the Cloud Monitoring metrics client.

    Args:
        project_id (str): Cloud Monitoring host project id (workspace) to
            query metrics from.
    """
    self.client = MetricServiceClient()
    self.project_id = project_id
    # google-cloud-monitoring v2.x removed the client-side resource-path
    # helpers (MetricServiceClient.project_path), so the stale v1 call that
    # preceded this assignment is dropped: it was dead code (immediately
    # overwritten) and would raise AttributeError under v2. Build the
    # resource name manually instead.
    self.project = f"projects/{project_id}"

def get(self, metric_type):
"""Get a metric descriptor from metric type.
Expand All @@ -55,13 +58,13 @@ def get(self, metric_type):
Returns:
iterator: Metric descriptor API response.
"""
name = f'{self.project}/metricDescriptors/{metric_type}'
try:
return self.client.get_metric_descriptor(
f'{self.project}/metricDescriptors/{metric_type}')
return self.client.get_metric_descriptor(name=name)
except exceptions.NotFound:
metric_type = self.get_approx(metric_type)
return self.client.get_metric_descriptor(
f'{self.project}/metricDescriptors/{metric_type}')
name = f'{self.project}/metricDescriptors/{metric_type}'
return self.client.get_metric_descriptor(name=name)

def get_approx(self, metric_type, interactive=True):
"""Get metric descriptors matching a regex of the metric_type.
Expand All @@ -81,14 +84,12 @@ def get_approx(self, metric_type, interactive=True):
if len(matches) == 0:
LOGGER.error(
f'No partial result matched your query "{metric_type}".')
raise # re-raise NotFound exception
raise # re-raise NotFound exception
if len(matches) == 1:
metric_type = matches[0][0]
project_id = matches[0][1]
LOGGER.info(
f'Found exactly one metric "{metric_type}" in project'
f'"{project_id}" matching regex.'
)
LOGGER.info(f'Found exactly one metric "{metric_type}" in project'
f'"{project_id}" matching regex.')
elif interactive:
LOGGER.info('Found multiple metrics matching regex.')
for idx, (mtype, project_id) in enumerate(matches):
Expand Down Expand Up @@ -118,18 +119,19 @@ def create(self,
Returns:
obj: Metric descriptor.
"""
descriptor = types.MetricDescriptor()
descriptor = ga_metric.MetricDescriptor()
if metric_type.startswith('custom.googleapis.com/'):
descriptor.type = metric_type
else:
descriptor.type = 'custom.googleapis.com/%s' % metric_type
descriptor.metric_kind = (getattr(enums.MetricDescriptor.MetricKind,
descriptor.metric_kind = (getattr(ga_metric.MetricDescriptor.MetricKind,
metric_kind))
descriptor.value_type = (getattr(enums.MetricDescriptor.ValueType,
descriptor.value_type = (getattr(ga_metric.MetricDescriptor.ValueType,
value_type))
descriptor.description = description
LOGGER.info(f'Creating metric descriptor "{descriptor.type}" ...')
return self.client.create_metric_descriptor(self.project, descriptor)
return self.client.create_metric_descriptor(
name=self.project, metric_descriptor=descriptor)

def delete(self, metric_type):
"""Delete a metric descriptor.
Expand All @@ -142,7 +144,7 @@ def delete(self, metric_type):
"""
LOGGER.info(f'Deleting metric descriptor "{metric_type}" ...')
return self.client.delete_metric_descriptor(
f'{self.project}/metricDescriptors/{metric_type}')
name=f'{self.project}/metricDescriptors/{metric_type}')

def list(self, pattern=None):
"""List all metric descriptors in project.
Expand All @@ -156,7 +158,8 @@ def list(self, pattern=None):
list: List of metric descriptors.
"""
LOGGER.debug(f'Listing metrics in project "{self.project_id}" ...')
descriptors = list(self.client.list_metric_descriptors(self.project))
descriptors = list(
self.client.list_metric_descriptors(name=self.project))
if pattern:
descriptors = [
x for x in descriptors if bool(re.search(pattern, x.type))
Expand Down Expand Up @@ -193,10 +196,8 @@ def delete_unused(self, pattern=None, window=1, interactive=True):
else:
last_written = results[0]['points'][0]['interval']['endTime']
keep_list.append({
'metric_type':
metric_type,
'message':
f'Last datapoint written on {last_written}'
'metric_type': metric_type,
'message': f'Last datapoint written on {last_written}'
})
LOGGER.info(
f'{metric_type}: last datapoint written at {last_written}')
Expand Down Expand Up @@ -225,26 +226,49 @@ def inspect(self, metric_type, window, filters={}):
"""
LOGGER.debug(
f'Inspecting metric "{metric_type}" in project "{self.project_id}"'
' ...'
)
' ...')
metric = list(self.get(metric_type))[0]
LOGGER.info(metric)
metric_type = metric['type']
interval = types.TimeInterval()
now = time.time()
interval.end_time.seconds = int(now)
interval.end_time.nanos = int(
(now - interval.end_time.seconds) * 10**9)
interval.start_time.seconds = int(now - window)
interval.start_time.nanos = interval.end_time.nanos
seconds = int(now)
nanos = int((now - seconds) * 10**9)
interval = TimeInterval({
"end_time": {
"seconds": seconds,
"nanos": nanos
},
"start_time": {
"seconds": (seconds - window),
"nanos": nanos
}
})

# TODO: Add custom aggregation filters
# aggregation = monitoring_v3.Aggregation({
# "alignment_period": {
# "seconds": 1200
# }, # 20 minutes
# "per_series_aligner":
# monitoring_v3.Aggregation.Aligner.ALIGN_MEAN,
# "cross_series_reducer":
# monitoring_v3.Aggregation.Reducer.REDUCE_MEAN,
# "group_by_fields": ["resource.zone"],
# })

query = f'metric.type = "{metric_type}"'
for filter_key, filter_value in filters.items():
query += f' {filter_key} = {filter_value}'
LOGGER.debug(f'Running query "{query}" ...')
results = list(
self.client.list_time_series(
self.project, query, interval,
enums.ListTimeSeriesRequest.TimeSeriesView.FULL))
results = self.client.list_time_series(
request={
"name": self.project,
"filter": query,
"interval": interval,
"view": ListTimeSeriesRequest.TimeSeriesView.FULL,
# "aggregation": aggregation # TODO: Uncomment this
})
return results

def switch_project(self, new_project_id):
Expand All @@ -254,4 +278,4 @@ def switch_project(self, new_project_id):
new_project_id (str): New project id.
"""
self.project_id = new_project_id
self.project = self.client.project_path(self.project_id)
self.project = f"projects/{self.project_id}"
Loading