Skip to content

Commit

Permalink
Merge pull request #27 from objectrocket/updates_circleci_tox
Browse files Browse the repository at this point in the history
Updates circleci tox
  • Loading branch information
paulrossmeier authored Jun 16, 2020
2 parents fa421c6 + b65a8ba commit b8d3117
Show file tree
Hide file tree
Showing 8 changed files with 119 additions and 29 deletions.
20 changes: 20 additions & 0 deletions .circleci/config.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
# CircleCI 2.1 pipeline: a single job that lints and tests the project via tox.
# NOTE(review): indentation reconstructed from the flattened scrape — the key
# hierarchy (jobs -> lint_test -> docker/steps, workflows -> basic-workflow)
# is unambiguous, but confirm against the repository copy.
version: 2.1
jobs:
  lint_test:
    docker:
      # Python 2.7 image matches the interpreter this project targets.
      - image: circleci/python:2.7.13
    steps:
      - checkout

      - run:
          name: install test dependencies
          command: sudo pip install --upgrade pip tox
      - run:
          name: lint and test
          # -r recreates the tox virtualenvs so stale deps never leak in.
          command: tox -r

workflows:
  version: 2
  basic-workflow:
    jobs:
      - lint_test
14 changes: 13 additions & 1 deletion CHANGELOG
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,18 @@ All notable changes to this project will be documented in this file.
* elasticstat/elasticstat.py - added version discovery to (_parse_threadpools) - updates threadpool to match version above or below 7
* elasticstat/elasticstat.py - added packages "json", "re", "packaging" to the import commands
* elasticstat/elasticstat.py - added "write" to the default threadpool
###Changed
### Changed
* elasticstat/elasticstat.py - moving self.threadpool constructor after client creation constructor to use version discovery
* setup.py - added packaging and certifi to 'install_requires'

## [1.3.5] - 2020-4-14 Paul Rossmeier
### Added
* requirements/prod.txt - added for requirements when installing the package
* requirements/dev.txt - added for tox to run
* tox.ini - added for tox configuration
* circleci/config.yml - added circleci integration
### Changed
* setup.py - added discovery for prod/dev variable to allow for different python requirement docs
* elasticstat/elasticstat.py - per flake8 linting I updated all requested linting errors
### Removed
* requirements.txt - removed in favor of requirements dir
45 changes: 24 additions & 21 deletions elasticstat/elasticstat.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,12 +73,14 @@
DEFAULT_THREAD_POOLS = ["index", "search", "bulk", "get", "write"]
CATEGORIES = ['general', 'os', 'jvm', 'threads', 'fielddata', 'connections', 'data_nodes']


class ESArgParser(argparse.ArgumentParser):
    """Argument parser that reacts to any parse error by printing the
    full help text (rather than argparse's terse one-line usage) and
    exiting with status 2."""

    def error(self, message):
        # Show the complete help screen on any parsing problem, then
        # terminate with the conventional argparse failure code.
        self.print_help()
        raise SystemExit(2)


class ESColors:
"""ANSI escape codes for color output"""
END = '\033[00m'
Expand All @@ -88,6 +90,7 @@ class ESColors:
GRAY = '\033[1;30m'
WHITE = '\033[1;37m'


class Elasticstat:
"""Elasticstat"""

Expand All @@ -99,10 +102,10 @@ def __init__(self, args):
self.node_counters['gc'] = {}
self.node_counters['fd'] = {}
self.node_counters['hconn'] = {}
self.nodes_list = [] # used for detecting new nodes
self.nodes_by_role = {} # main list of nodes, organized by role
self.node_names = {} # node names, organized by id
self.new_nodes = [] # used to track new nodes that join the cluster
self.nodes_list = [] # used for detecting new nodes
self.nodes_by_role = {} # main list of nodes, organized by role
self.node_names = {} # node names, organized by id
self.new_nodes = [] # used to track new nodes that join the cluster
self.active_master = ""
self.no_color = args.no_color
self.categories = self._parse_categories(args.categories)
Expand Down Expand Up @@ -185,7 +188,7 @@ def thetime(self):
return datetime.datetime.now().strftime("%H:%M:%S")

def size_human(self, size):
for unit in ['B','KB','MB','GB','TB','PB','EB','ZB']:
for unit in ['B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB']:
if abs(size) < 1024.0:
return "{:6.2f} {}".format(size, unit)
size /= 1024.0
Expand All @@ -205,15 +208,15 @@ def get_disk_usage(self, node_fs_stats):

return "{}|{}%".format(used_human, used_percent)

def get_role(self,node_id, node_stats):
def get_role(self, node_id, node_stats):
try:
# Section to handle ES 5
role = node_stats['nodes'][node_id]['roles']
if 'data' in role:
return "DATA"
if 'master' in role:
return "MST"
if 'ingest' in role:
if 'ingest' in role:
return "ING"
else:
return "UNK"
Expand All @@ -236,14 +239,14 @@ def get_role(self,node_id, node_stats):
return "RTR"
else:
return "UNK"
else:
# Section to handle ES 6.x
else:
# Section to handle ES 6.x
role = node_stats['nodes'][node_id]['nodeRole']
if 'data' in role:
return "DATA"
if 'master' in role:
return "MST"
if 'ingest' in role:
if 'ingest' in role:
return "ING"
else:
return "UNK"
Expand Down Expand Up @@ -318,7 +321,7 @@ def process_node_os(self, role, node_id, node):
# Pre Elasticsearch 5.x
node_load_avg = node['os'].get('load_average')
if isinstance(node_load_avg, list):
node_load_avg="/".join(str(x) for x in node_load_avg)
node_load_avg = "/".join(str(x) for x in node_load_avg)
elif isinstance(node_load_avg, float):
# Elasticsearch 2.0-2.3 only return 1 load average, not the standard 5/10/15 min avgs
node_load_avg = "{0:.2f}".format(node_load_avg)
Expand All @@ -334,7 +337,7 @@ def process_node_os(self, role, node_id, node):
def process_node_jvm(self, role, node_id, node):
processed_node_jvm = {}
processed_node_jvm['used_heap'] = "{0}%".format(node['jvm']['mem']['heap_used_percent'])
processed_node_jvm ['old_gc_sz'] = node['jvm']['mem']['pools']['old']['used']
processed_node_jvm['old_gc_sz'] = node['jvm']['mem']['pools']['old']['used']
node_gc_stats = node['jvm']['gc']['collectors']
processed_node_jvm['old_gc'], processed_node_jvm['young_gc'] = self.get_gc_stats(node_id, node_gc_stats)
return(NODES_TEMPLATE['jvm'].format(**processed_node_jvm))
Expand All @@ -343,9 +346,9 @@ def process_node_threads(self, role, node_id, node):
thread_segments = []
for pool in self.threadpools:
if pool in node['thread_pool']:
threads ="{0}|{1}|{2}".format(node['thread_pool'][pool]['active'],
node['thread_pool'][pool]['queue'],
node['thread_pool'][pool]['rejected'])
threads = "{0}|{1}|{2}".format(node['thread_pool'][pool]['active'],
node['thread_pool'][pool]['queue'],
node['thread_pool'][pool]['rejected'])
thread_segments.append(NODES_TEMPLATE['threads'].format(threads=threads))
else:
thread_segments.append(NODES_TEMPLATE['threads'].format(threads='-|-|-'))
Expand All @@ -359,7 +362,7 @@ def process_node_fielddata(self, role, node_id, node):

def process_node_connections(self, role, node_id, node):
processed_node_conns = {}
if node.get('http') == None:
if node.get('http') is None:
node['http'] = {u'total_opened': 0, u'current_open': 0}
processed_node_conns['http_conn'] = self.get_http_conns(node_id, node['http'])
processed_node_conns['transport_conn'] = node['transport']['server_open']
Expand Down Expand Up @@ -409,15 +412,15 @@ def process_role(self, role, nodes_stats):
else:
failed_node = {}
failed_node['name'] = failed_node_name + '-'
failed_node['role'] = "({0})".format(role) # Role it had when we last saw this node in the cluster
failed_node['role'] = "({0})".format(role) # Role it had when we last saw this node in the cluster
print self.colorize(NODES_FAILED_TEMPLATE.format(**failed_node), ESColors.GRAY)
continue
# make sure node's role hasn't changed
current_role = self.get_role(node_id, nodes_stats)
if current_role != role:
# Role changed, update lists so output will be correct on next iteration
self.nodes_by_role.setdefault(current_role, []).append(node_id) # add to new role
self.nodes_by_role[role].remove(node_id) # remove from current role
self.nodes_by_role.setdefault(current_role, []).append(node_id) # add to new role
self.nodes_by_role[role].remove(node_id) # remove from current role
row = self.process_node(current_role, node_id, nodes_stats['nodes'][node_id])
if node_id in self.new_nodes:
print self.colorize(row, ESColors.WHITE)
Expand Down Expand Up @@ -454,7 +457,7 @@ def print_stats(self):
cluster_segments = []
cluster_health = self.es_client.cluster.health()
nodes_stats = self.es_client.nodes.stats(human=True)
self.active_master = self.es_client.cat.master(h="id").strip() # needed to remove trailing newline
self.active_master = self.es_client.cat.master(h="id").strip() # needed to remove trailing newline

# Print cluster health
cluster_health['timestamp'] = self.thetime()
Expand Down Expand Up @@ -490,7 +493,7 @@ def print_stats(self):
print self.colorize(self.node_headings, ESColors.GRAY)
for role in self.nodes_by_role:
self.process_role(role, nodes_stats)
print "" # space out each run for readability
print "" # space out each run for readability
time.sleep(self.sleep_interval)


Expand Down
6 changes: 0 additions & 6 deletions requirements.txt

This file was deleted.

36 changes: 36 additions & 0 deletions requirements/dev.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
appdirs==1.4.3
atomicwrites==1.3.0
attrs==19.3.0
certifi==2020.4.5.1
configparser==4.0.2
contextlib2==0.6.0.post1
distlib==0.3.0
elasticsearch==7.6.0
entrypoints==0.3
enum34==1.1.10
filelock==3.0.12
flake8==3.7.9
funcsigs==1.0.2
functools32==3.2.3.post2
importlib-metadata==1.6.0
importlib-resources==1.4.0
mccabe==0.6.1
more-itertools==5.0.0
packaging==20.3
pathlib2==2.3.5
pluggy==0.13.1
py==1.8.1
pycodestyle==2.5.0
pyflakes==2.1.1
pyparsing==2.4.7
pytest==4.6.9
scandir==1.10.0
singledispatch==3.4.0.3
six==1.14.0
toml==0.10.0
tox==3.14.6
typing==3.7.4.1
urllib3==1.25.8
virtualenv==20.0.17
wcwidth==0.1.9
zipp==1.2.0
3 changes: 3 additions & 0 deletions requirements/prod.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
certifi
elasticsearch
packaging
8 changes: 7 additions & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,12 @@

from elasticstat import __version__, __author__


def read_requirements(env_type):
    """Read a pip requirements file for the given environment.

    Parameters
    ----------
    env_type : str
        Basename of the file under requirements/ (e.g. 'prod' or 'dev').

    Returns
    -------
    list of str
        Requirement specifiers with blank lines, comment lines ('#...'),
        and pip option lines ('-...') filtered out.
    """
    with open('requirements/{}.txt'.format(env_type), 'r') as fd:
        stripped = (line.strip() for line in fd)
        # Strip BEFORE testing prefixes so indented comments/options are
        # also skipped, and drop empty lines so install_requires never
        # receives '' entries (the original kept blank lines as '').
        return [line for line in stripped
                if line and not line.startswith(('-', '#'))]


setup(
name='elasticstat',
version=__version__,
Expand All @@ -10,7 +16,7 @@
author_email='jtharp@objectrocket.com',
url = 'https://github.com/objectrocket/elasticstat',
download_url = 'https://github.com/objectrocket/elasticstat/archive/1.3.0.tar.gz',
install_requires=['elasticsearch', 'packaging', 'certifi'],
install_requires=read_requirements('prod'),
packages=['elasticstat'],
entry_points={
'console_scripts': [
Expand Down
16 changes: 16 additions & 0 deletions tox.ini
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
# content of: tox.ini , put in same dir as setup.py
[tox]
envlist = lint
skipsdist = True

[flake8]
exclude = *.egg-info,.venv,.git,.tox,build,dist,docs,Dockerfile
ignore = E501,W504,E126,F841,E251
max-line-length = 152

[testenv:lint]
# install the lint/test dependencies (flake8, pytest, ...) from
# requirements/dev.txt into the virtualenv where commands run
deps = -r{toxinidir}/requirements/dev.txt
commands =
# NOTE: you can run any command line tool here - not just tests
flake8

0 comments on commit b8d3117

Please sign in to comment.