diff --git a/.gitignore b/.gitignore index f05bb8f..7f8da6d 100644 --- a/.gitignore +++ b/.gitignore @@ -7,3 +7,4 @@ *__pycache__* /.venv .venv-setup.log +/build/ diff --git a/.infrastructure/entrypoint.sh b/.infrastructure/entrypoint.sh index 37d487d..6d5e8d1 100644 --- a/.infrastructure/entrypoint.sh +++ b/.infrastructure/entrypoint.sh @@ -1,12 +1,5 @@ #!/bin/bash -# shellcheck disable=SC2001 -CHECK_INTERVAL=$(echo "${CHECK_INTERVAL}" | sed "s/\"//g") - -get_common_args () { - echo " --directory=/data --db-path=/database/db.sqlite3 " -} - prepare_data_directory () { # # Database is outside of "data" directory, as the database contains dynamic data, that could be considered @@ -17,34 +10,30 @@ prepare_data_directory () { touch /database/db.sqlite3 } -prepare_crontab () { - # depending on operating system, create an entrypoint for cron - echo "#!/bin/bash" > /entrypoint.cron.sh - echo "crond -d 2 -f" >> /entrypoint.cron.sh - - # check interval can be configured using environment variables - echo "${CHECK_INTERVAL} infracheck --force --wait=${WAIT_TIME} $(get_common_args) " > /etc/crontabs/root - - chmod +x /entrypoint.cron.sh -} - prepare_entrypoint () { ARGS="" - if [[ ${LAZY} == "true" ]] || [[ ${LAZY} == "1" ]]; then - ARGS="${ARGS} --lazy " + if [[ ${REFRESH_TIME} ]]; then + ARGS="${ARGS} --refresh-time=${REFRESH_TIME} " + fi + + if [[ ${CHECK_TIMEOUT} ]]; then + ARGS="${ARGS} --timeout=${CHECK_TIMEOUT} " + fi + + if [[ ${WAIT_TIME} ]]; then + ARGS="${ARGS} --wait=${WAIT_TIME} " fi # allow to pass custom arguments from docker run command echo "#!/bin/bash" > /entrypoint.cmd.sh - echo "infracheck --server --server-port 8000 ${ARGS} $(get_common_args) $@" >> /entrypoint.cmd.sh + echo "infracheck --server-port 8000 ${ARGS} --directory=/data --db-path=/database/db.sqlite3 $@" >> /entrypoint.cmd.sh cat /entrypoint.cmd.sh chmod +x /entrypoint.cmd.sh } prepare_data_directory -prepare_crontab prepare_entrypoint "$@" exec supervisord -c /etc/supervisord.conf diff --git 
a/ChangeLog b/ChangeLog index bb3f63d..bb14d20 100644 --- a/ChangeLog +++ b/ChangeLog @@ -1,6 +1,172 @@ CHANGES ======= +* Fixed "AttributeError: 'str' object has no attribute 'decode'" +* Fixed "AttributeError: 'str' object has no attribute 'decode'" +* #12: Temporarily point to RKD beta version +* #12: Improve RKD integration by using simpler switch \`--imports\` +* #12: Added integration with RiotKit-Do +* #9, #10, #11: Extracted few exceptions into exceptions.py module, moved http layer to the lower level (it should be next to cli), added versioning endpoint +* #9, #10: Update docker image +* #9, #10: Logic extraction from controller into more layers, added model, using pickle instead of json in database cache, the application now is working in two threads - one is server, second is background worker (replaces crontab calls) + +v1.10 +----- + +* #8: Fix issue with missing "msgpack" + +v1.10.0.0b3 +----------- + +* #8: Add missing package "msgpack" + +v1.10.0.0b2 +----------- + +* #8: Add hashbang +* #8: Update docs +* #8: Update docs +* #8: Fix deprecations, update docs + +v1.10.0.0b1 +----------- + +* #8: Update docs formatting +* #8: Covered influxdb-query with tests +* #8: Refactor container test utils +* Docs: Add example to the disk-space +* Docs: Add example to the ovh-expiration +* Docs: Add example to the ovh-expiration +* Docs: Add example to the docker-container-log +* #8: Initially added influxdb-query check + +v1.9.1 +------ + +* #7: Added missing ARCH in docker tag +* Updated badges + +v1.9 +---- + +* #7: Clean up +* #7: Build on ARM +* #7: Set almost final pipelines +* #7: Enable ARM build +* #7: Attempt to keep the git tag in PBR build +* #7: Attempting to resolve git tag retrieving +* #7: Attempting to resolve git tag retrieving +* #7: add missing wheel package +* #7: add missing wheel package +* #7: add missing hashbang +* #7: return a value in :image task +* #7: Compat +* #7: Add login to registry +* #7: Release job +* #7: Release job +* #7: 
Fixes related to GH Actions +* #7: Fixes related to GH Actions +* #7: Fixes related to GH Actions +* #7: Fixes related to GH Actions +* #7: Fixes related to GH Actions +* #7: Moving to Github Actions +* #7: Moving to Github Actions +* Build on Python 3.7 +* #6: Implement "docker-container-log" check +* Remove unused, nonsense check +* Create index also on existing databases +* Correct: sqlite3.Warning: You can only execute one statement at a time +* Add index for better performance of the SQLite3 + +v1.8 +---- + +* Refactor: Moved docs to Python files +* Postgres support: Included "postgres" check in docs +* Postgres support: Added simple PostgreSQL check if the DB is up + +v1.8.rc1 +-------- + +* Postgres support: Corrected one typo, and added distinction between no rights and no data +* ARM: Correct package name +* Fixed problem with building Docker image - after enforcing regeneration of requirements.txt +* Postgres support: Added missing postgresql-devel required for building/installing psycopg2 +* Building: Always re-generate requirements.txt +* Postgres support: Switch from psycopg2 to psycopg2-binary +* Postgres support: Switch from psycopg2 to psycopg2-binary +* Postgres support: Added psycopg2 as dependency +* Postgres support: chmod +x +* Docs: Added PostgreSQL replication checks +* Added PostgreSQL replication checks + +v1.7 +---- + +* Docs: Added passwords deploying strategy +* Docs: Added templating documentation + +v1.7.rc5 +-------- + +* Runner: Added a simple templating in input variables. Example: ${ENV.USER} to see current user. 
Use this mechanism to pass credentials securely +* Runner: Added some default timeout for safety reasons + +v1.7.rc4 +-------- + +* SSH: Added creation of .ssh directory and known\_hosts file if not present +* SSH: Added support for adding to known\_hosts first time +* Tests: Increased SSH container timeout to avoid random failures +* Load avg checks: Corrected invalid casting in timeout parameter and extracted code into the checklib +* Formatting +* Docker: Added required packages +* Reminder check: Correct a misleading message +* Docs: Deleted development-only information + +v1.7.rc3 +-------- + +* Docker: Fixed containers - added missing gcc after adding dependency that requires compilation +* Added correct requirements.txt +* Tests: Corrected paths in tests +* Checks: Add swap-usage-max-percent +* Checks: Added load-average and load-average-auto + +v1.7.rc2 +-------- + +* CI: Increased timeout for SSH container +* CI: Added missing make task +* CI: debug +* CI: Fix build by allowing running in venv +* CI: Fix build by allowing running in venv +* Docs: Added all new checks to docs +* Docs: Fixed errors in the docs +* Added new check - \`ssh-files-checksum\`: \`Calls remote process using SSH and expects: the listed files and checksums will be matching\` +* SSH-based checks are now sharing the same code by using a library, simplified imports and made them more safe +* Runner: Added support for lists and dicts in parameters +* Add \`ssh-fingerprint\` check: \`Verifies if remote host fingerprint matches. 
Helps detecting man-in-the-middle and server takeover attacks.\` +* Tests: Close the docker socket to avoid warnings in tests +* Tests: Separate unit tests from functional tests, improve pipeline - correct mistake +* Tests: Separate unit tests from functional tests, improve pipeline + +v1.7.rc1 +-------- + +* Tests: Improved tests that uses SSH container - the container is setup one for a class +* Tests: Collecting output from SSH container +* Refactored "replication-running" check syntax and added additional status information +* CI: Attempt to run unit tests on travis +* CI: Trying to bring up the unit tests +* Covered "reminder" check with tests +* CI: Fixing Python 3.7 build +* CI: Add missing pipenv +* Add compatibility with Python 3.7 stage +* Downgrade packages for Python 3.6 compatibility +* No longer require strict Python version, correct Docker builds +* Corrected mistake in travis configuration +* Changing the way the application is deployed. Introducing pipenv * Enabled docker in travis builds as it is required for testing * Covered "ssh-command" with tests and fixed all spotted bugs * Preparation for the Pipenv support diff --git a/docs/source/first-steps.rst b/docs/source/first-steps.rst index 18eb3e5..e86a423 100644 --- a/docs/source/first-steps.rst +++ b/docs/source/first-steps.rst @@ -107,9 +107,9 @@ The image will by default expose a HTTP endpoint. 
List of supported environment variables: -- CHECK_INTERVAL="\*/1 \* \* \* \*" +- REFRESH_TIME=120 +- CHECK_TIMEOUT=120 - WAIT_TIME=0 -- LAZY=false **Without Docker** @@ -120,13 +120,11 @@ List of supported environment variables: rkd :install # run checks in the shell - infracheck --directory=/your-project-directory-path-there + infracheck --directory=/your-project-directory-path-there --no-server - # run a webserver - infracheck --directory=/your-project-directory-path-there --server --server-port=7422 --lazy + # run the application with webserver and background worker + infracheck --directory=/your-project-directory-path-there --server-port=7422 --refresh-time=120 --log-level=info - # set up a scheduled checking - echo "*/1 * * * * infracheck --directory=/your-project-directory-path-there --force" >> /etc/crontabs/root **Using PIP** @@ -135,11 +133,13 @@ List of supported environment variables: sudo pip install infracheck # run checks in the shell - infracheck --directory=/your-project-directory-path-there + infracheck --directory=/your-project-directory-path-there --no-server - # run a webserver - infracheck --directory=/your-project-directory-path-there --server --server-port=7422 + # run the application with webserver and background worker + infracheck --directory=/your-project-directory-path-there --server-port=7422 --refresh-time=120 --log-level=info - # set up a scheduled checking - echo "*/1 * * * * infracheck --directory=/your-project-directory-path-there --force" >> /etc/crontabs/root +Advanced +-------- + +**Setting timeout per check:** Set :code:`INFRACHECK_TIMEOUT` environment variable in json file to adjust timeout for given check. 
diff --git a/example/healthchecks/configured/rkd-sh.json b/example/healthchecks/configured/rkd-sh.json new file mode 100644 index 0000000..7ea0b6e --- /dev/null +++ b/example/healthchecks/configured/rkd-sh.json @@ -0,0 +1,6 @@ +{ + "type": "rkd://rkd.standardlib.shell:sh", + "input": { + "-c": "ls -la" + } +} diff --git a/infracheck/infracheck/__init__.py b/infracheck/infracheck/__init__.py index b110f36..c10f19a 100644 --- a/infracheck/infracheck/__init__.py +++ b/infracheck/infracheck/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- __author__ = 'RiotKit' -__email__ = 'riotkit_org@riseup.net' +__email__ = 'riotkit@riseup.net' diff --git a/infracheck/infracheck/bin.py b/infracheck/infracheck/bin.py index aa1dc2a..f726073 100644 --- a/infracheck/infracheck/bin.py +++ b/infracheck/infracheck/bin.py @@ -5,6 +5,8 @@ import os import argparse import json +from enum import Enum +from .http import HttpServer t = sys.argv[0].replace(os.path.basename(sys.argv[0]), "") + "/../" @@ -15,6 +17,17 @@ from .controller import Controller +class LogLevel(Enum): + info = 'info' + debug = 'debug' + warning = 'warning' + error = 'error' + fatal = 'fatal' + + def __str__(self): + return self.value + + def main(): # # Arguments parsing @@ -39,42 +52,38 @@ def main(): action='store_true' ) parser.add_argument( - '--directory', + '--directory', '-d', help='Alternative project directory', default='' ) parser.add_argument( - '--server', - help='Spawn a HTTP server at 7422 port', - default=False, - action='store_true' - ) - parser.add_argument( - '--server-port', + '--server-port', '-p', help='Server port, default is 7422', default=7422 ) parser.add_argument( - '--db-path', + '--db-path', '-b', help='Database path', default='~/.infracheck.sqlite3' ) parser.add_argument( - '--force', - help='Set write mode on cache, so all health checks are always executed', - default=False, - action='store_true' + '--wait', '-w', + help='Seconds between doing checks', + default=0 ) parser.add_argument( - 
'--wait', - help='Seconds between doing checks (works only in --force mode)', - default=0 + '--timeout', '-t', + help='Timeout for a single healthcheck to execute', + default=10 ) parser.add_argument( - '--lazy', - help='If check result is not ready, then the ' + - 'check result will be populated on-demand, even if --force is not active', - default=False, + '--refresh-time', '-r', + help='Refresh time in seconds - how often perform health checks. Defaults to 120 (seconds) = 2 minutes', + default=120 + ) + parser.add_argument( + '--no-server', '-n', + help='Do not run the server, just run all the checks from CLI', action='store_true' ) parser.add_argument( @@ -83,18 +92,38 @@ def main(): 'http://localhost:8000/this-is-a-secret/', default='' ) + parser.add_argument( + '--log-level', '-l', + help='Logging level name - debug, info, warning, error, fatal. Defaults to "info"', + type=LogLevel, + choices=list(LogLevel), + default='info' + ) + parser.add_argument( + '--version', '-v', + help='Get application version', + action='store_true' + ) parsed = parser.parse_args() project_dir = parsed.directory if parsed.directory else os.getcwd() server_port = int(parsed.server_port if parsed.server_port else 7422) server_path_prefix = parsed.server_path_prefix if parsed.server_path_prefix else '' wait_time = int(parsed.wait) + timeout = int(parsed.timeout) + + app = Controller( + project_dir=project_dir, + server_port=server_port, + server_path_prefix=server_path_prefix, + db_path=parsed.db_path, + wait_time=wait_time, + timeout=timeout, + log_level=str(parsed.log_level) + ) - app = Controller(project_dir, server_port, server_path_prefix, - parsed.db_path, wait_time, parsed.lazy, parsed.force) - - if parsed.server: - app.spawn_server() + if parsed.version: + print(app.get_version()['version']) sys.exit(0) # action: --list-all-configurations @@ -119,10 +148,18 @@ def main(): sys.exit(0) # action: perform health checking - result = app.perform_checks(force=parsed.force, 
wait_time=wait_time, lazy=parsed.lazy) - print(json.dumps(result, sort_keys=True, indent=4, separators=(',', ': '))) + if not parsed.no_server: + app.spawn_threaded_application(refresh_time=int(parsed.refresh_time)) + server = HttpServer(app=app, port=server_port, server_path_prefix=server_path_prefix) + server.run() + + sys.exit(0) + + if parsed.no_server: + result = app.perform_checks().to_hash() + print(json.dumps(result, sort_keys=True, indent=4, separators=(',', ': '))) - if not result['global_status']: - sys.exit(1) + if not result['global_status']: + sys.exit(1) sys.exit(0) diff --git a/infracheck/infracheck/config.py b/infracheck/infracheck/config.py index ba63f7d..70d0b88 100644 --- a/infracheck/infracheck/config.py +++ b/infracheck/infracheck/config.py @@ -2,6 +2,9 @@ import os import json +from .exceptions import ConfigurationException +from .rkd_support import is_rkd_check, rkd_module_exists + class ConfigLoader: paths = [] @@ -32,8 +35,14 @@ def _assert_valid_format(config_name: str, data): raise Exception('Configuration "' + config_name + '" needs to specify a name of a check in field "type"') def _assert_has_valid_type(self, type_name: str): + if is_rkd_check(type_name): + if not rkd_module_exists(type_name): + raise ConfigurationException.from_rkd_module_not_existing(type_name) + + return True + if not self._find_file_path('/checks/', type_name, ''): - raise Exception('Invalid check type "' + type_name + '", was looking in: ' + str(self.paths)) + raise ConfigurationException.from_binary_not_found(type_name, self.paths) return diff --git a/infracheck/infracheck/controller.py b/infracheck/infracheck/controller.py index 2d00a34..5560c7c 100644 --- a/infracheck/infracheck/controller.py +++ b/infracheck/infracheck/controller.py @@ -1,88 +1,83 @@ - +import os +import sys +from typing import List +from .model import ExecutedChecksResultList from .runner import Runner from .repository import Repository from .config import ConfigLoader -from .server import 
HttpServer +from .scheduler import Scheduler +from .versioning import get_version +from rkd.api.inputoutput import IO -import os -import time +class Controller(object): + """ + Constructs application context and passes actions to given services that are taking care about the processing + """ -class Controller: - project_dirs = None # type: list - runner = None # type: Runner - repository = None # type: Repository - config_loader = None # type: ConfigLoader - server = None # type: HttpServer + project_dirs: list + runner: Runner + repository: Repository + config_loader: ConfigLoader + io: IO def __init__(self, project_dir: str, server_port: int, server_path_prefix: str, - db_path: str, wait_time: int, lazy: bool, force: bool): + db_path: str, wait_time: int, timeout: int, log_level: str): + self.io = IO() + self.io.set_log_level(log_level) self.project_dirs = self._combine_project_dirs(project_dir) - self.runner = Runner(self.project_dirs) self.config_loader = ConfigLoader(self.project_dirs) self.repository = Repository(self.project_dirs, db_path) - self.server = HttpServer(self, server_port, server_path_prefix, wait_time, lazy, force) - def list_enabled_configs(self): + self.runner = Runner(dirs=self.project_dirs, config_loader=self.config_loader, + repository=self.repository, timeout=timeout, wait_time=wait_time, io=self.io) + + self.scheduler = Scheduler(self.runner, self.repository, self.io) + + def list_enabled_configs(self) -> List[str]: return self.repository.get_configured_checks(with_disabled=False) - def list_available_checks(self): + def list_available_checks(self) -> List[str]: return self.repository.get_available_checks() - def list_all_configs(self): + def list_all_configs(self) -> List[str]: return self.repository.get_configured_checks(with_disabled=True) - def spawn_server(self): - return self.server.run() - - def perform_checks(self, force: bool, wait_time: int = 0, lazy=False): + def spawn_threaded_application(self, refresh_time: int) -> None: """ 
- :param force: Perform checks and write results - :param wait_time: After each check wait (in seconds) - :param lazy: If force not specified, and there is no result, then allow to perform a check on-demand - :return: + Spawns a background worker """ - configs = self.list_enabled_configs() - results = {} - global_status = True - - for config_name in configs: - result = None - - if not force: - cache = self.repository.retrieve_cache(config_name) + self.scheduler.schedule_jobs_in_background(every_seconds=refresh_time) - if cache: - result = cache + @staticmethod + def get_version() -> dict: + """ + Gets Infracheck version + """ - config = self.config_loader.load(config_name) + return { + "version": get_version(), + "python": sys.version + } - if not result: - if lazy or force: - result = self.runner.run(config['type'], config['input'], config.get('hooks', {})) - self.repository.push_to_cache(config_name, result) - else: - result = ["Check not ready", False, ""] + def retrieve_checks(self) -> ExecutedChecksResultList: + """ + Only retrieves results of last checking + """ - results[config_name] = { - 'status': result[1], - 'output': result[0], - 'hooks_output': result[2], - 'ident': config_name + '=' + str(result[1]) - } + return self.runner.get_checks_results(self.list_enabled_configs()) - if not result[1]: - global_status = False + def perform_checks(self) -> ExecutedChecksResultList: + """ + Runs and returns results synchronously + """ - if force and wait_time > 0: - time.sleep(wait_time) + configs = self.list_enabled_configs() - return { - 'checks': results, - 'global_status': global_status - } + self.runner.run_checks(configs) + return self.runner.get_checks_results(configs) @staticmethod def _combine_project_dirs(project_dir: str) -> list: diff --git a/infracheck/infracheck/exceptions.py b/infracheck/infracheck/exceptions.py new file mode 100644 index 0000000..16e79cf --- /dev/null +++ b/infracheck/infracheck/exceptions.py @@ -0,0 +1,47 @@ +from typing import 
Dict + + +class InfracheckException(Exception): + pass + + +class RunnerException(InfracheckException): + @staticmethod + def from_invalid_variable_error(var_name: str, check_name: str, available_vars: Dict[str, str]) \ + -> 'RunnerException': + + return RunnerException( + 'Invalid variable "%s" in check %s. Available variables: %s' % + (var_name, check_name, str(available_vars.keys())) + ) + + @staticmethod + def from_non_existing_executable(check_name: str) -> 'RunnerException': + return RunnerException('Healthcheck executable "{check_name}" does not exist'.format(check_name=check_name)) + + @staticmethod + def from_expected_list_of_hooks(hook_name: str): + return RunnerException('Expected a LIST of hooks in "{hook_name}"'.format(hook_name=hook_name)) + + +class ConfigurationException(InfracheckException): + @classmethod + def from_rkd_module_not_existing(cls, check_name: str) -> 'ConfigurationException': + return cls( + 'RKD module cannot be imported, or task name is invalid: {check_name}'.format(check_name=check_name) + ) + + @classmethod + def from_binary_not_found(cls, check_name: str, paths: list) -> 'ConfigurationException': + return cls( + 'Invalid check type "{check_name}", was looking in: "{paths}"'.format( + check_name=check_name, + paths=str(paths) + ) + ) + + @classmethod + def from_rkd_check_url_error(cls, check_name): + return cls( + 'RiotKit-Do check syntax "{}" is invalid. 
Valid example: rkd://rkd.standardlib.shell:sh'.format(check_name) + ) diff --git a/infracheck/infracheck/http.py b/infracheck/infracheck/http.py new file mode 100644 index 0000000..02423bf --- /dev/null +++ b/infracheck/infracheck/http.py @@ -0,0 +1,66 @@ + +""" +HTTP +==== + +Exposes simple HTTP endpoint with JSON response +""" + +import tornado.ioloop +import tornado.web +import json +from .controller import Controller +from .model import ExecutedChecksResultList + + +class CheckExposingHandler(tornado.web.RequestHandler): # pragma: no cover + app: Controller + + def get(self): + result: ExecutedChecksResultList = self.app.retrieve_checks() + + self.set_status(500 if not result.is_global_status_success() else 200) + self.add_header('Content-Type', 'application/json') + self.write( + json.dumps(result.to_hash(), sort_keys=True, indent=4, separators=(',', ': ')) + ) + + def data_received(self, chunk): + pass + + +class VersionHandler(tornado.web.RequestHandler): # pragma: no cover + app: Controller + + def get(self): + self.set_status(200) + self.add_header('Content-Type', 'application/json') + self.write( + json.dumps(self.app.get_version(), sort_keys=True, indent=4, separators=(',', ': ')) + ) + + def data_received(self, chunk): + pass + + +class HttpServer(object): + app = None + port: int + path_prefix: str + + def __init__(self, app: Controller, port: int, server_path_prefix: str): + self.app = app + self.port = port + self.path_prefix = server_path_prefix + + def run(self): + CheckExposingHandler.app = self.app + VersionHandler.app = self.app + + srv = tornado.web.Application([ + (r"" + self.path_prefix + "/", CheckExposingHandler), + (r"" + self.path_prefix + "/version", VersionHandler) + ]) + + srv.listen(self.port) + tornado.ioloop.IOLoop.current().start() diff --git a/infracheck/infracheck/model.py b/infracheck/infracheck/model.py new file mode 100644 index 0000000..a9f3dd3 --- /dev/null +++ b/infracheck/infracheck/model.py @@ -0,0 +1,71 @@ +from datetime 
import datetime +from typing import Dict + + +class ExecutedCheckResult(object): + """ + Represents a single result of a single check + """ + + output: str + exit_status: bool + hooks_output: str + configured_name: str + refresh_time: datetime + + def __init__(self, configured_name: str, output: str, exit_status: bool, hooks_output: str): + self.configured_name = configured_name + self.output = output + self.exit_status = exit_status + self.hooks_output = hooks_output + self.refresh_time = datetime.now() + + @classmethod + def from_not_ready(cls, configured_name: str): + check = cls( + configured_name=configured_name, + output='Check not ready', + exit_status=False, + hooks_output='' + ) + + check.refresh_time = None + + return check + + def to_hash(self) -> dict: + return { + 'status': self.exit_status, + 'output': self.output, + 'hooks_output': self.hooks_output, + 'ident': self.configured_name + '=' + str(self.exit_status), + 'checked_at': self.refresh_time.strftime('%Y-%m-%d %H-%M-%S') if self.refresh_time else '' + } + + +class ExecutedChecksResultList(object): + checks: Dict[str, ExecutedCheckResult] + + def __init__(self): + self.checks = {} + + def add(self, config_name: str, result: ExecutedCheckResult) -> None: + self.checks[config_name] = result + + def to_hash(self) -> dict: + checks_as_hash = {} + + for name, details in self.checks.items(): + checks_as_hash[name] = details.to_hash() + + return { + 'checks': checks_as_hash, + 'global_status': self.is_global_status_success() + } + + def is_global_status_success(self) -> bool: + for name, details in self.checks.items(): + if not details.exit_status: + return False + + return True diff --git a/infracheck/infracheck/repository.py b/infracheck/infracheck/repository.py index e52c511..aa4ac2d 100644 --- a/infracheck/infracheck/repository.py +++ b/infracheck/infracheck/repository.py @@ -1,19 +1,36 @@ +""" +Repository +========== + +A data source - database, cache, list of checks, etc. 
+""" + import os import posix import sqlite3 -import json import time +from typing import Union, List +from .model import ExecutedCheckResult +from pickle import loads as deserialize, dumps as serialize +from threading import RLock class Repository: + """ + Provides data fetching and caching mechanism + """ + checks_dirs: list configured_dirs: list db: sqlite3.Connection + db_lock: RLock def __init__(self, project_dirs: list, db_path: str = '~/.infracheck.sqlite3'): self.checks_dirs = [] self.configured_dirs = [] + self.db_path = db_path + self.db_lock = RLock() self._connect_to_db(db_path) for path in project_dirs: @@ -21,10 +38,10 @@ def __init__(self, project_dirs: list, db_path: str = '~/.infracheck.sqlite3'): self.configured_dirs.append(path + '/configured') def _connect_to_db(self, db_path: str): - self.db = sqlite3.connect(os.path.expanduser(db_path)) + self.db = sqlite3.connect(os.path.expanduser(db_path), check_same_thread=False) try: - self.db.execute( + self._execute( ''' CREATE TABLE checks_cache ( id INTEGER PRIMARY KEY AUTOINCREMENT, @@ -38,13 +55,13 @@ def _connect_to_db(self, db_path: str): pass try: - self.db.execute('CREATE INDEX check_name_index ON checks_cache(check_name);') + self._execute('CREATE INDEX check_name_index ON checks_cache(check_name);') except sqlite3.OperationalError: pass self.db.commit() - def get_configured_checks(self, with_disabled=False): + def get_configured_checks(self, with_disabled: bool = False) -> List[str]: found = [] for path in self.configured_dirs: @@ -63,7 +80,7 @@ def get_configured_checks(self, with_disabled=False): return list(set(found)) - def get_available_checks(self): + def get_available_checks(self) -> List[str]: results = [] for path in self.checks_dirs: @@ -77,28 +94,37 @@ def map_checks_to_name(check: posix.DirEntry): return list(set(results)) def _purge_cache(self, check_name: str): - self.db.execute('DELETE FROM checks_cache WHERE check_name = ?', [check_name]) + self._execute('DELETE FROM 
checks_cache WHERE check_name = ?', [check_name]) - def push_to_cache(self, check_name: str, data): + def push_to_cache(self, check_name: str, data: ExecutedCheckResult): self._purge_cache(check_name) - self.db.execute( + + self._execute( ''' INSERT INTO checks_cache (id, check_name, data, date_added) VALUES (NULL, ?, ?, ?); ''', - [check_name, json.dumps(data), time.time()] + [check_name, serialize(data), time.time()] ) self.db.commit() - def retrieve_cache(self, check_name: str): - cursor = self.db.execute('SELECT data FROM checks_cache WHERE check_name = ?', [check_name]) + def _execute(self, query: str, parameters = None): + if parameters is None: + parameters = [] + + with self.db_lock: + return self.db.execute(query, parameters) + + def retrieve_from_cache(self, check_name: str) -> Union[ExecutedCheckResult, None]: + cursor = self._execute('SELECT data, date_added FROM checks_cache WHERE check_name = ? ' + + 'ORDER BY date_added DESC LIMIT 1', [check_name]) data = cursor.fetchone() if not data: return None try: - return json.loads(data[0]) + return deserialize(data[0]) except: return None diff --git a/infracheck/infracheck/rkd_support.py b/infracheck/infracheck/rkd_support.py new file mode 100644 index 0000000..cee4c90 --- /dev/null +++ b/infracheck/infracheck/rkd_support.py @@ -0,0 +1,62 @@ +""" +RKD Support +=========== + +Methods used to add RiotKit-Do support in the InfraCheck +""" + +import sys +from typing import Tuple + +from rkd.exception import ParsingException +from rkd.api.contract import TaskDeclarationInterface +from rkd.api.parsing import SyntaxParsing +from .exceptions import ConfigurationException + + +def is_rkd_check(check_name: str) -> bool: + schema_length = len('rkd://') + + if not check_name[0:schema_length] == 'rkd://': + return False + + return ":" in check_name[schema_length:] + + +def split_rkd_path(check_name: str) -> Tuple[str, str]: + if not is_rkd_check(check_name): + raise 
ConfigurationException.from_rkd_check_url_error(check_name) + + module_name, task_name = check_name[len('rkd://'):].split(':', 1) + + return module_name, ':' + task_name + + +def prepare_rkd_check_bin_path(check_name: str) -> list: + module_name, task_name = split_rkd_path(check_name) + + return [sys.executable, '-m', 'rkd', '--imports', module_name, task_name] + + +def rkd_module_exists(check_name: str) -> bool: + module_name, task_name = split_rkd_path(check_name) + + try: + tasks = SyntaxParsing.parse_imports_by_list_of_classes([module_name]) + + except ParsingException: + return False + + for task in tasks: + task: TaskDeclarationInterface + + if task.get_task_to_execute().get_name() == task_name: + return True + + return False + + +def add_rkd_environment_variables(env: dict, check_name: str) -> dict: + env['RKD_UI'] = 'false' + + return env diff --git a/infracheck/infracheck/runner.py b/infracheck/infracheck/runner.py index 5b94f35..8762bee 100644 --- a/infracheck/infracheck/runner.py +++ b/infracheck/infracheck/runner.py @@ -1,36 +1,168 @@ +""" +Runner +====== + +Executes checks and captures result +""" + import subprocess import os import json import re +import time from datetime import datetime +from rkd.api.inputoutput import IO +from .exceptions import RunnerException +from .model import ExecutedCheckResult, ExecutedChecksResultList +from .repository import Repository +from .config import ConfigLoader +from .rkd_support import is_rkd_check, prepare_rkd_check_bin_path, add_rkd_environment_variables -class Runner: - paths = [] +class Runner(object): + paths: list timeout: int + wait_time: int + config_loader: ConfigLoader + repository: Repository + io: IO + + def __init__(self, dirs: list, config_loader: ConfigLoader, repository: Repository, io: IO, + timeout: int = 1800, wait_time: int = 0): - def __init__(self, dirs, timeout: int = 1800): self.timeout = timeout + self.wait_time = wait_time + self.paths = [] + self.config_loader = config_loader + 
self.repository = repository + self.io = io for path in dirs: self.paths.append(path + '/checks') - def run(self, check_name: str, input_data: dict, hooks: dict) -> tuple: + def run_single_check(self, configured_name: str, check_name: str, input_data: dict, hooks: dict) \ + -> ExecutedCheckResult: + + """ + Runs a single check and returns result + + :param configured_name: + :param check_name: + :param input_data: + :param hooks: + :return: + """ + + self.io.debug('Executing check {}'.format(configured_name)) bin_path = self._get_check_path(check_name) + bin_path = self._append_commandline_switches(input_data, bin_path) try: + self.io.debug('bin_path=' + str(bin_path)) + env = {**dict(os.environ), **self._prepare_data(check_name, input_data)} - output = subprocess.check_output(bin_path, env=env, stderr=subprocess.PIPE, timeout=self.timeout) + env = self._add_environment_variables(env, check_name) + + timeout = env['INFRACHECK_TIMEOUT'] if 'INFRACHECK_TIMEOUT' in env else self.timeout + + output = subprocess.check_output(bin_path, env=env, stderr=subprocess.PIPE, timeout=timeout) exit_status = True except subprocess.CalledProcessError as e: output = e.output + e.stderr + self.io.warn('{} returned error: {}'.format(configured_name, output.decode('utf-8'))) exit_status = False - hooks_out = self._notify_hooks(hooks, exit_status) + except subprocess.TimeoutExpired as e: + output = b'Timed out: ' + + if e.output: + output += e.output + + if e.stderr: + output += e.stderr + + self.io.error('{} timed out and returned: {}'.format(configured_name, output.decode('utf-8'))) + exit_status = False + + self.io.debug('Execution finished, running hooks...') + hooks_out = self._notify_hooks(hooks, exit_status, self.io) + + return ExecutedCheckResult( + output=output.decode('utf-8'), + exit_status=exit_status, + hooks_output=hooks_out, + configured_name=configured_name + ) + + def run_checks(self, enabled_configs: list) -> None: + """ + Runs checks one-by-one and saves to cache + + 
:param enabled_configs: List of enabled configuration files (json files) + :return: + """ + + for config_name in enabled_configs: + result = None + config = self.config_loader.load(config_name) + + if not result: + result = self.run_single_check(config_name, config['type'], config['input'], config.get('hooks', {})) + self.repository.push_to_cache(config_name, result) + + if self.wait_time > 0: + time.sleep(self.wait_time) + + def get_checks_results(self, enabled_configs: list) -> ExecutedChecksResultList: + """ + Get results previously generated by runner - return output.decode('utf-8'), exit_status, hooks_out + :param enabled_configs: + :return: + """ + + results = ExecutedChecksResultList() + + for config_name in enabled_configs: + result = self.repository.retrieve_from_cache(config_name) + + if not result: + result = ExecutedCheckResult.from_not_ready(configured_name=config_name) + + results.add(config_name, result) + + return results + + @staticmethod + def _add_environment_variables(env: dict, check_name: str): + if is_rkd_check(check_name): + env = add_rkd_environment_variables(env, check_name) + + return env + + @staticmethod + def _append_commandline_switches(input_data: dict, bin_path: list) -> list: + """ + Inject commandline switches + + :param input_data: + :param bin_path: + :return: + """ + + for name, value in input_data.items(): + name: str + + if name.startswith('--'): + bin_path.append(name + '=' + str(value)) + + elif name.startswith('-'): + bin_path.append(name) + bin_path.append(str(value)) + + return bin_path @staticmethod def _prepare_data(check_name: str, input_data: dict): @@ -39,17 +171,24 @@ def _prepare_data(check_name: str, input_data: dict): output_data = {} for key, value in input_data.items(): + key: str + if type(value) == dict or type(value) == list: value = json.dumps(value) + if key.startswith('-'): + continue + output_data[key.upper()] = Runner._inject_variables(check_name, str(value)) return output_data @staticmethod def 
_inject_variables(check_name: str, value: str) -> str: - """ Inject variables, including environment variables from host, - to allow for example secure passing of passwords""" + """ + Inject variables, including environment variables from host, + to allow for example secure passing of passwords + """ matches = re.findall(r'\${([A-Za-z0-9_.]+)\}', value) @@ -59,6 +198,7 @@ def _inject_variables(check_name: str, value: str) -> str: variables = { 'checkName': check_name, 'date': datetime.now().isoformat(), + 'timestamp': str(datetime.now().timestamp()) } for env_name, env_value in os.environ.items(): @@ -66,22 +206,24 @@ def _inject_variables(check_name: str, value: str) -> str: for match in matches: if match not in variables: - raise Exception('Invalid variable "%s" in check %s. Available variables: %s' % - (match, check_name, str(variables.keys()))) + raise RunnerException.from_invalid_variable_error(match, check_name, variables) value = value.replace('${%s}' % match, variables[match]) return value - def _get_check_path(self, check_name: str): + def _get_check_path(self, check_name: str) -> list: + if is_rkd_check(check_name): + return prepare_rkd_check_bin_path(check_name) + for path in self.paths: if os.path.isfile(path + '/' + check_name): - return path + '/' + check_name + return [path + '/' + check_name] - raise Exception('Healthcheck executable "' + check_name + '" does not exist') + raise RunnerException.from_non_existing_executable(check_name) @staticmethod - def _notify_hooks(hooks: dict, exit_status: bool) -> str: + def _notify_hooks(hooks: dict, exit_status: bool, io: IO) -> str: mapping = { True: 'on_each_up', False: 'on_each_down' @@ -93,9 +235,23 @@ def _notify_hooks(hooks: dict, exit_status: bool) -> str: commands = hooks[mapping[exit_status]] if type(commands).__name__ != 'list': - raise Exception('Expected a LIST of hooks in "' + mapping[exit_status] + '"') + raise RunnerException.from_expected_list_of_hooks(mapping[exit_status]) for command in 
commands: - out += subprocess.check_output(command, shell=True).decode('utf-8').strip() + io.debug('Triggering hook command "{}"'.format(command)) + + try: + out += subprocess.check_output(command, shell=True, timeout=1800).decode('utf-8').strip() + + except subprocess.CalledProcessError as e: + io.error('Cannot execute hook command "{cmd}". Error: {err}'.format( + cmd=command, err=str(e.output) + str(e.stderr)) + ) + except subprocess.TimeoutExpired: + io.error('Cannot execute hook command "{cmd}. Timed out while executing command"'.format( + cmd=command) + ) + except Exception: + io.error('Cannot execute hook command "{cmd}. Unknown error"'.format(cmd=command)) return out diff --git a/infracheck/infracheck/scheduler.py b/infracheck/infracheck/scheduler.py new file mode 100644 index 0000000..4e89b52 --- /dev/null +++ b/infracheck/infracheck/scheduler.py @@ -0,0 +1,45 @@ + +""" +Scheduler +========= + +Schedules execution of health checks verification every X seconds. +""" +import time +from threading import Thread +from traceback import format_exc +from .repository import Repository +from .runner import Runner +from rkd.api.inputoutput import IO + + +class Scheduler(object): + runner: Runner + thread: Thread + repository: Repository + io: IO + + def __init__(self, runner: Runner, repository: Repository, io: IO): + self.runner = runner + self.repository = repository + self.io = io + + def schedule_jobs_in_background(self, every_seconds: int): + self.thread = Thread(target=self._run_checks_infinitely, args=(self, every_seconds)) + self.thread.setDaemon(True) + self.thread.start() + + def _run_checks_infinitely(self, scheduler, every_seconds: int): + while True: + try: + configured_checks = self.repository.get_configured_checks(with_disabled=False) + + self.io.info('Running {} checks...'.format(len(configured_checks))) + self.runner.run_checks(configured_checks) + + except Exception: + self.io.error('Exception happened during processing') + 
self.io.error(format_exc()) + + self.io.debug('Sleeping {}s'.format(every_seconds)) + time.sleep(every_seconds) diff --git a/infracheck/infracheck/server.py b/infracheck/infracheck/server.py deleted file mode 100644 index bafa0e9..0000000 --- a/infracheck/infracheck/server.py +++ /dev/null @@ -1,49 +0,0 @@ - -import tornado.ioloop -import tornado.web -import json - - -class MainHandler(tornado.web.RequestHandler): # pragma: no cover - app = None - wait_time = 0 - lazy = False - force = False - - def get(self): - result = self.app.perform_checks(force=self.force, wait_time=self.wait_time, lazy=self.lazy) - self.set_status(500 if not result['global_status'] else 200) - self.add_header('Content-Type', 'application/json') - self.write( - json.dumps(result, sort_keys=True, indent=4, separators=(',', ': ')) - ) - - def data_received(self, chunk): - pass - - -class HttpServer: - app = None - port: int - path_prefix: str - wait_time: int - lazy: bool - force: bool - - def __init__(self, app, port: int, server_path_prefix: str, wait_time: int, lazy: bool, force: bool): - self.app = app - self.port = port - self.path_prefix = server_path_prefix - self.wait_time = wait_time - self.lazy = lazy - self.force = force - - def run(self): - MainHandler.app = self.app - MainHandler.wait_time = self.wait_time - MainHandler.lazy = self.lazy - MainHandler.force = self.force - - srv = tornado.web.Application([(r"" + self.path_prefix + "/", MainHandler)]) - srv.listen(self.port) - tornado.ioloop.IOLoop.current().start() diff --git a/infracheck/infracheck/versioning.py b/infracheck/infracheck/versioning.py new file mode 100644 index 0000000..8a82c4d --- /dev/null +++ b/infracheck/infracheck/versioning.py @@ -0,0 +1,5 @@ +import pkg_resources + + +def get_version(): + return pkg_resources.get_distribution('infracheck').version diff --git a/requirements.txt b/requirements.txt index d3add8a..d75662b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -9,5 +9,5 @@ whois >= 0.9.4, < 1 
influxdb >= 5.3.1, < 6 msgpack >= 1.0, < 2 -rkd>=2.2, <3 -rkd-python>=2.2, <3 +rkd>=2.3.3, <3 +rkd-python>=2.3.3, <3 diff --git a/setup.py b/setup.py old mode 100644 new mode 100755 diff --git a/tests/functional_test_config.py b/tests/functional_test_config.py new file mode 100644 index 0000000..c20a53a --- /dev/null +++ b/tests/functional_test_config.py @@ -0,0 +1,34 @@ +import os +import sys +import inspect +from rkd.api.testing import BasicTestingCase + + +TESTS_PATH = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) + '/../' +sys.path.insert(0, TESTS_PATH) + +from infracheck.infracheck.config import ConfigLoader + + +class ConfigTest(BasicTestingCase): + def test_load_finds_file_successfully(self): + """ + Loads a check configuration with a success + """ + + loader = ConfigLoader([TESTS_PATH + '/example/healthchecks', TESTS_PATH + '/infracheck']) + check = loader.load('ram') + + self.assertEqual({'type': 'free-ram', 'input': {'max_ram_percentage': '85'}}, check) + + def test_load_does_not_find_file(self): + loader = ConfigLoader([TESTS_PATH + '/example/healthchecks', TESTS_PATH + '/infracheck']) + + self.assertRaises(FileNotFoundError, lambda: loader.load('not-existing')) + + def test_assert_valid_format(self): + with self.subTest('Success case'): + ConfigLoader._assert_valid_format('Durruti', {'type': 'curl'}) + + with self.subTest('Failure case - missing "type" attribute'): + self.assertRaises(Exception, lambda: ConfigLoader._assert_valid_format('Some', {})) diff --git a/tests/functional_test_ssh_command_check.py b/tests/functional_test_ssh_command_check.py index 6bdb70a..2e03172 100644 --- a/tests/functional_test_ssh_command_check.py +++ b/tests/functional_test_ssh_command_check.py @@ -11,12 +11,9 @@ class SshCommandCheckTest(SSHServerContainerRequirement, unittest.TestCase): docker_client: docker.DockerClient def test_fingerprint_will_be_fetched_first_time(self): - stdout: str - result: int - known_hosts_file = 
NamedTemporaryFile(delete=False) - stdout, result, hooks_output = run_check('ssh-command', { + result = run_check('ssh-command', { 'HOST': 'localhost', 'PORT': 3222, 'USER': 'root', @@ -27,16 +24,13 @@ def test_fingerprint_will_be_fetched_first_time(self): }, {}) os.unlink(known_hosts_file.name) - self.assertTrue(result) + self.assertTrue(result.exit_status) def test_not_passed_host_raises_human_readable_message(self): - stdout: str - result: int - - stdout, result, hooks_output = run_check('ssh-command', {}, {}) + result = run_check('ssh-command', {}, {}) - self.assertIn('HOST is mandatory', stdout) - self.assertFalse(result) + self.assertIn('HOST is mandatory', result.output.strip()) + self.assertFalse(result.exit_status) def test_success_case(self): """ @@ -44,10 +38,7 @@ def test_success_case(self): :return: """ - stdout: str - result: int - - stdout, result, hooks_output = run_check('ssh-command', { + result = run_check('ssh-command', { 'HOST': 'localhost', 'PORT': 3222, 'USER': 'root', @@ -58,14 +49,11 @@ def test_success_case(self): 'UNEXPECTED_KEYWORDS': 'Darwin' }, {}) - self.assertEqual('OK', stdout.strip()) - self.assertTrue(result) + self.assertEqual('OK', result.output.strip()) + self.assertTrue(result.exit_status) def test_invalid_password(self): - stdout: str - result: int - - stdout, result, hooks_output = run_check('ssh-command', { + result = run_check('ssh-command', { 'HOST': 'localhost', 'PORT': 3222, 'USER': 'root', @@ -73,5 +61,5 @@ def test_invalid_password(self): 'SSH_OPTS': '-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null' }, {}) - self.assertIn('Permission denied, please try again.', stdout) - self.assertFalse(result) + self.assertIn('Permission denied, please try again.', result.output.strip()) + self.assertFalse(result.exit_status) diff --git a/tests/functional_test_ssh_files_checksum_check.py b/tests/functional_test_ssh_files_checksum_check.py index e4ef1d1..edec414 100644 --- 
a/tests/functional_test_ssh_files_checksum_check.py +++ b/tests/functional_test_ssh_files_checksum_check.py @@ -9,19 +9,13 @@ class SshCommandCheckTest(SSHServerContainerRequirement, unittest.TestCase): docker_client: docker.DockerClient def test_not_passed_host_raises_human_readable_message(self): - stdout: str - result: int + result = run_check('ssh-files-checksum', {}, {}) - stdout, result, hooks_output = run_check('ssh-files-checksum', {}, {}) - - self.assertIn('HOST is mandatory', stdout) - self.assertFalse(result) + self.assertIn('HOST is mandatory', result.output.strip()) + self.assertFalse(result.exit_status) def test_success_case(self): - stdout: str - result: int - - stdout, result, hooks_output = run_check('ssh-files-checksum', { + result = run_check('ssh-files-checksum', { 'HOST': 'localhost', 'PORT': 3222, 'USER': 'root', @@ -32,14 +26,11 @@ def test_success_case(self): } }, {}) - self.assertEqual('All checksums are matching', stdout.strip()) - self.assertTrue(result) + self.assertEqual('All checksums are matching', result.output.strip()) + self.assertTrue(result.exit_status) def test_at_least_one_checksum_not_matching(self): - stdout: str - result: int - - stdout, result, hooks_output = run_check('ssh-files-checksum', { + result = run_check('ssh-files-checksum', { 'HOST': 'localhost', 'PORT': 3222, 'USER': 'root', @@ -51,5 +42,5 @@ def test_at_least_one_checksum_not_matching(self): } }, {}) - self.assertIn("FAIL: '/bin/sh' checksum is not matching. Expected: 'will-not-match-this-one'", stdout.strip()) - self.assertFalse(result) + self.assertIn("FAIL: '/bin/sh' checksum is not matching. 
Expected: 'will-not-match-this-one'", result.output.strip()) + self.assertFalse(result.exit_status) diff --git a/tests/functional_test_ssh_fingerprint_check.py b/tests/functional_test_ssh_fingerprint_check.py index e21729e..15427c1 100644 --- a/tests/functional_test_ssh_fingerprint_check.py +++ b/tests/functional_test_ssh_fingerprint_check.py @@ -9,42 +9,33 @@ class SshFingerprintTest(SSHServerContainerRequirement, unittest.TestCase): docker_client: docker.DockerClient def test_success_case(self): - stdout: str - result: int - # we create SSH server for testing dynamically in a docker container, so each time it has a different identity current_expected_fingerprint = self.get_current_ssh_server_fingerprint() - stdout, result, hooks_output = run_check('ssh-fingerprint', { + result = run_check('ssh-fingerprint', { 'HOST': 'localhost', 'PORT': 3222, 'EXPECTED_FINGERPRINT': current_expected_fingerprint }, {}) - self.assertEqual('Fingerprint is OK', stdout.strip()) - self.assertTrue(result) + self.assertEqual('Fingerprint is OK', result.output.strip()) + self.assertTrue(result.exit_status) def test_invalid_fingerprint(self): - stdout: str - result: int - - stdout, result, hooks_output = run_check('ssh-fingerprint', { + result = run_check('ssh-fingerprint', { 'HOST': 'localhost', 'PORT': 3222, 'EXPECTED_FINGERPRINT': 'SOME FINGERPRINT THAT DOES NOT MATCH SERVER FINGERPRINT' }, {}) - self.assertIn('Fingerprint does not match', stdout.strip()) - self.assertFalse(result) + self.assertIn('Fingerprint does not match', result.output.strip()) + self.assertFalse(result.exit_status) def test_missing_host_parameter(self): - stdout: str - result: int - - stdout, result, hooks_output = run_check('ssh-fingerprint', { + result = run_check('ssh-fingerprint', { 'PORT': 3222, 'EXPECTED_FINGERPRINT': 'SOME FINGERPRINT THAT DOES NOT MATCH SERVER FINGERPRINT' }, {}) - self.assertIn('You need to provide a HOST', stdout.strip()) - self.assertFalse(result) + self.assertIn('You need to provide a 
HOST', result.output.strip()) + self.assertFalse(result.exit_status) diff --git a/tests/unit_test_controller.py b/tests/unit_test_controller.py index 19676fd..53a8a44 100644 --- a/tests/unit_test_controller.py +++ b/tests/unit_test_controller.py @@ -1,3 +1,4 @@ +import tempfile import unittest import sys import os @@ -142,30 +143,32 @@ def provide_data(): @data_provider(provide_data) def test_simply_perform_checks(self, config: dict, expected_result: bool, expected_ident: str, expected_hooks_output: str): - controller = Controller( - project_dir=path, - server_port=8000, - server_path_prefix='', - db_path='/tmp/.infracheck.sqlite3', - wait_time=0, - lazy=True, - force=True - ) - # mocks - controller.list_enabled_configs = get_enabled_configs_mock - controller.config_loader.load = mock.Mock() + with tempfile.NamedTemporaryFile() as f: + controller = Controller( + project_dir=path, + server_port=8000, + server_path_prefix='', + db_path=f.name, + wait_time=0, + timeout=60, + log_level='info' + ) - with mock.patch.object(controller.config_loader, 'load', return_value=config): - performed = controller.perform_checks(force=True, lazy=True) + # mocks + controller.list_enabled_configs = get_enabled_configs_mock + controller.config_loader.load = mock.Mock() - self.assertEqual(expected_result, performed['checks']['example-check']['status']) + with mock.patch.object(controller.config_loader, 'load', return_value=config): + performed = controller.perform_checks() + + self.assertEqual(expected_result, performed.to_hash()['checks']['example-check']['status']) # ident - important for monitoring - self.assertEqual(expected_ident, performed['checks']['example-check']['ident']) + self.assertEqual(expected_ident, performed.to_hash()['checks']['example-check']['ident']) # hooks for notifying - self.assertEqual(expected_hooks_output, performed['checks']['example-check']['hooks_output']) + self.assertEqual(expected_hooks_output, 
performed.to_hash()['checks']['example-check']['hooks_output']) def get_enabled_configs_mock(): diff --git a/tests/unit_test_executed_checks_result_list.py b/tests/unit_test_executed_checks_result_list.py new file mode 100644 index 0000000..ebbc092 --- /dev/null +++ b/tests/unit_test_executed_checks_result_list.py @@ -0,0 +1,67 @@ +import sys +import os +import inspect +from rkd.api.testing import BasicTestingCase + +path = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) + '/../' +sys.path.insert(0, path) + +from infracheck.infracheck.model import ExecutedChecksResultList, ExecutedCheckResult + + +class ExecutedChecksResultListTest(BasicTestingCase): + def test_is_global_status_success(self): + """ + Checks if global status of the endpoint works as expected in basing on checks results + """ + + with self.subTest('One check is failing, then global status is failure'): + results = ExecutedChecksResultList() + results.add('first', ExecutedCheckResult( + configured_name='first', + output='Test', + exit_status=False, + hooks_output='' + )) + results.add('second', ExecutedCheckResult( + configured_name='second', + output='Test', + exit_status=True, + hooks_output='' + )) + + self.assertFalse(results.is_global_status_success()) + + with self.subTest('All checks are passing, then we have a success'): + results = ExecutedChecksResultList() + results.add('first', ExecutedCheckResult( + configured_name='first', + output='Test', + exit_status=True, + hooks_output='' + )) + results.add('second', ExecutedCheckResult( + configured_name='second', + output='Test', + exit_status=True, + hooks_output='' + )) + + self.assertTrue(results.is_global_status_success()) + + with self.subTest('All checks are failing, then we have a failure'): + results = ExecutedChecksResultList() + results.add('first', ExecutedCheckResult( + configured_name='first', + output='Test', + exit_status=False, + hooks_output='' + )) + results.add('second', ExecutedCheckResult( + 
configured_name='second', + output='Test', + exit_status=False, + hooks_output='' + )) + + self.assertFalse(results.is_global_status_success()) diff --git a/tests/unit_test_model_executed_check_result.py b/tests/unit_test_model_executed_check_result.py new file mode 100644 index 0000000..b8e2dae --- /dev/null +++ b/tests/unit_test_model_executed_check_result.py @@ -0,0 +1,38 @@ +import sys +import os +import inspect +from datetime import datetime + +from rkd.api.testing import BasicTestingCase + +path = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) + '/../' +sys.path.insert(0, path) + +from infracheck.infracheck.model import ExecutedCheckResult + + +class ExecutedCheckResultTest(BasicTestingCase): + def test_from_not_ready(self): + result = ExecutedCheckResult.from_not_ready('Durruti') + + self.assertEqual(False, result.exit_status) + self.assertIsNone(result.refresh_time) + + def test_to_hash(self): + check = ExecutedCheckResult( + configured_name='Durruti', + output='Viva la revolution!', + exit_status=True, + hooks_output='A las barricadas!' 
+ ) + + check.refresh_time = datetime(2020, 11, 27, 23, 40, 18) # mock the time + as_hash: dict = check.to_hash() + + self.assertEqual({ + 'checked_at': '2020-11-27 23-40-18', + 'hooks_output': 'A las barricadas!', + 'ident': 'Durruti=True', + 'output': 'Viva la revolution!', + 'status': True + }, as_hash) diff --git a/tests/unit_test_repository.py b/tests/unit_test_repository.py index 3219030..18386cb 100644 --- a/tests/unit_test_repository.py +++ b/tests/unit_test_repository.py @@ -17,7 +17,7 @@ def test_returns_all_checks(self): self.assertEqual( sorted([ 'is_dd_accessible', 'docker-health', 'domain-expiration', 'some_port_is_open', - 'disk-space', 'hello-test-custom-check-example', 'ram']), + 'disk-space', 'hello-test-custom-check-example', 'ram', 'rkd-sh']), sorted(repository.get_configured_checks(with_disabled=False)) ) diff --git a/tests/unit_test_rkd_support.py b/tests/unit_test_rkd_support.py new file mode 100644 index 0000000..0b47f93 --- /dev/null +++ b/tests/unit_test_rkd_support.py @@ -0,0 +1,57 @@ +import unittest +import sys +import os +import inspect + +TESTS_PATH = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) + '/../' +sys.path.insert(0, TESTS_PATH) + +from infracheck.infracheck.rkd_support import * + + +class RkdSupportTest(unittest.TestCase): + def test_is_rkd_check(self): + """ + Checks if RKD "url" has correct format + """ + + with self.subTest('Empty url'): + self.assertFalse(is_rkd_check('')) + + with self.subTest('Empty url - only schema present'): + self.assertFalse(is_rkd_check('rkd://')) + + with self.subTest('Url with only import name, without task name'): + self.assertFalse(is_rkd_check('rkd://rkd.standardlib.shell')) + + with self.subTest('Fully complete url'): + self.assertTrue(is_rkd_check('rkd://rkd.standardlib.shell:sh')) + + def test_split_rkd_path(self): + """ + Splits URL into module and task names + """ + + self.assertEqual( + ('rkd.standardlib.shell', ':test'), + 
split_rkd_path('rkd://rkd.standardlib.shell:test') + ) + + def test_prepare_rkd_check_bin_path(self): + self.assertIn( + "'-m', 'rkd', '--imports', 'rkd.standardlib.shell', ':test'", + str(prepare_rkd_check_bin_path('rkd://rkd.standardlib.shell:test')) + ) + + def test_rkd_module_exists(self): + with self.subTest('Success case - module and task are found'): + self.assertTrue(rkd_module_exists('rkd://rkd.standardlib.shell:sh')) + + with self.subTest('Failure case - module not found'): + self.assertFalse(rkd_module_exists('rkd://rkd.standardlib.not_existing_name:sh')) + + with self.subTest('Failure case - task not found in module'): + self.assertFalse(rkd_module_exists('rkd://rkd.standardlib.shell:non-existing-task')) + + def test_add_rkd_environment_variables(self): + self.assertEqual({'RKD_UI': 'false'}, add_rkd_environment_variables({}, 'test')) diff --git a/tests/unit_test_runner.py b/tests/unit_test_runner.py index 7912968..41e56c5 100644 --- a/tests/unit_test_runner.py +++ b/tests/unit_test_runner.py @@ -4,17 +4,16 @@ import inspect from unittest_data_provider import data_provider -path = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) + '/../' -sys.path.insert(0, path) +TESTS_PATH = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) + '/../' +sys.path.insert(0, TESTS_PATH) -try: - from .infracheck.infracheck.runner import Runner -except ImportError as e: - from infracheck.infracheck.runner import Runner +from infracheck.infracheck.runner import Runner +from infracheck.infracheck.config import ConfigLoader +from infracheck.infracheck.repository import Repository +from rkd.api.inputoutput import IO class RunnerTest(unittest.TestCase): - def provide_data(): return [ # url does not exist: will return False @@ -49,14 +48,26 @@ def provide_data(): @data_provider(provide_data) def test_run_with_hooks(self, check_type: str, input_data: dict, hooks: dict, expected_result: bool, expected_text: str): - runner = 
Runner([path + '/../example/healthchecks', path + '/infracheck/']) - out = runner.run(check_type, input_data, hooks) + result = self._create_runner().run_single_check('some-check-name', check_type, input_data, hooks) - self.assertEqual(expected_result, out[1]) - self.assertEqual(expected_text, out[2]) + self.assertEqual(expected_result, result.exit_status) + self.assertEqual(expected_text, result.hooks_output) def test_injects_variables(self): - runner = Runner([path + '/../example/healthchecks', path + '/infracheck/']) - out = runner.run('printr', {'message': 'Current user is ${ENV.USER}, running a ${checkName}'}, {}) + out = self._create_runner().run_single_check( + configured_name='some-check-name', + check_name='printr', + input_data={'message': 'Current user is ${ENV.USER}, running a ${checkName}'}, + hooks={} + ) + + self.assertEqual('Current user is ' + os.environ['USER'] + ', running a printr', out.output.strip()) + + @staticmethod + def _create_runner() -> Runner: + dirs = [TESTS_PATH + '/../example/healthchecks', TESTS_PATH + '/infracheck/'] - self.assertEqual('Current user is ' + os.environ['USER'] + ', running a printr', out[0].strip()) + return Runner(dirs, + config_loader=ConfigLoader(dirs), + repository=Repository(dirs), + io=IO()) diff --git a/tests/utils/__init__.py b/tests/utils/__init__.py index 07ba0c3..21334ae 100644 --- a/tests/utils/__init__.py +++ b/tests/utils/__init__.py @@ -7,8 +7,18 @@ sys.path.insert(0, path) from infracheck.infracheck.runner import Runner +from infracheck.infracheck.config import ConfigLoader +from infracheck.infracheck.repository import Repository +from infracheck.infracheck.model import ExecutedCheckResult +from rkd.api.inputoutput import IO -def run_check(check_type: str, input_data: dict, hooks: dict): - runner = Runner([path + '/../../example/healthchecks', path + '/../infracheck/']) - return runner.run(check_type, input_data, hooks) +def run_check(check_type: str, input_data: dict, hooks: dict) -> 
ExecutedCheckResult: + project_dirs = [path + '/../../example/healthchecks', path + '/../infracheck/'] + + runner = Runner(project_dirs, + config_loader=ConfigLoader(project_dirs), + repository=Repository(project_dirs), + io=IO()) + + return runner.run_single_check('example-check', check_type, input_data, hooks)