diff --git a/.github/CHANGELOG.md b/.github/CHANGELOG.md index 64a0697ece..344ad10e0f 100644 --- a/.github/CHANGELOG.md +++ b/.github/CHANGELOG.md @@ -2,7 +2,10 @@ [**Upgrade Guide**](https://intelowl.readthedocs.io/en/latest/Installation.md#update-to-the-most-recent-version) -## [v6.0.2](https://github.com/intelowlproject/IntelOwl/releases/tag/v6.0.1) +## [v6.0.4](https://github.com/intelowlproject/IntelOwl/releases/tag/v6.0.4) +Mostly adjustments and fixes, with a few new analyzers: Vulners and the AILTypoSquatting library. + +## [v6.0.2](https://github.com/intelowlproject/IntelOwl/releases/tag/v6.0.2) Major fixes and adjustments. We improved the documentation to help the transition to the new major version. We added **Pivot** buttons to enable manual Pivoting from an Observable/File analysis to another. See [Doc](https://intelowl.readthedocs.io/en/latest/Usage.html#pivots) for more info diff --git a/.github/workflows/pull_request_automation.yml b/.github/workflows/pull_request_automation.yml index 2de2556c45..55fc2e5e3a 100644 --- a/.github/workflows/pull_request_automation.yml +++ b/.github/workflows/pull_request_automation.yml @@ -87,7 +87,6 @@ jobs: BUILDKIT_PROGRESS: "plain" STAGE: "ci" REPO_DOWNLOADER_ENABLED: false - WATCHMAN: false - name: Startup script launch (Fast) if: "!contains(github.base_ref, 'master')" @@ -98,7 +97,6 @@ jobs: BUILDKIT_PROGRESS: "plain" STAGE: "ci" REPO_DOWNLOADER_ENABLED: false - WATCHMAN: false - name: Docker debug if: always() diff --git a/.gitignore b/.gitignore index 1ff56b270c..978b5a8152 100644 --- a/.gitignore +++ b/.gitignore @@ -50,3 +50,6 @@ coverage.xml *.cover .hypothesis/ /.env + +# post run dev +integrations/malware_tools_analyzers/clamav/sigs \ No newline at end of file diff --git a/api_app/analyzers_manager/file_analyzers/detectiteasy.py b/api_app/analyzers_manager/file_analyzers/detectiteasy.py new file mode 100644 index 0000000000..3ac5e35e49 --- /dev/null +++ b/api_app/analyzers_manager/file_analyzers/detectiteasy.py @@ -0,0 +1,60 @@ +import logging + +from api_app.analyzers_manager.classes import DockerBasedAnalyzer, FileAnalyzer +from tests.mock_utils import MockUpResponse + +logger = logging.getLogger(__name__) + + +class DetectItEasy(FileAnalyzer, DockerBasedAnalyzer): + name: str = "executable_analyzer" + url: str = "http://malware_tools_analyzers:4002/die" + # http request polling max number of tries + max_tries: int = 10 + # interval between http request polling (in secs) + poll_distance: int = 1 + + def update(self): + pass + + def run(self): + fname = str(self.filename).replace("/", "_").replace(" ", "_") + # get the file to send + binary = self.read_file_bytes() + args = [f"@{fname}", "--json"] + req_data = { + "args": args, + } + req_files = {fname: binary} + logger.info( + f"Running {self.analyzer_name} on {self.filename} with args: {args}" + ) + report = self._docker_run(req_data, req_files, analyzer_name=self.analyzer_name) + if not report: + self.report.errors.append("DIE did not detect the file type") + return {} + return report + + @staticmethod + def mocked_docker_analyzer_get(*args, **kwargs): + return MockUpResponse( + { + "report": { + "arch": "NOEXEC", + "mode": "Unknown", + "type": "Unknown", + "detects": [ + { + "name": "Zip", + "type": "archive", + "string": "archive: Zip(2.0)[38.5%,1 file]", + "options": "38.5%,1 file", + "version": "2.0", + } + ], + "filetype": "Binary", + "endianess": "LE", + } + }, + 200, + ) diff --git a/api_app/analyzers_manager/file_analyzers/malprob.py
b/api_app/analyzers_manager/file_analyzers/malprob.py new file mode 100644 index 0000000000..174a53b183 --- /dev/null +++ b/api_app/analyzers_manager/file_analyzers/malprob.py @@ -0,0 +1,79 @@ +import logging + +import requests + +from api_app.analyzers_manager.classes import FileAnalyzer +from api_app.analyzers_manager.exceptions import AnalyzerRunException +from tests.mock_utils import MockUpResponse, if_mock_connections, patch + +logger = logging.getLogger(__name__) + + +class MalprobScan(FileAnalyzer): + url: str = "https://malprob.io/api" + private: bool = False + timeout: int = 60 + _api_key_name: str + + def update(self): + pass + + def run(self): + file_name = str(self.filename).replace("/", "_").replace(" ", "_") + headers = {"Authorization": f"Token {self._api_key_name}"} + binary_file = self.read_file_bytes() + + if self._job.tlp == self._job.TLP.CLEAR.value: + logger.info(f"uploading {file_name}:{self.md5} to MalProb.io for analysis") + scan = requests.post( + f"{self.url}/scan/", + files={"file": binary_file}, + data={"name": file_name, "private": self.private}, + headers=headers, + timeout=self.timeout, + ) + scan.raise_for_status() + if scan.status_code == 204: + self.disable_for_rate_limit() + raise AnalyzerRunException("Limit reached for API") + elif scan.status_code == 302: + logger.info( + f"status 302: file already exists | Rescanning the file: {self.md5}" + ) + else: + return scan.json() + + logger.info(f"rescanning {file_name} using {self.md5} on MalProb.io") + rescan = requests.post( + f"{self.url}/rescan/", + data={"hashcode": self.md5}, + headers=headers, + timeout=self.timeout, + ) + rescan.raise_for_status() + if rescan.status_code == 204: + self.disable_for_rate_limit() + raise AnalyzerRunException("Limit reached for API") + return rescan.json() + + @classmethod + def _monkeypatch(cls): + patches = [ + if_mock_connections( + patch( + "requests.post", + return_value=MockUpResponse( + { + "report": { + "md5": "8a05a189e58ccd7275f7ffdf88c2c191", + "sha1": "a7a70f2f482e6b26eedcf1781b277718078c743a", + "sha256": """ac24043d48dadc390877a6151515565b1fdc1da + b028ee2d95d80bd80085d9376""", + }, + }, + 200, + ), + ), + ) + ] + return super()._monkeypatch(patches=patches) diff --git a/api_app/analyzers_manager/migrations/0091_analyzer_config_vulners.py b/api_app/analyzers_manager/migrations/0091_analyzer_config_vulners.py new file mode 100644 index 0000000000..98efca1de6 --- /dev/null +++ b/api_app/analyzers_manager/migrations/0091_analyzer_config_vulners.py @@ -0,0 +1,235 @@ +from django.db import migrations +from django.db.models.fields.related_descriptors import ( + ForwardManyToOneDescriptor, + ForwardOneToOneDescriptor, + ManyToManyDescriptor, +) + +plugin = { + "python_module": { + "health_check_schedule": { + "minute": "0", + "hour": "0", + "day_of_week": "*", + "day_of_month": "*", + "month_of_year": "*", + }, + "update_schedule": None, + "module": "vulners.Vulners", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + "name": "Vulners", + "description": "[Vulners](https://vulners.com) is the most complete and the only fully correlated security intelligence database, which goes through constant updates and links 200+ data sources in a unified machine-readable format. 
It contains 8 million+ entries, including CVEs, advisories, exploits, and IoCs — everything you need to stay abreast of the latest security threats.", + "disabled": False, + "soft_time_limit": 60, + "routing_key": "default", + "health_check_status": True, + "type": "observable", + "docker_based": False, + "maximum_tlp": "AMBER", + "observable_supported": ["generic"], + "supported_filetypes": [], + "run_hash": False, + "run_hash_type": "", + "not_supported_filetypes": [], + "model": "analyzers_manager.AnalyzerConfig", +} + +params = [ + { + "python_module": { + "module": "vulners.Vulners", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + "name": "score_AI", + "type": "bool", + "description": "Score any vulnerability with Vulners AI.\r\nDefault: False", + "is_secret": False, + "required": False, + }, + { + "python_module": { + "module": "vulners.Vulners", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + "name": "api_key_name", + "type": "str", + "description": "api key for vulners", + "is_secret": True, + "required": True, + }, + { + "python_module": { + "module": "vulners.Vulners", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + "name": "skip", + "type": "int", + "description": "skip parameter for vulners analyzer", + "is_secret": False, + "required": False, + }, + { + "python_module": { + "module": "vulners.Vulners", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + "name": "size", + "type": "int", + "description": "size parameter for vulners analyzer", + "is_secret": False, + "required": False, + }, +] + +values = [ + { + "parameter": { + "python_module": { + "module": "vulners.Vulners", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + "name": "score_AI", + "type": "bool", + "description": "Score any vulnerability with Vulners AI.\r\nDefault: False", + "is_secret": False, + "required": False, + }, + "analyzer_config": "Vulners", + "connector_config": None, + "visualizer_config": None, + "ingestor_config": None, + "pivot_config": None, + "for_organization": False, + "value": False, + "updated_at": "2024-05-22T18:49:52.056060Z", + "owner": None, + }, + { + "parameter": { + "python_module": { + "module": "vulners.Vulners", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + "name": "skip", + "type": "int", + "description": "skip parameter for vulners analyzer", + "is_secret": False, + "required": False, + }, + "analyzer_config": "Vulners", + "connector_config": None, + "visualizer_config": None, + "ingestor_config": None, + "pivot_config": None, + "for_organization": False, + "value": 0, + "updated_at": "2024-05-23T06:45:24.105426Z", + "owner": None, + }, + { + "parameter": { + "python_module": { + "module": "vulners.Vulners", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + "name": "size", + "type": "int", + "description": "size parameter for vulners analyzer", + "is_secret": False, + "required": False, + }, + "analyzer_config": "Vulners", + "connector_config": None, + "visualizer_config": None, + "ingestor_config": None, + "pivot_config": None, + "for_organization": False, + "value": 5, + "updated_at": "2024-05-23T06:45:24.109831Z", + "owner": None, + }, +] + + +def _get_real_obj(Model, field, value): + def _get_obj(Model, other_model, value): + if isinstance(value, dict): + real_vals = {} + for key, real_val in value.items(): + real_vals[key] = _get_real_obj(other_model, key, real_val) + value = 
other_model.objects.get_or_create(**real_vals)[0] + # it is just the primary key serialized + else: + if isinstance(value, int): + if Model.__name__ == "PluginConfig": + value = other_model.objects.get(name=plugin["name"]) + else: + value = other_model.objects.get(pk=value) + else: + value = other_model.objects.get(name=value) + return value + + if ( + type(getattr(Model, field)) + in [ForwardManyToOneDescriptor, ForwardOneToOneDescriptor] + and value + ): + other_model = getattr(Model, field).get_queryset().model + value = _get_obj(Model, other_model, value) + elif type(getattr(Model, field)) in [ManyToManyDescriptor] and value: + other_model = getattr(Model, field).rel.model + value = [_get_obj(Model, other_model, val) for val in value] + return value + + +def _create_object(Model, data): + mtm, no_mtm = {}, {} + for field, value in data.items(): + value = _get_real_obj(Model, field, value) + if type(getattr(Model, field)) is ManyToManyDescriptor: + mtm[field] = value + else: + no_mtm[field] = value + try: + o = Model.objects.get(**no_mtm) + except Model.DoesNotExist: + o = Model(**no_mtm) + o.full_clean() + o.save() + for field, value in mtm.items(): + attribute = getattr(o, field) + if value is not None: + attribute.set(value) + return False + return True + + +def migrate(apps, schema_editor): + Parameter = apps.get_model("api_app", "Parameter") + PluginConfig = apps.get_model("api_app", "PluginConfig") + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + if not Model.objects.filter(name=plugin["name"]).exists(): + exists = _create_object(Model, plugin) + if not exists: + for param in params: + _create_object(Parameter, param) + for value in values: + _create_object(PluginConfig, value) + + +def reverse_migrate(apps, schema_editor): + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + Model.objects.get(name=plugin["name"]).delete() + + +class Migration(migrations.Migration): + atomic = False + dependencies = [ + ("api_app", "0062_alter_parameter_python_module"), + ("analyzers_manager", "0090_analyzer_config_cycat"), + ] + + operations = [migrations.RunPython(migrate, reverse_migrate)] diff --git a/api_app/analyzers_manager/migrations/0092_alter_validin_desc.py b/api_app/analyzers_manager/migrations/0092_alter_validin_desc.py new file mode 100644 index 0000000000..a7476a1a84 --- /dev/null +++ b/api_app/analyzers_manager/migrations/0092_alter_validin_desc.py @@ -0,0 +1,36 @@ +from django.db import migrations + + +def migrate(apps, schema_editor): + AnalyzerConfig = apps.get_model("analyzers_manager", "AnalyzerConfig") + plugin_name = "Validin" + correct_description = "[Validin's](https://app.validin.com) API for threat researchers, teams, and companies to investigate historic and current data describing the structure and composition of the internet." + + try: + plugin = AnalyzerConfig.objects.get(name=plugin_name) + plugin.description = correct_description + plugin.save() + except AnalyzerConfig.DoesNotExist: + pass + + +def reverse_migrate(apps, schema_editor): + AnalyzerConfig = apps.get_model("analyzers_manager", "AnalyzerConfig") + plugin_name = "Validin" + original_description = "(Validin's)[https://app.validin.com/docs] API for threat researchers, teams, and companies to investigate historic and current data describing the structure and composition of the internet." 
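+    # note: reverse_migrate intentionally restores the previous description, malformed link syntax included, so the migration stays fully reversible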
+ + try: + plugin = AnalyzerConfig.objects.get(name=plugin_name) + plugin.description = original_description + plugin.save() + except AnalyzerConfig.DoesNotExist: + pass + + +class Migration(migrations.Migration): + atomic = False + + dependencies = [ + ("analyzers_manager", "0091_analyzer_config_vulners"), + ] + operations = [migrations.RunPython(migrate, reverse_migrate)] diff --git a/api_app/analyzers_manager/migrations/0093_analyzer_config_ailtyposquatting.py b/api_app/analyzers_manager/migrations/0093_analyzer_config_ailtyposquatting.py new file mode 100644 index 0000000000..fc7ad9dff1 --- /dev/null +++ b/api_app/analyzers_manager/migrations/0093_analyzer_config_ailtyposquatting.py @@ -0,0 +1,151 @@ +from django.db import migrations +from django.db.models.fields.related_descriptors import ( + ForwardManyToOneDescriptor, + ForwardOneToOneDescriptor, + ManyToManyDescriptor, +) + +plugin = { + "python_module": { + "health_check_schedule": None, + "update_schedule": None, + "module": "ailtyposquatting.AilTypoSquatting", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + "name": "AILTypoSquatting", + "description": "[AILTypoSquatting](https://github.com/typosquatter/ail-typo-squatting) is a Python library to generate a list of potential typosquatting domains, using a domain name permutation engine, to feed AIL and other systems.", + "disabled": False, + "soft_time_limit": 60, + "routing_key": "default", + "health_check_status": True, + "type": "observable", + "docker_based": False, + "maximum_tlp": "RED", + "observable_supported": ["domain"], + "supported_filetypes": [], + "run_hash": False, + "run_hash_type": "", + "not_supported_filetypes": [], + "model": "analyzers_manager.AnalyzerConfig", +} + +params = [ + { + "python_module": { + "module": "ailtyposquatting.AilTypoSquatting", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + "name": "dns_resolving", + "type": "bool", + "description": "dns_resolving for AilTypoSquatting; only works for TLP CLEAR", + "is_secret": False, + "required": False, + }, +] +values = [ + { + "parameter": { + "python_module": { + "module": "ailtyposquatting.AilTypoSquatting", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + "name": "dns_resolving", + "type": "bool", + "description": "dns_resolving for AilTypoSquatting; only works for TLP CLEAR", + "is_secret": False, + "required": False, + }, + "analyzer_config": "AILTypoSquatting", + "connector_config": None, + "visualizer_config": None, + "ingestor_config": None, + "pivot_config": None, + "for_organization": False, + "value": False, + "updated_at": "2024-05-26T00:10:15.236358Z", + "owner": None, + }, +] + + +def _get_real_obj(Model, field, value): + def _get_obj(Model, other_model, value): + if isinstance(value, dict): + real_vals = {} + for key, real_val in value.items(): + real_vals[key] = _get_real_obj(other_model, key, real_val) + value = other_model.objects.get_or_create(**real_vals)[0] + # it is just the primary key serialized + else: + if isinstance(value, int): + if Model.__name__ == "PluginConfig": + value = other_model.objects.get(name=plugin["name"]) + else: + value = other_model.objects.get(pk=value) + else: + value = other_model.objects.get(name=value) + return value + + if ( + type(getattr(Model, field)) + in [ForwardManyToOneDescriptor, ForwardOneToOneDescriptor] + and value + ): + other_model = getattr(Model, field).get_queryset().model + value = _get_obj(Model, other_model, value) + elif type(getattr(Model, field)) in 
[ManyToManyDescriptor] and value: + other_model = getattr(Model, field).rel.model + value = [_get_obj(Model, other_model, val) for val in value] + return value + + +def _create_object(Model, data): + mtm, no_mtm = {}, {} + for field, value in data.items(): + value = _get_real_obj(Model, field, value) + if type(getattr(Model, field)) is ManyToManyDescriptor: + mtm[field] = value + else: + no_mtm[field] = value + try: + o = Model.objects.get(**no_mtm) + except Model.DoesNotExist: + o = Model(**no_mtm) + o.full_clean() + o.save() + for field, value in mtm.items(): + attribute = getattr(o, field) + if value is not None: + attribute.set(value) + return False + return True + + +def migrate(apps, schema_editor): + Parameter = apps.get_model("api_app", "Parameter") + PluginConfig = apps.get_model("api_app", "PluginConfig") + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + if not Model.objects.filter(name=plugin["name"]).exists(): + exists = _create_object(Model, plugin) + if not exists: + for param in params: + _create_object(Parameter, param) + for value in values: + _create_object(PluginConfig, value) + + +def reverse_migrate(apps, schema_editor): + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + Model.objects.get(name=plugin["name"]).delete() + + +class Migration(migrations.Migration): + atomic = False + dependencies = [ + ("api_app", "0062_alter_parameter_python_module"), + ("analyzers_manager", "0092_alter_validin_desc"), + ] + + operations = [migrations.RunPython(migrate, reverse_migrate)] diff --git a/api_app/analyzers_manager/migrations/0094_analyzer_config_detectiteasy.py b/api_app/analyzers_manager/migrations/0094_analyzer_config_detectiteasy.py new file mode 100644 index 0000000000..ce01d55109 --- /dev/null +++ b/api_app/analyzers_manager/migrations/0094_analyzer_config_detectiteasy.py @@ -0,0 +1,185 @@ +from django.db import migrations +from django.db.models.fields.related_descriptors import ( + ForwardManyToOneDescriptor, + ForwardOneToOneDescriptor, + ManyToManyDescriptor, +) + +plugin = { + "python_module": { + "health_check_schedule": None, + "update_schedule": None, + "module": "detectiteasy.DetectItEasy", + "base_path": "api_app.analyzers_manager.file_analyzers", + }, + "name": "DetectItEasy", + "description": "[DetectItEasy](https://github.com/horsicq/Detect-It-Easy) is a program for determining types of files.", + "disabled": False, + "soft_time_limit": 10, + "routing_key": "default", + "health_check_status": True, + "type": "file", + "docker_based": True, + "maximum_tlp": "RED", + "observable_supported": [], + "supported_filetypes": [], + "run_hash": False, + "run_hash_type": "", + "not_supported_filetypes": [], + "model": "analyzers_manager.AnalyzerConfig", +} + +params = [ + { + "python_module": { + "module": "detectiteasy.DetectItEasy", + "base_path": "api_app.analyzers_manager.file_analyzers", + }, + "name": "max_tries", + "type": "int", + "description": "max_tries for detect it easy", + "is_secret": False, + "required": False, + }, + { + "python_module": { + "module": "detectiteasy.DetectItEasy", + "base_path": "api_app.analyzers_manager.file_analyzers", + }, + "name": "poll_distance", + "type": "int", + "description": "poll_distance for detect it easy", + "is_secret": False, + "required": False, + }, +] + +values = [ + { + "parameter": { + "python_module": { + "module": "detectiteasy.DetectItEasy", + "base_path": "api_app.analyzers_manager.file_analyzers", + }, + "name": "max_tries", + "type": 
"int", + "description": "max_tries for detect it easy", + "is_secret": False, + "required": False, + }, + "analyzer_config": "DetectItEasy", + "connector_config": None, + "visualizer_config": None, + "ingestor_config": None, + "pivot_config": None, + "for_organization": False, + "value": 10, + "updated_at": "2024-06-05T10:38:28.119622Z", + "owner": None, + }, + { + "parameter": { + "python_module": { + "module": "detectiteasy.DetectItEasy", + "base_path": "api_app.analyzers_manager.file_analyzers", + }, + "name": "poll_distance", + "type": "int", + "description": "poll_distance for detect it easy", + "is_secret": False, + "required": False, + }, + "analyzer_config": "DetectItEasy", + "connector_config": None, + "visualizer_config": None, + "ingestor_config": None, + "pivot_config": None, + "for_organization": False, + "value": 1, + "updated_at": "2024-06-05T10:38:28.426691Z", + "owner": None, + }, +] + + +def _get_real_obj(Model, field, value): + def _get_obj(Model, other_model, value): + if isinstance(value, dict): + real_vals = {} + for key, real_val in value.items(): + real_vals[key] = _get_real_obj(other_model, key, real_val) + value = other_model.objects.get_or_create(**real_vals)[0] + # it is just the primary key serialized + else: + if isinstance(value, int): + if Model.__name__ == "PluginConfig": + value = other_model.objects.get(name=plugin["name"]) + else: + value = other_model.objects.get(pk=value) + else: + value = other_model.objects.get(name=value) + return value + + if ( + type(getattr(Model, field)) + in [ForwardManyToOneDescriptor, ForwardOneToOneDescriptor] + and value + ): + other_model = getattr(Model, field).get_queryset().model + value = _get_obj(Model, other_model, value) + elif type(getattr(Model, field)) in [ManyToManyDescriptor] and value: + other_model = getattr(Model, field).rel.model + value = [_get_obj(Model, other_model, val) for val in value] + return value + + +def _create_object(Model, data): + mtm, no_mtm = {}, {} + for field, value in data.items(): + value = _get_real_obj(Model, field, value) + if type(getattr(Model, field)) is ManyToManyDescriptor: + mtm[field] = value + else: + no_mtm[field] = value + try: + o = Model.objects.get(**no_mtm) + except Model.DoesNotExist: + o = Model(**no_mtm) + o.full_clean() + o.save() + for field, value in mtm.items(): + attribute = getattr(o, field) + if value is not None: + attribute.set(value) + return False + return True + + +def migrate(apps, schema_editor): + Parameter = apps.get_model("api_app", "Parameter") + PluginConfig = apps.get_model("api_app", "PluginConfig") + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + if not Model.objects.filter(name=plugin["name"]).exists(): + exists = _create_object(Model, plugin) + if not exists: + for param in params: + _create_object(Parameter, param) + for value in values: + _create_object(PluginConfig, value) + + +def reverse_migrate(apps, schema_editor): + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + Model.objects.get(name=plugin["name"]).delete() + + +class Migration(migrations.Migration): + atomic = False + dependencies = [ + ("api_app", "0062_alter_parameter_python_module"), + ("analyzers_manager", "0093_analyzer_config_ailtyposquatting"), + ] + + operations = [migrations.RunPython(migrate, reverse_migrate)] diff --git a/api_app/analyzers_manager/migrations/0095_analyzer_config_malprobsearch.py b/api_app/analyzers_manager/migrations/0095_analyzer_config_malprobsearch.py new file mode 100644 
index 0000000000..98fa1bc801 --- /dev/null +++ b/api_app/analyzers_manager/migrations/0095_analyzer_config_malprobsearch.py @@ -0,0 +1,123 @@ +from django.db import migrations +from django.db.models.fields.related_descriptors import ( + ForwardManyToOneDescriptor, + ForwardOneToOneDescriptor, + ManyToManyDescriptor, +) + +plugin = { + "python_module": { + "health_check_schedule": { + "minute": "0", + "hour": "0", + "day_of_week": "*", + "day_of_month": "*", + "month_of_year": "*", + }, + "update_schedule": None, + "module": "malprob.MalprobSearch", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + "name": "MalprobSearch", + "description": "[Malprob](https://malprob.io/) is a leading malware detection and identification service, powered by cutting-edge AI technology.", + "disabled": False, + "soft_time_limit": 10, + "routing_key": "default", + "health_check_status": True, + "type": "observable", + "docker_based": False, + "maximum_tlp": "AMBER", + "observable_supported": ["hash"], + "supported_filetypes": [], + "run_hash": False, + "run_hash_type": "", + "not_supported_filetypes": [], + "model": "analyzers_manager.AnalyzerConfig", +} + +params = [] + +values = [] + + +def _get_real_obj(Model, field, value): + def _get_obj(Model, other_model, value): + if isinstance(value, dict): + real_vals = {} + for key, real_val in value.items(): + real_vals[key] = _get_real_obj(other_model, key, real_val) + value = other_model.objects.get_or_create(**real_vals)[0] + # it is just the primary key serialized + else: + if isinstance(value, int): + if Model.__name__ == "PluginConfig": + value = other_model.objects.get(name=plugin["name"]) + else: + value = other_model.objects.get(pk=value) + else: + value = other_model.objects.get(name=value) + return value + + if ( + type(getattr(Model, field)) + in [ForwardManyToOneDescriptor, ForwardOneToOneDescriptor] + and value + ): + other_model = getattr(Model, field).get_queryset().model + value = _get_obj(Model, other_model, value) + elif type(getattr(Model, field)) in [ManyToManyDescriptor] and value: + other_model = getattr(Model, field).rel.model + value = [_get_obj(Model, other_model, val) for val in value] + return value + + +def _create_object(Model, data): + mtm, no_mtm = {}, {} + for field, value in data.items(): + value = _get_real_obj(Model, field, value) + if type(getattr(Model, field)) is ManyToManyDescriptor: + mtm[field] = value + else: + no_mtm[field] = value + try: + o = Model.objects.get(**no_mtm) + except Model.DoesNotExist: + o = Model(**no_mtm) + o.full_clean() + o.save() + for field, value in mtm.items(): + attribute = getattr(o, field) + if value is not None: + attribute.set(value) + return False + return True + + +def migrate(apps, schema_editor): + Parameter = apps.get_model("api_app", "Parameter") + PluginConfig = apps.get_model("api_app", "PluginConfig") + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + if not Model.objects.filter(name=plugin["name"]).exists(): + exists = _create_object(Model, plugin) + if not exists: + for param in params: + _create_object(Parameter, param) + for value in values: + _create_object(PluginConfig, value) + + +def reverse_migrate(apps, schema_editor): + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + Model.objects.get(name=plugin["name"]).delete() + + +class Migration(migrations.Migration): + atomic = False + dependencies = [ + ("api_app", "0062_alter_parameter_python_module"), + ("analyzers_manager", 
"0094_analyzer_config_detectiteasy"), + ] + + operations = [migrations.RunPython(migrate, reverse_migrate)] diff --git a/api_app/analyzers_manager/migrations/0096_analyzer_config_malprobscan.py b/api_app/analyzers_manager/migrations/0096_analyzer_config_malprobscan.py new file mode 100644 index 0000000000..4761282db3 --- /dev/null +++ b/api_app/analyzers_manager/migrations/0096_analyzer_config_malprobscan.py @@ -0,0 +1,202 @@ +from django.db import migrations +from django.db.models.fields.related_descriptors import ( + ForwardManyToOneDescriptor, + ForwardOneToOneDescriptor, + ManyToManyDescriptor, +) + +plugin = { + "python_module": { + "health_check_schedule": { + "minute": "0", + "hour": "0", + "day_of_week": "*", + "day_of_month": "*", + "month_of_year": "*", + }, + "update_schedule": None, + "module": "malprob.MalprobScan", + "base_path": "api_app.analyzers_manager.file_analyzers", + }, + "name": "MalprobScan", + "description": "[Malprob](https://malprob.io/) is a malware detection and identification service, powered by cutting-edge AI technology.", + "disabled": False, + "soft_time_limit": 60, + "routing_key": "default", + "health_check_status": True, + "type": "file", + "docker_based": False, + "maximum_tlp": "AMBER", + "observable_supported": [], + "supported_filetypes": [], + "run_hash": False, + "run_hash_type": "", + "not_supported_filetypes": [], + "model": "analyzers_manager.AnalyzerConfig", +} + +params = [ + { + "python_module": { + "module": "malprob.MalprobScan", + "base_path": "api_app.analyzers_manager.file_analyzers", + }, + "name": "api_key_name", + "type": "str", + "description": "api key for MalprobScan", + "is_secret": True, + "required": True, + }, + { + "python_module": { + "module": "malprob.MalprobScan", + "base_path": "api_app.analyzers_manager.file_analyzers", + }, + "name": "private", + "type": "bool", + "description": "private scan for MalprobScan", + "is_secret": False, + "required": False, + }, + { + "python_module": { + "module": "malprob.MalprobScan", + "base_path": "api_app.analyzers_manager.file_analyzers", + }, + "name": "timeout", + "type": "int", + "description": "request timeout for MalprobScan", + "is_secret": False, + "required": False, + }, +] + +values = [ + { + "parameter": { + "python_module": { + "module": "malprob.MalprobScan", + "base_path": "api_app.analyzers_manager.file_analyzers", + }, + "name": "private", + "type": "bool", + "description": "private scan for MalprobScan", + "is_secret": False, + "required": False, + }, + "analyzer_config": "MalprobScan", + "connector_config": None, + "visualizer_config": None, + "ingestor_config": None, + "pivot_config": None, + "for_organization": False, + "value": False, + "updated_at": "2024-06-03T22:17:04.195860Z", + "owner": None, + }, + { + "parameter": { + "python_module": { + "module": "malprob.MalprobScan", + "base_path": "api_app.analyzers_manager.file_analyzers", + }, + "name": "timeout", + "type": "int", + "description": "request timeout for MalprobScan", + "is_secret": False, + "required": False, + }, + "analyzer_config": "MalprobScan", + "connector_config": None, + "visualizer_config": None, + "ingestor_config": None, + "pivot_config": None, + "for_organization": False, + "value": 60, + "updated_at": "2024-06-04T10:23:40.132533Z", + "owner": None, + }, +] + + +def _get_real_obj(Model, field, value): + def _get_obj(Model, other_model, value): + if isinstance(value, dict): + real_vals = {} + for key, real_val in value.items(): + real_vals[key] = _get_real_obj(other_model, key, real_val) + 
value = other_model.objects.get_or_create(**real_vals)[0] + # it is just the primary key serialized + else: + if isinstance(value, int): + if Model.__name__ == "PluginConfig": + value = other_model.objects.get(name=plugin["name"]) + else: + value = other_model.objects.get(pk=value) + else: + value = other_model.objects.get(name=value) + return value + + if ( + type(getattr(Model, field)) + in [ForwardManyToOneDescriptor, ForwardOneToOneDescriptor] + and value + ): + other_model = getattr(Model, field).get_queryset().model + value = _get_obj(Model, other_model, value) + elif type(getattr(Model, field)) in [ManyToManyDescriptor] and value: + other_model = getattr(Model, field).rel.model + value = [_get_obj(Model, other_model, val) for val in value] + return value + + +def _create_object(Model, data): + mtm, no_mtm = {}, {} + for field, value in data.items(): + value = _get_real_obj(Model, field, value) + if type(getattr(Model, field)) is ManyToManyDescriptor: + mtm[field] = value + else: + no_mtm[field] = value + try: + o = Model.objects.get(**no_mtm) + except Model.DoesNotExist: + o = Model(**no_mtm) + o.full_clean() + o.save() + for field, value in mtm.items(): + attribute = getattr(o, field) + if value is not None: + attribute.set(value) + return False + return True + + +def migrate(apps, schema_editor): + Parameter = apps.get_model("api_app", "Parameter") + PluginConfig = apps.get_model("api_app", "PluginConfig") + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + if not Model.objects.filter(name=plugin["name"]).exists(): + exists = _create_object(Model, plugin) + if not exists: + for param in params: + _create_object(Parameter, param) + for value in values: + _create_object(PluginConfig, value) + + +def reverse_migrate(apps, schema_editor): + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + Model.objects.get(name=plugin["name"]).delete() + + +class Migration(migrations.Migration): + atomic = False + dependencies = [ + ("api_app", "0062_alter_parameter_python_module"), + ("analyzers_manager", "0095_analyzer_config_malprobsearch"), + ] + + operations = [migrations.RunPython(migrate, reverse_migrate)] diff --git a/api_app/analyzers_manager/observable_analyzers/ailtyposquatting.py b/api_app/analyzers_manager/observable_analyzers/ailtyposquatting.py new file mode 100644 index 0000000000..0b79815813 --- /dev/null +++ b/api_app/analyzers_manager/observable_analyzers/ailtyposquatting.py @@ -0,0 +1,67 @@ +import logging +import math + +from ail_typo_squatting import typo +from ail_typo_squatting.dns_local import resolving + +from api_app.analyzers_manager import classes +from tests.mock_utils import if_mock_connections, patch + +logger = logging.getLogger(__name__) + + +class AilTypoSquatting(classes.ObservableAnalyzer): + """ + wrapper for https://github.com/typosquatter/ail-typo-squatting + """ + + dns_resolving: bool = False + + def update(self) -> bool: + pass + + def run(self): + response = {} + logger.info( + f"""running AilTypoSquatting on {self.observable_name} + with tlp {self._job.tlp} + and dns resolving {self.dns_resolving}""" + ) + + response["algorithms"] = typo.runAll( + domain=self.observable_name, + limit=math.inf, + formatoutput="text", + pathOutput=None, + ) + if self._job.tlp == self._job.TLP.CLEAR.value and self.dns_resolving: + # for "x.com", response["algorithms"][0]=".com" + # which is not valid for look up + if len(self.observable_name.split(".")[0]) == 1: + logger.info( + f"""running dns resolving 
on {self.observable_name} + excluding {response['algorithms'][0]}""" + ) + response["dnsResolving"] = resolving.dnsResolving( + resultList=response["algorithms"][1:], + domain=self.observable_name, + pathOutput=None, + ) + else: + response["dnsResolving"] = resolving.dnsResolving( + resultList=response["algorithms"], + domain=self.observable_name, + pathOutput=None, + ) + + return response + + @classmethod + def _monkeypatch(cls): + patches = [ + if_mock_connections( + patch.object(typo, "runAll", return_value=None), + patch.object(resolving, "dnsResolving", return_value=None), + ) + ] + return super()._monkeypatch(patches=patches) diff --git a/api_app/analyzers_manager/observable_analyzers/dns/dns_resolvers/dns0_eu_resolver.py b/api_app/analyzers_manager/observable_analyzers/dns/dns_resolvers/dns0_eu_resolver.py index 4439d92c4c..e9514e90a9 100644 --- a/api_app/analyzers_manager/observable_analyzers/dns/dns_resolvers/dns0_eu_resolver.py +++ b/api_app/analyzers_manager/observable_analyzers/dns/dns_resolvers/dns0_eu_resolver.py @@ -22,6 +22,9 @@ class DNS0EUResolver(classes.ObservableAnalyzer): class NotADomain(Exception): pass + url = "https://dns0.eu" + headers = {"Accept": "application/dns-json"} + query_type: str def run(self): @@ -38,11 +41,9 @@ def run(self): else: raise self.NotADomain() - headers = {"Accept": "application/dns-json"} - url = "https://dns0.eu" params = {"name": observable, "type": self.query_type} - response = requests.get(url, headers=headers, params=params) + response = requests.get(self.url, headers=self.headers, params=params) response.raise_for_status() resolutions = response.json().get("Answer", []) except requests.RequestException: diff --git a/api_app/analyzers_manager/observable_analyzers/malprob.py b/api_app/analyzers_manager/observable_analyzers/malprob.py new file mode 100644 index 0000000000..1316158398 --- /dev/null +++ b/api_app/analyzers_manager/observable_analyzers/malprob.py @@ -0,0 +1,134 @@ +import requests + +from api_app.analyzers_manager import classes +from tests.mock_utils import MockUpResponse, if_mock_connections, patch + + +class MalprobSearch(classes.ObservableAnalyzer): + url: str = "https://malprob.io/api" + + def update(self): + pass + + def run(self): + response = requests.get( + f"{self.url}/search/{self.observable_name}", + timeout=10, + ) + response.raise_for_status() + return response.json() + + @classmethod + def _monkeypatch(cls): + patches = [ + if_mock_connections( + patch( + "requests.get", + return_value=MockUpResponse( + { + "report": { + "md5": "8a05a189e58ccd7275f7ffdf88c2c191", + "mime": "application/java-archive", + "name": "sample.apk", + "sha1": "a7a70f2f482e6b26eedcf1781b277718078c743a", + "size": 3425, + "test": 0, + "trid": """Android Package (63.7%) | + Java Archive (26.4%) | + ZIP compressed archive (7.8%) | + PrintFox/Pagefox bitmap (1.9%)""", + "type": "ARCHIVE", + "label": "benign", + "magic": "application/java-archive", + "score": 0.0003923133846427324, + "nested": [ + { + "name": "MANIFEST.MF", + "size": 331, + "type": "text/plain", + "score": 0.0003923133846427324, + "sha256": """b093f736dac9f016788f59d6218eb + 2c9015e30e01ec88dc031863ff83e998e33""", + "complete": True, + "supported": True, + }, + { + "name": "CERT.SF", + "size": 384, + "type": "text/plain", + "score": 6.292509868171916e-06, + "sha256": """db5b14f8ccb0276e6db502e2b3ad1e + 75728a2d65c1798fcbe1ed8e153b0b17a6""", + "complete": True, + "supported": True, + }, + { + "name": "a.png", + "size": 87, + "type": "image/png", + "score": 0.0, + "sha256": 
"""cc30bfc9a985956c833a135389743e96 + 835fdddae75aab5f06f3cb8d10f1af9f""", + "complete": True, + "supported": True, + }, + { + "name": "CERT.RSA", + "size": 481, + "type": "application/octet-stream", + "score": "NaN", + "sha256": """3b3b283f338421ae31532a508bbc6aa8c + 1da54fc75357cfa9ac97cd4e46040a7""", + "complete": True, + "supported": False, + }, + { + "name": "classes.dex", + "size": 920, + "type": "application/octet-stream", + "score": "NaN", + "sha256": """fab857801d10f45887ad376263de6bc1c + 9e1893060d63cb5ad4eefb72f354112""", + "complete": True, + "supported": False, + }, + { + "name": "resources.arsc", + "size": 560, + "type": "application/octet-stream", + "score": "NaN", + "sha256": """d118e4e8b4921dbcaa5874012fb8426a08 + a195461285dee7c42b1bd7c6028802""", + "complete": True, + "supported": False, + }, + { + "name": "AndroidManifest.xml", + "size": 1248, + "type": "application/octet-stream", + "score": "NaN", + "sha256": """a718ac6589ff638ba8d799824ecdf0a858 + 77f9e0381e6b573bf552875dd04ce9""", + "complete": True, + "supported": False, + }, + ], + "sha256": """ac24043d48dadc390877a6151515565b + 1fdc1dab028ee2d95d80bd80085d9376""", + "category": "ARCHIVE", + "complete": True, + "encoding": None, + "extracted": True, + "predicted": True, + "scan_time": 219511, + "supported": True, + "insert_date": 1717233771, + "parent_hash": [None], + }, + }, + 200, + ), + ), + ) + ] + return super()._monkeypatch(patches=patches) diff --git a/api_app/analyzers_manager/observable_analyzers/vulners.py b/api_app/analyzers_manager/observable_analyzers/vulners.py new file mode 100644 index 0000000000..2efc53f476 --- /dev/null +++ b/api_app/analyzers_manager/observable_analyzers/vulners.py @@ -0,0 +1,65 @@ +import logging + +import requests + +from api_app.analyzers_manager import classes +from tests.mock_utils import MockUpResponse, if_mock_connections, patch + +logger = logging.getLogger(__name__) + + +class Vulners(classes.ObservableAnalyzer): + """ + This analyzer is a wrapper for the vulners project. 
+ """ + + score_AI: bool = False + skip: int = 0 + size: int = 5 + _api_key_name: str + url = "https://vulners.com/api/v3" + + def search_ai(self): + return requests.post( + url=self.url + "/ai/scoretext/", + headers={"Content-Type": "application/json"}, + json={"text": self.observable_name, "apiKey": self._api_key_name}, + ) + + def search_database(self): + return requests.post( + url=self.url + "/search/lucene", + headers={"Content-Type": "application/json"}, + json={ + "query": self.observable_name, + "skip": self.skip, + "size": self.size, + "apiKey": self._api_key_name, + }, + ) + + def run(self): + response = None + if self.score_AI: + response = self.search_ai() + else: + response = self.search_database() + response.raise_for_status() + return response.json() + + # update() is required by the analyzer framework, but there is nothing to update here + def update(self) -> bool: + pass + + @classmethod + def _monkeypatch(cls): + response = {"result": "OK", "data": {"score": [6.5, "NONE"]}} + patches = [ + if_mock_connections( + patch( + "requests.post", + return_value=MockUpResponse(response, 200), + ), + ) + ] + return super()._monkeypatch(patches=patches) diff --git a/api_app/serializers/__init__.py b/api_app/serializers/__init__.py index 02441453f1..52d8cb1143 100644 --- a/api_app/serializers/__init__.py +++ b/api_app/serializers/__init__.py @@ -1,18 +1,16 @@ from django.conf import settings -from django.utils.timezone import now from rest_framework import serializers as rfs from rest_framework.exceptions import ValidationError from rest_framework.fields import Field -from rest_framework.serializers import ModelSerializer from api_app.interfaces import OwnershipAbstractModel from certego_saas.apps.organization.organization import Organization +from certego_saas.ext.upload.elastic import BISerializer -class AbstractBIInterface(ModelSerializer): +class AbstractBIInterface(BISerializer): application = rfs.CharField(read_only=True, default="IntelOwl") environment = rfs.SerializerMethodField(method_name="get_environment") - timestamp: Field username: Field name: Field class_instance = rfs.SerializerMethodField( @@ -23,10 +21,7 @@ class AbstractBIInterface(ModelSerializer): end_time: Field class Meta: - fields = [ - "application", - "environment", - "timestamp", + fields = BISerializer.Meta.fields + [ "username", "name", "class_instance", @@ -47,16 +42,9 @@ def get_environment(instance=None): else: return "test" - def to_elastic_dict(self, data): - return { - "_source": data, - "_index": settings.ELASTICSEARCH_BI_INDEX - + "-" - + self.get_environment() - + "-" - + now().strftime("%Y.%m"), - "_op_type": "index", - } + @staticmethod + def get_index(): + return settings.ELASTICSEARCH_BI_INDEX class ModelWithOwnershipSerializer(rfs.ModelSerializer): diff --git a/api_app/serializers/job.py b/api_app/serializers/job.py index eee03580fc..51d5f9cadc 100755 --- a/api_app/serializers/job.py +++ b/api_app/serializers/job.py @@ -1155,7 +1155,7 @@ class Meta: def to_representation(self, instance: Job): data = super().to_representation(instance) - return self.to_elastic_dict(data) + return self.to_elastic_dict(data, self.get_index()) @staticmethod def get_playbook(instance: Job): diff --git a/api_app/serializers/report.py b/api_app/serializers/report.py index bd8ec88fbe..adbbafc818 100644 --- a/api_app/serializers/report.py +++ b/api_app/serializers/report.py @@ -32,7 +32,7 @@ class Meta: def to_representation(self, instance: AbstractReport): data = super().to_representation(instance) - return self.to_elastic_dict(data, 
self.get_index()) def get_class_instance(self, instance: AbstractReport): return super().get_class_instance(instance).split("report")[0] diff --git a/api_app/visualizers_manager/classes.py b/api_app/visualizers_manager/classes.py index f86b0002b0..9d9c57637f 100644 --- a/api_app/visualizers_manager/classes.py +++ b/api_app/visualizers_manager/classes.py @@ -16,6 +16,7 @@ VisualizableIcon, VisualizableLevelSize, VisualizableSize, + VisualizableTableColumnSize, ) from api_app.visualizers_manager.exceptions import ( VisualizerConfigurationException, @@ -208,7 +209,7 @@ def __init__( f"value {v} should be a VisualizableObject and not a string" ) if fill_empty and not value: - value = [VisualizableBase(value="no data available", disable=False)] + value = [VisualizableBase(value="no data available", disable=True)] if not name: start_open = True self.value = value @@ -261,23 +262,58 @@ def type(self) -> str: return "vertical_list" +class VisualizableTableColumn: + def __init__( + self, + name: str, + max_width: VisualizableTableColumnSize = VisualizableTableColumnSize.S_300, + description: str = "", + disable_filters: bool = False, + disable_sort_by: bool = False, + ): + self.name = name + self.description = description + self.disable_filters = disable_filters + self.disable_sort_by = disable_sort_by + self.max_width = max_width + + @property + def attributes(self) -> List[str]: + return [ + "name", + "description", + "disable_filters", + "disable_sort_by", + "max_width", + ] + + def to_dict(self) -> Dict: + if not self: + return {} + result = {attr: getattr(self, attr) for attr in self.attributes} + for key, value in result.items(): + if isinstance(value, Enum): + result[key] = value.value + return result + + class VisualizableTable(VisualizableObject): def __init__( self, - columns: List[str], + columns: List[VisualizableTableColumn], data: List[Dict[str, VisualizableObject]], size: VisualizableSize = VisualizableSize.S_AUTO, alignment: VisualizableAlignment = VisualizableAlignment.AROUND, page_size: int = 5, - disable_filters: bool = False, - disable_sort_by: bool = False, + sort_by_id: str = "", + sort_by_desc: bool = False, ): super().__init__(size=size, alignment=alignment, disable=False) self.data = data self.columns = columns self.page_size = page_size - self.disable_filters = disable_filters - self.disable_sort_by = disable_sort_by + self.sort_by_id = sort_by_id + self.sort_by_desc = sort_by_desc @property def attributes(self) -> List[str]: @@ -285,8 +321,8 @@ def attributes(self) -> List[str]: "data", "columns", "page_size", - "disable_filters", - "disable_sort_by", + "sort_by_id", + "sort_by_desc", ] @property @@ -296,6 +332,7 @@ def type(self) -> str: def to_dict(self) -> Dict: result = super().to_dict() data: List[Dict[str, VisualizableObject]] = result.pop("data", []) + columns: List[VisualizableTableColumn] = result.pop("columns", []) if any(x for x in data): new_data = [] for element in data: @@ -309,6 +346,12 @@ def to_dict(self) -> Dict: result["data"] = new_data else: result["data"] = [] + if any(x for x in columns): + result["columns"] = [ + column.to_dict() for column in columns if column is not None + ] + else: + result["columns"] = [] result.pop("disable") return result @@ -384,6 +427,8 @@ class Visualizer(Plugin, metaclass=abc.ABCMeta): HList = VisualizableHorizontalList Table = VisualizableTable + TableColumn = VisualizableTableColumn + LevelSize = VisualizableLevelSize Page = VisualizablePage Level = VisualizableLevel diff --git a/api_app/visualizers_manager/enums.py 
b/api_app/visualizers_manager/enums.py index 595e803262..a4883e8587 100644 --- a/api_app/visualizers_manager/enums.py +++ b/api_app/visualizers_manager/enums.py @@ -124,3 +124,17 @@ def __str__(self): def __bool__(self): return True + + +class VisualizableTableColumnSize(enum.Enum): + """Column size for VisualizableTable elements""" + + S_50 = 50 + S_100 = 100 + S_150 = 150 + S_200 = 200 + S_250 = 250 + S_300 = 300 + + def __str__(self): + return str(self.value) diff --git a/api_app/visualizers_manager/visualizers/quokka/observable.py b/api_app/visualizers_manager/visualizers/quokka/observable.py index 98d43debd9..6c89c707cd 100644 --- a/api_app/visualizers_manager/visualizers/quokka/observable.py +++ b/api_app/visualizers_manager/visualizers/quokka/observable.py @@ -12,6 +12,7 @@ from api_app.visualizers_manager.decorators import ( visualizable_error_handler_with_params, ) +from api_app.visualizers_manager.enums import VisualizableTableColumnSize from api_app.visualizers_manager.visualizers.quokka.field_description import ( FieldDescription, ) @@ -1198,20 +1199,67 @@ def _attacker_by_range(self) -> Visualizer.HList: } ) + columns = [ + Visualizer.TableColumn( + name="time_range", + max_width=VisualizableTableColumnSize.S_300, + disable_filters=True, + disable_sort_by=True, + ), + Visualizer.TableColumn( + name="total_hours_seen", + max_width=VisualizableTableColumnSize.S_300, + disable_filters=True, + disable_sort_by=True, + description=( + "Number of hours during which the attacker was active." + " This indicates for how long" + " the attacker has been active." + ), + ), + Visualizer.TableColumn( + name="total_hits", + max_width=VisualizableTableColumnSize.S_300, + disable_filters=True, + disable_sort_by=True, + description=( + "Number of times the attacker attempted an attack." + " This indicates the aggressiveness of the attack." + ), + ), + Visualizer.TableColumn( + name="submitters", + max_width=VisualizableTableColumnSize.S_300, + disable_filters=True, + disable_sort_by=True, + description="Sources that reported the attacker.", + ), + Visualizer.TableColumn( + name="exploits", + max_width=VisualizableTableColumnSize.S_300, + disable_filters=True, + disable_sort_by=True, + description=( + "If available, " "the CVEs that the attacker tried to exploit." + ), + ), + Visualizer.TableColumn( + name="attack_types", + max_width=VisualizableTableColumnSize.S_300, + disable_filters=True, + disable_sort_by=True, + description=( + "Category of the attack " + "(protocol, service and type). Example: http/scan" + ), + ), + ] + return Visualizer.HList( value=[ Visualizer.Table( data=time_range_data, - columns=[ - "time_range", - "total_hours_seen", - "total_hits", - "submitters", - "exploits", - "attack_types", - ], - disable_filters=True, - disable_sort_by=True, + columns=columns, size=Visualizer.Size.S_ALL, ) ], diff --git a/configuration/nginx/https.conf b/configuration/nginx/https.conf index 8cc801b3da..5abfbad906 100755 --- a/configuration/nginx/https.conf +++ b/configuration/nginx/https.conf @@ -14,7 +14,6 @@ limit_req_zone $binary_remote_addr zone=adminlimit:10m rate=1r/s; server { listen 443 ssl; - ssl on; ssl_protocols TLSv1.2 TLSv1.3; ssl_certificate /usr/local/share/ca-certificates/intelowl.crt; ssl_certificate_key /etc/ssl/private/intelowl.key; diff --git a/docker/.env b/docker/.env index bc984be7da..b9f079e8eb 100755 --- a/docker/.env +++ b/docker/.env @@ -1,6 +1,6 @@ ### DO NOT CHANGE THIS VALUE !! 
### It should be updated only when you pull latest changes off from the 'master' branch of IntelOwl. # this variable must start with "REACT_APP_" to be used in the frontend too -REACT_APP_INTELOWL_VERSION="v6.0.2" +REACT_APP_INTELOWL_VERSION="v6.0.4" # if you want to use a nfs volume for shared files # NFS_ADDRESS= diff --git a/docker/Dockerfile_nginx b/docker/Dockerfile_nginx index a1f0c735e1..49ae03d40d 100755 --- a/docker/Dockerfile_nginx +++ b/docker/Dockerfile_nginx @@ -1,4 +1,4 @@ -FROM library/nginx:1.26.0-alpine +FROM library/nginx:1.27.0-alpine # do not remove this RUN apk update && apk upgrade && apk add bash diff --git a/docker/default.private.yml b/docker/default.private.yml index 9077180d97..2383e65eab 100755 --- a/docker/default.private.yml +++ b/docker/default.private.yml @@ -7,6 +7,9 @@ services: image: local/intel_owl_private_uwsgi volumes: - /etc/ssl/certs:/opt/deploy/intel_owl/certs + healthcheck: + start_period: 10s + retries: 20 daphne: container_name: intelowl_private_daphne diff --git a/docker/default.yml b/docker/default.yml index da93e5e589..9406d794ac 100755 --- a/docker/default.yml +++ b/docker/default.yml @@ -25,8 +25,8 @@ services: test: [ "CMD-SHELL", "nc -z localhost 8001 || exit 1" ] interval: 5s timeout: 2s - start_period: 10s - retries: 20 + start_period: 300s + retries: 2 daphne: image: intelowlproject/intelowl:${REACT_APP_INTELOWL_VERSION} @@ -52,7 +52,6 @@ services: uwsgi: condition: service_healthy - nginx: image: intelowlproject/intelowl_nginx:${REACT_APP_INTELOWL_VERSION} container_name: intelowl_nginx @@ -65,8 +64,6 @@ services: - ../configuration/nginx/locations.conf:/etc/nginx/locations.conf - nginx_logs:/var/log/nginx - static_content:/var/www/static - # ports: - # - "80:80" depends_on: uwsgi: condition: service_healthy diff --git a/docker/entrypoints/uwsgi.sh b/docker/entrypoints/uwsgi.sh index ed47c5b36c..0c60c67291 100755 --- a/docker/entrypoints/uwsgi.sh +++ b/docker/entrypoints/uwsgi.sh @@ -4,7 +4,8 @@ until cd /opt/deploy/intel_owl do echo "Waiting for server volume..." done -sudo su www-data -c "mkdir -p /var/log/intel_owl/django /var/log/intel_owl/uwsgi /var/log/intel_owl/asgi /opt/deploy/intel_owl/files_required/blint /opt/deploy/intel_owl/files_required/yara" +mkdir -p /var/log/intel_owl/django /var/log/intel_owl/uwsgi /var/log/intel_owl/asgi /opt/deploy/intel_owl/files_required/blint /opt/deploy/intel_owl/files_required/yara +chown -R www-data:www-data /var/log/intel_owl/django /var/log/intel_owl/uwsgi /var/log/intel_owl/asgi /opt/deploy/intel_owl/files_required/blint /opt/deploy/intel_owl/files_required/yara # Apply database migrations echo "Waiting for db to be ready..." @@ -34,6 +35,17 @@ CHANGELOG_NOTIFICATION_COMMAND='python manage.py changelog_notification .github/ if [[ $DEBUG == "True" ]] && [[ $DJANGO_TEST_SERVER == "True" ]]; then + # Create superuser if it does not exist + exists=$(echo "from django.contrib.auth import get_user_model; User = get_user_model(); print(User.objects.filter(username='admin').exists())" | python manage.py shell) + + if [ "$exists" == "True" ]; then + echo "Superuser 'admin' already exists." + else + echo "Creating superuser 'admin' with password 'admin'..." + echo "from django.contrib.auth import get_user_model; User = get_user_model(); User.objects.create_superuser('admin', 'admin@example.com', 'admin')" | python manage.py shell + echo "Superuser 'admin' created successfully." 
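+    # note: these default admin/admin credentials are created only when DEBUG and DJANGO_TEST_SERVER are both "True", so they are never created in production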
+ fi + $CHANGELOG_NOTIFICATION_COMMAND --debug python manage.py runserver 0.0.0.0:8001 else diff --git a/docker/nginx.override.yml b/docker/nginx.override.yml new file mode 100644 index 0000000000..3b4d38efc1 --- /dev/null +++ b/docker/nginx.override.yml @@ -0,0 +1,4 @@ +services: + nginx: + ports: + - "80:80" diff --git a/docker/scripts/watchman_install.sh b/docker/scripts/watchman_install.sh index a2e067fb13..e3896d0daf 100755 --- a/docker/scripts/watchman_install.sh +++ b/docker/scripts/watchman_install.sh @@ -1,7 +1,10 @@ #!/bin/bash +echo "WATCHMAN value is " +echo $WATCHMAN + # This script can be disabled during development using WATCHMAN=false env variable -if [ "$WATCHMAN" == "False" ]; then echo "Skipping WATCHMAN installation because we are not in test mode"; exit 0; fi +if [ "$WATCHMAN" = "false" ]; then echo "Skipping WATCHMAN installation because we are not in test mode"; exit 0; fi pip3 install --compile -r requirements/django-server-requirements.txt diff --git a/docker/test.override.yml b/docker/test.override.yml index c9c1de84e9..24cda42f25 100755 --- a/docker/test.override.yml +++ b/docker/test.override.yml @@ -14,7 +14,6 @@ services: - DEBUG=True - DJANGO_TEST_SERVER=True - DJANGO_WATCHMAN_TIMEOUT=60 - - WATCHMAN=True daphne: image: intelowlproject/intelowl:test @@ -28,8 +27,6 @@ services: image: intelowlproject/intelowl_nginx:test volumes: - ../configuration/nginx/django_server.conf:/etc/nginx/conf.d/default.conf - ports: - - "80:80" celery_beat: image: intelowlproject/intelowl:test @@ -37,16 +34,10 @@ services: - ../:/opt/deploy/intel_owl environment: - DEBUG=True - - DJANGO_TEST_SERVER=True - - DJANGO_WATCHMAN_TIMEOUT=60 - - WATCHMAN=True celery_worker_default: image: intelowlproject/intelowl:test volumes: - ../:/opt/deploy/intel_owl environment: - - DEBUG=True - - DJANGO_TEST_SERVER=True - - DJANGO_WATCHMAN_TIMEOUT=60 - - WATCHMAN=True \ No newline at end of file + - DEBUG=True \ No newline at end of file diff --git a/docker/traefik.override.yml b/docker/traefik.override.yml deleted file mode 100755 index dada2fd93e..0000000000 --- a/docker/traefik.override.yml +++ /dev/null @@ -1,34 +0,0 @@ -services: - traefik: - image: "traefik:v2.2" - container_name: "traefik" - command: - #- "--log.level=DEBUG" - - "--api.insecure=true" - - "--providers.docker=true" - - "--providers.docker.exposedbydefault=false" - - "--entrypoints.web.address=:80" - - "--entrypoints.websecure.address=:443" - - "--certificatesresolvers.myresolver.acme.httpchallenge=true" - - "--certificatesresolvers.myresolver.acme.httpchallenge.entrypoint=web" - #- "--certificatesresolvers.myresolver.acme.caserver=https://acme-staging-v02.api.letsencrypt.org/directory" - - "--certificatesresolvers.myresolver.acme.email=postmaster@example.com" - - "--certificatesresolvers.myresolver.acme.storage=/letsencrypt/acme.json" - ports: - - "80:80" - - "443:443" - volumes: - - "./letsencrypt:/letsencrypt" - - "/var/run/docker.sock:/var/run/docker.sock:ro" - - nginx: - depends_on: - - traefik - labels: - - "traefik.enable=true" - - "traefik.http.routers.nginx.rule=Host(`example.com`)" - - "traefik.http.routers.nginx.entrypoints=websecure" - - "traefik.http.routers.nginx.tls.certresolver=myresolver" - ports: - - "443:443" - diff --git a/docker/traefik.yml b/docker/traefik.yml new file mode 100644 index 0000000000..dccf7cb452 --- /dev/null +++ b/docker/traefik.yml @@ -0,0 +1,15 @@ +services: + traefik: + image: "traefik:3.0" + labels: + - "traefik.enable=true" + + nginx: + depends_on: + - traefik + labels: + - 
"traefik.enable=true" + - "traefik.http.services.nginx.loadbalancer.server.port=80" + expose: + - "80" + diff --git a/docker/traefik_local.yml b/docker/traefik_local.yml new file mode 100644 index 0000000000..8254b0f2b6 --- /dev/null +++ b/docker/traefik_local.yml @@ -0,0 +1,28 @@ +services: + traefik: + container_name: "intelowl_traefik_local" + command: + # Pleases refer to the official documentation: https://doc.traefik.io/traefik/ + # LOGS + - "--log.level=DEBUG" + # DASHBOARD + - "--api.insecure=true" + - "--api.dashboard=true" + # ENTRYPOINTS - redirect every request to use HTTPS + - "--entrypoints.web.address=:80" + # PROVIDERS + - "--providers.docker=true" + - "--providers.docker.watch=true" + - "--providers.docker.exposedbydefault=false" + ports: + - "80:80" + - "8080:8080" + volumes: + - "/var/run/docker.sock:/var/run/docker.sock:ro" + + nginx: + depends_on: + - traefik + labels: + - "traefik.http.routers.nginx.rule=Host(`localhost`)" + - "traefik.http.routers.nginx.entrypoints=web" diff --git a/docker/traefik_prod.yml b/docker/traefik_prod.yml new file mode 100644 index 0000000000..ed71e33625 --- /dev/null +++ b/docker/traefik_prod.yml @@ -0,0 +1,66 @@ +services: + traefik: + container_name: "intelowl_traefik_prod" + command: + # Pleases refer to the official documentation: https://doc.traefik.io/traefik/ + # LOGS - may be omitted if you don't need logs + - "--accesslog=true" + - "--accesslog.filepath=/var/log/traefik/access.log" + - "--log.filePath=/var/log/traefik/traefik.log" + - "--log.level=DEBUG" + # DASHBOARD + - "--api.dashboard=true" + # PROVIDERS + - "--providers.docker=true" + - "--providers.docker.watch=true" + - "--providers.docker.exposedbydefault=false" + # ENTRYPOINTS - redirect every request to use HTTPS + - "--entrypoints.web.address=:80" + - "--entryPoints.web.http.redirections.entryPoint.to=websecure" + - "--entryPoints.web.http.redirections.entryPoint.scheme=https" + - "--entryPoints.web.http.redirections.entrypoint.permanent=true" + - "--entrypoints.websecure.address=:443" + # CERTIFICATE RESOLVERS + - "--certificatesresolvers.le.acme.httpchallenge=true" + - "--certificatesresolvers.le.acme.httpchallenge.entrypoint=web" + # DEV - use this for testing purposes or else you might get blocked - # CHANGE THIS + - "--certificatesresolvers.le.acme.caserver=https://acme-staging-v02.api.letsencrypt.org/directory" + # PROD - use this if everything works fine - # CHANGE THIS + #- "--certificatesresolvers.le.acme.caserver=https://acme-v02.api.letsencrypt.org/directory" + - "--certificatesresolvers.le.acme.email=postmaster@example.com" # CHANGE THIS + - "--certificatesresolvers.le.acme.storage=/etc/letsencrypt/acme.json" + labels: + # DASHBOARD - setup for secure dashboard access + - "traefik.http.routers.dashboard.rule=Host(`traefik.intelowl.example.com`) && (PathPrefix(`/api`) || PathPrefix(`/dashboard`))" # CHANGE THIS (Only "Host"!) 
+ - "traefik.http.routers.dashboard.service=api@internal" + - "traefik.http.routers.dashboard.entrypoints=websecure" + - "traefik.http.routers.dashboard.tls=true" + - "traefik.http.routers.dashboard.tls.certresolver=le" + # auth/ipallowlist middlewares allow to limit/secure access - may be omitted + # Here you may define which IPs/CIDR ranges are allowed to access this resource - may be omitted + # - "traefik.http.routers.dashboard.middlewares=dashboard-ipallowlist" + # - "traefik.http.middlewares.dashboard-ipallowlist.ipallowlist.sourcerange=0.0.0.0" # CHANGE THIS + # You can create a new user and password for basic auth with this command: + # echo $(htpasswd -nbB user password) | sed -e s/\\$/\\$\\$/g + # - "traefik.http.routers.dashboard.middlewares=auth" + # - "traefik.http.middlewares.auth.basicauth.users=user:$$2y$$05$$v.ncVNXEJriELglCBEZJmu5I1VrhyhuaVCXATRQTUVuvOF1qgYwpa" # CHANGE THIS (default is user:password) + - "traefik.http.services.dashboard.loadbalancer.server.port=8080" + ports: + - "80:80" + - "443:443" + volumes: + - "/var/run/docker.sock:/var/run/docker.sock:ro" + - "/etc/letsencrypt:/etc/letsencrypt" + - "/var/log/traefik:/var/log/traefik" + + nginx: + depends_on: + - traefik + labels: + - "traefik.http.routers.nginx.rule=Host(`intelowl.example.com`)" # CHANGE THIS + - "traefik.http.routers.nginx.entrypoints=websecure" + - "traefik.http.routers.nginx.tls=true" + - "traefik.http.routers.nginx.tls.certresolver=le" + # Here you may define which IPs/CIDR ranges are allowed to access this resource + # - "traefik.http.routers.nginx.middlewares=nginx-ipallowlist" + # - "traefik.http.middlewares.nginx-ipallowlist.ipallowlist.sourcerange=0.0.0.0" # CHANGE THIS diff --git a/docs/source/Advanced-Usage.md b/docs/source/Advanced-Usage.md index f0200bb9eb..310cbb1605 100755 --- a/docs/source/Advanced-Usage.md +++ b/docs/source/Advanced-Usage.md @@ -60,7 +60,7 @@ After a user registration has been made, an email is sent to the user to verify Once the user has verified their email, they would be manually vetted before being allowed to use the IntelOwl platform. The registration requests would be handled in the Django Admin page by admins. If you have IntelOwl deployed on an AWS instance with an IAM role you can use the [SES](/Advanced-Usage.md#ses) service. -To have the "Registration" page to work correctly, you must configure some variables before starting IntelOwl. See [Optional Environment Configuration](/Installation.md#other-optional-configuration-to-enable-specific-services-features) +To have the "Registration" page to work correctly, you must configure some variables before starting IntelOwl. See [Optional Environment Configuration](https://intelowl.readthedocs.io/en/latest/Installation.html#other-optional-configuration-to-enable-specific-services-features) In a development environment the emails that would be sent are written to the standard output. diff --git a/docs/source/Contribute.md b/docs/source/Contribute.md index 848fb5c359..0d5ba976da 100755 --- a/docs/source/Contribute.md +++ b/docs/source/Contribute.md @@ -331,7 +331,11 @@ To do so, some utility classes have been made: