diff --git a/.github/CHANGELOG.md b/.github/CHANGELOG.md
index 576a218511..79bfcc261e 100644
--- a/.github/CHANGELOG.md
+++ b/.github/CHANGELOG.md
@@ -2,13 +2,23 @@
 [**Upgrade Guide**](https://intelowl.readthedocs.io/en/latest/Installation.md#update-to-the-most-recent-version)
 
+## [v6.0.2](https://github.com/intelowlproject/IntelOwl/releases/tag/v6.0.2)
+Major fixes and adjustments. We improved the documentation to help the transition to the new major version.
+
+We added **Pivot** buttons to enable manual pivoting from an Observable/File analysis to another. See the [Doc](https://intelowl.readthedocs.io/en/latest/Usage.html#pivots) for more info.
+
+As usual, we added new plugins. This release brings the following new ones:
+* a complete **TakedownRequest** playbook to automate takedown requests for malicious domains
+* new File Analyzers for tools like [HFinger](https://github.com/CERT-Polska/hfinger), [Permhash](https://github.com/google/permhash) and [Blint](https://github.com/owasp-dep-scan/blint)
+* an improvement to the existing Maxmind analyzer: it now downloads the ASN database too.
+
 ## [v6.0.1](https://github.com/intelowlproject/IntelOwl/releases/tag/v6.0.1)
 Little fixes for the major.
 
 ## [v6.0.0](https://github.com/intelowlproject/IntelOwl/releases/tag/v6.0.0)
 This major release is another important milestone for this project! We have been working hard to transform IntelOwl from a *Data Extraction Platform* to a complete *Investigation Platform*!
-One of the most noticeable feature is the addition of the [**Investigation** framework](https://intelowl.readthedocs.io/en/latest/Usage.md#investigations-framework)!
+One of the most noticeable features is the addition of the [**Investigation** framework](https://intelowl.readthedocs.io/en/latest/Usage.html#investigations-framework)!
 Thanks to the this new feature, analysts can leverage IntelOwl as the starting point of their "Investigations", register their findings, correlate the information found, and collaborate...all in a single place.
diff --git a/.github/ISSUE_TEMPLATE/new_scanner.md b/.github/ISSUE_TEMPLATE/new_scanner.md
deleted file mode 100644
index d070bdb9cf..0000000000
--- a/.github/ISSUE_TEMPLATE/new_scanner.md
+++ /dev/null
@@ -1,21 +0,0 @@
----
-name: New Scanner
-about: A new scanner to integrate with IntelOwl
-title: "[Scanner]"
-labels: new_scanner
-assignees: ''
-
----
-
-## Name
-
-## Link
-
-## Type of scanner
-**this can be observable, file, and docker**
-
-
-## Why should we use it
-
-
-## Possible implementation
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index f7e8fa8f64..f22a1dfd92 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -46,7 +46,7 @@ jobs:
       - name: Set up Python
         uses: actions/setup-python@v5.1.0
        with:
-          python-version: '3.9'
+          python-version: '3.11'
 
       - name: Install dependencies
         run: |
diff --git a/.github/workflows/pull_request_automation.yml b/.github/workflows/pull_request_automation.yml
index 4f28fbde73..a1588e7351 100644
--- a/.github/workflows/pull_request_automation.yml
+++ b/.github/workflows/pull_request_automation.yml
@@ -39,7 +39,7 @@ jobs:
       - name: Set up Python
         uses: actions/setup-python@v5.1.0
        with:
-          python-version: 3.9
+          python-version: 3.11
 
       - name: Install Dependencies
         run: |
diff --git a/api_app/admin.py b/api_app/admin.py
index eb97fdf35b..8ad3daba92 100644
--- a/api_app/admin.py
+++ b/api_app/admin.py
@@ -190,6 +190,7 @@ def get_secrets(self, obj: PythonModule):
 class AbstractConfigAdminView(CustomAdminView):
     list_display = ("name", "description", "disabled", "disabled_in_orgs")
     search_fields = ("name",)
+    list_filter = ("disabled",)
 
     # allow to clone the object
     save_as = True
diff --git a/api_app/analyzers_manager/file_analyzers/blint_scan.py b/api_app/analyzers_manager/file_analyzers/blint_scan.py
new file mode 100644
index 0000000000..1e8de815f2
--- /dev/null
+++ b/api_app/analyzers_manager/file_analyzers/blint_scan.py
@@ -0,0 +1,37 @@
+import logging
+import os
+import shutil
+
+from blint.analysis import AnalysisRunner
+from django.conf import settings
+
+from api_app.analyzers_manager.classes import FileAnalyzer
+from intel_owl.settings._util import set_permissions
+
+logger = logging.getLogger(__name__)
+
+
+class BlintAnalyzer(FileAnalyzer):
+    """
+    Wrapper for Blint static analysis tool
+    """
+
+    def update(self) -> bool:
+        pass
+
+    def run(self) -> dict:
+        logger.info(f"Running Blint on {self.filepath} for {self.md5}")
+
+        reports_dir = settings.BLINT_REPORTS_PATH / f"blint_analysis_{self.md5}"
+        os.mkdir(reports_dir)
+        set_permissions(reports_dir)
+
+        analyzer = AnalysisRunner()
+        findings, reviews, fuzzables = analyzer.start(
+            files=[self.filepath], reports_dir=reports_dir
+        )
+        response = {"findings": findings, "reviews": reviews, "fuzzables": fuzzables}
+
+        shutil.rmtree(reports_dir)
+
+        return response
diff --git a/api_app/analyzers_manager/file_analyzers/floss.py b/api_app/analyzers_manager/file_analyzers/floss.py
index 9a622f0f8c..cc6d5a9d6d 100644
--- a/api_app/analyzers_manager/file_analyzers/floss.py
+++ b/api_app/analyzers_manager/file_analyzers/floss.py
@@ -4,6 +4,7 @@
 from json import dumps as json_dumps
 
 from api_app.analyzers_manager.classes import DockerBasedAnalyzer, FileAnalyzer
+from api_app.analyzers_manager.exceptions import AnalyzerRunException
 
 
 class Floss(FileAnalyzer, DockerBasedAnalyzer):
@@ -23,15 +24,28 @@ class Floss(FileAnalyzer, DockerBasedAnalyzer):
     max_no_of_strings: dict
     rank_strings: dict
 
+    @classmethod
+    def update(cls) -> bool:
+        pass
+
     def run(self):
         # get binary
         binary = self.read_file_bytes()
         # make request data
         fname = str(self.filename).replace("/", "_").replace(" ", "_")
-        args = [f"@{fname}"]
+        # From floss v3 there is a prompt that can be overcome
+        # by using the flag --no static.
+        # We can lose static strings considering that we can easily
+        # retrieve them with simpler tools
+        args = [f"@{fname}", "--json", "--no", "static"]
         req_data = {"args": args, "timeout": self.timeout}
         req_files = {fname: binary}
         result = self._docker_run(req_data, req_files)
+        if not isinstance(result, dict):
+            raise AnalyzerRunException(
+                f"result from floss tool is not a dict but is {type(result)}."
+                f" Full dump: {result}"
+            )
         result["exceeded_max_number_of_strings"] = {}
         # we are changing the endpoint of _docker_run to stringsifter
         self.url = self.ranking_url
diff --git a/api_app/analyzers_manager/file_analyzers/hfinger.py b/api_app/analyzers_manager/file_analyzers/hfinger.py
new file mode 100644
index 0000000000..2e0b678ee7
--- /dev/null
+++ b/api_app/analyzers_manager/file_analyzers/hfinger.py
@@ -0,0 +1,57 @@
+# This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl
+# See the file 'LICENSE' for copying permission.
+
+from hfinger.analysis import hfinger_analyze
+
+from api_app.analyzers_manager.classes import FileAnalyzer
+from tests.mock_utils import if_mock_connections, patch
+
+
+class Hfinger(FileAnalyzer):
+    """
+    Create fingerprints of malware HTTP
+    requests stored in pcap files.
+    """
+
+    fingerprint_report_mode: int = 2
+
+    def run(self):
+        return hfinger_analyze(self.filepath, self.fingerprint_report_mode)
+
+    @classmethod
+    def update(cls) -> bool:
+        pass
+
+    @classmethod
+    def _monkeypatch(cls):
+        patches = [
+            if_mock_connections(
+                patch(
+                    "hfinger.analysis.hfinger_analyze",
+                    return_value=[
+                        {
+                            "epoch_time": "1388111476.787707000",
+                            "ip_src": "192.168.1.138",
+                            "ip_dst": "173.194.115.80",
+                            "port_src": "49209",
+                            "port_dst": "80",
+                            "fingerprint": "2.4|1|0.5||2.4|1.2|GE|1|ac,ac-la,us-ag,\
+                            ac-en,ho,co|ac:te-ht,ap-xh+xm,as-as/ac-la:75ef792f/\
+                            us-ag:ca0c4d71/ac-en:gz,de/co:Ke-Al|||",
+                        },
+                        {
+                            "epoch_time": "1388111477.142485000",
+                            "ip_src": "192.168.1.138",
+                            "ip_dst": "66.225.230.141",
+                            "port_src": "49220",
+                            "port_dst": "80",
+                            "fingerprint": "1.5|3|1.0|html|||GE|1|ac,re,ac-la,us-ag,\
+                            ac-en,ho,co|ac:te-ht,ap-xh+xm,as-as/ac-la:75ef792f/\
+                            us-ag:ca0c4d71/ac-en:gz,de/co:Ke-Al|||",
+                        },
+                    ],
+                )
+            )
+        ]
+
+        return super()._monkeypatch(patches=patches)
diff --git a/api_app/analyzers_manager/file_analyzers/perm_hash.py b/api_app/analyzers_manager/file_analyzers/perm_hash.py
new file mode 100644
index 0000000000..b729c78221
--- /dev/null
+++ b/api_app/analyzers_manager/file_analyzers/perm_hash.py
@@ -0,0 +1,84 @@
+# This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl
+# See the file 'LICENSE' for copying permission.
+
+import logging
+
+import magic
+from permhash.functions import (
+    APK_MANIFEST_MIMETYPES,
+    APK_MIMETYPES,
+    CRX_MANIFEST_MIMETYPES,
+    CRX_MIMETYPES,
+    permhash_apk,
+    permhash_apk_manifest,
+    permhash_crx,
+    permhash_crx_manifest,
+)
+
+from api_app.analyzers_manager.classes import FileAnalyzer
+from api_app.analyzers_manager.exceptions import AnalyzerRunException
+from tests.mock_utils import if_mock_connections, patch
+
+logger = logging.getLogger(__name__)
+
+
+class Permhash(FileAnalyzer):
+    """
+    Create permissions hash of APK, Chrome extensions,
+    Android manifest and Chrome extension manifest files.
+ """ + + def run(self): + result = {} + mimetype = magic.from_file(self.filepath, mime=True) + + hash_val = "" + + if mimetype in APK_MIMETYPES: + hash_val = permhash_apk(self.filepath) + elif mimetype in APK_MANIFEST_MIMETYPES: + hash_val = permhash_apk_manifest(self.filepath) + elif mimetype in CRX_MIMETYPES: + hash_val = permhash_crx(self.filepath) + elif mimetype in CRX_MANIFEST_MIMETYPES: + hash_val = permhash_crx_manifest(self.filepath) + else: + raise AnalyzerRunException(f"Mimetype {mimetype} not supported.") + + # permhash returns False if for some reason the hash value can't be found + if hash_val: + result["hash"] = hash_val + else: + result["error"] = "Could not find permissions in the file." + + return result + + @classmethod + def update(cls) -> bool: + pass + + @classmethod + def _monkeypatch(cls): + hash_val = "aad106ceb64ac2a636ddec77c3feed4c2ffc5c27ab353660d8cb3e1c971ef278" + patches = [ + if_mock_connections( + patch( + "permhash.functions.permhash_apk", + return_value=hash_val, + ), + patch( + "permhash.functions.permhash_apk_manifest", + return_value=hash_val, + ), + patch( + "permhash.functions.permhash_crx", + return_value=hash_val, + ), + patch( + "permhash.functions.permhash_crx_manifest", + return_value=hash_val, + ), + ) + ] + + return super()._monkeypatch(patches=patches) diff --git a/api_app/analyzers_manager/file_analyzers/xlm_macro_deobfuscator.py b/api_app/analyzers_manager/file_analyzers/xlm_macro_deobfuscator.py index f9ca486d1a..e2425ed2c9 100644 --- a/api_app/analyzers_manager/file_analyzers/xlm_macro_deobfuscator.py +++ b/api_app/analyzers_manager/file_analyzers/xlm_macro_deobfuscator.py @@ -14,6 +14,10 @@ class XlmMacroDeobfuscator(FileAnalyzer): passwords_to_check: list + @classmethod + def update(cls) -> bool: + pass + def run(self): results = {} try: @@ -24,7 +28,7 @@ def run(self): if not results: results["error"] = "Can't decrypt with current passwords" except SoftTimeLimitExceeded: - self._handle_base_exception("Soft Time Limit Exceeded") + self._handle_exception("Soft Time Limit Exceeded", is_base_err=True) return results def decrypt(self, xlmpassword=""): diff --git a/api_app/analyzers_manager/file_analyzers/yara_scan.py b/api_app/analyzers_manager/file_analyzers/yara_scan.py index 490002b592..a94fda05c6 100644 --- a/api_app/analyzers_manager/file_analyzers/yara_scan.py +++ b/api_app/analyzers_manager/file_analyzers/yara_scan.py @@ -139,8 +139,13 @@ def _update_git(self): try: o.pull(allow_unrelated_histories=True, rebase=True) except git.exc.GitCommandError as e: - logger.exception(e) - return + if "index.lock" in e.stderr: + # for some reason the git process did not exit correctly + self.delete_lock_file() + o.pull(allow_unrelated_histories=True, rebase=True) + else: + logger.exception(e) + return else: logger.info(f"About to clone {self.url} at {self.directory}") git.Repo.clone_from(self.url, self.directory, depth=1) @@ -151,6 +156,10 @@ def _update_git(self): if settings.GIT_KEY_PATH.exists(): os.remove(settings.GIT_KEY_PATH) + def delete_lock_file(self): + lock_file_path = self.directory / ".git" / "index.lock" + lock_file_path.unlink(missing_ok=False) + @property def compiled_file_name(self): return "intel_owl_compiled.yas" @@ -328,7 +337,7 @@ def __repr__(self): class YaraScan(FileAnalyzer): ignore: list repositories: list - _private_repositories: dict + _private_repositories: dict = {} local_rules: str def _get_owner_and_key(self, url: str) -> Tuple[Union[str, None], Union[str, None]]: diff --git 
diff --git a/api_app/analyzers_manager/migrations/0075_adjust_greynoise.py b/api_app/analyzers_manager/migrations/0075_adjust_greynoise.py
new file mode 100644
index 0000000000..e917ec7f83
--- /dev/null
+++ b/api_app/analyzers_manager/migrations/0075_adjust_greynoise.py
@@ -0,0 +1,28 @@
+from django.db import migrations
+
+
+def migrate(apps, schema_editor):
+    PythonModule = apps.get_model("api_app", "PythonModule")
+
+    pm = PythonModule.objects.get(
+        module="greynoiseintel.GreyNoiseAnalyzer",
+        base_path="api_app.analyzers_manager.observable_analyzers",
+    )
+    param = pm.parameters.get(name="api_key_name")
+    param.required = False
+    param.values.filter(owner=None, for_organization=False).delete()
+    param.save()
+
+
+def reverse_migrate(apps, schema_editor):
+    ...
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("api_app", "0062_alter_parameter_python_module"),
+        ("analyzers_manager", "0074_adjust_maximum_tlp"),
+    ]
+    operations = [
+        migrations.RunPython(migrate, reverse_migrate),
+    ]
diff --git a/api_app/analyzers_manager/migrations/0076_analyzer_config_greynoise_labs.py b/api_app/analyzers_manager/migrations/0076_analyzer_config_greynoise_labs.py
new file mode 100644
index 0000000000..65bde2f393
--- /dev/null
+++ b/api_app/analyzers_manager/migrations/0076_analyzer_config_greynoise_labs.py
@@ -0,0 +1,135 @@
+from django.db import migrations
+from django.db.models.fields.related_descriptors import (
+    ForwardManyToOneDescriptor,
+    ForwardOneToOneDescriptor,
+    ManyToManyDescriptor,
+)
+
+plugin = {
+    "python_module": {
+        "health_check_schedule": None,
+        "update_schedule": {
+            "minute": "0",
+            "hour": "*/6",
+            "day_of_week": "*",
+            "day_of_month": "*",
+            "month_of_year": "*",
+        },
+        "module": "greynoise_labs.GreynoiseLabs",
+        "base_path": "api_app.analyzers_manager.observable_analyzers",
+    },
+    "name": "Greynoise_Labs",
+    "description": "scan an IP against the [Greynoise Labs API](https://www.greynoise.io/) (requires authentication token obtained from cookies on greynoise website)",
+    "disabled": False,
+    "soft_time_limit": 60,
+    "routing_key": "default",
+    "health_check_status": True,
+    "type": "observable",
+    "docker_based": False,
+    "maximum_tlp": "AMBER",
+    "observable_supported": ["ip"],
+    "supported_filetypes": [],
+    "run_hash": False,
+    "run_hash_type": "",
+    "not_supported_filetypes": [],
+    "model": "analyzers_manager.AnalyzerConfig",
+}
+
+params = [
+    {
+        "python_module": {
+            "module": "greynoise_labs.GreynoiseLabs",
+            "base_path": "api_app.analyzers_manager.observable_analyzers",
+        },
+        "name": "auth_token",
+        "type": "str",
+        "description": "Authentication token obtained from cookies on greynoise website.",
+        "is_secret": True,
+        "required": True,
+    }
+]
+
+values = []
+
+
+def _get_real_obj(Model, field, value):
+    def _get_obj(Model, other_model, value):
+        if isinstance(value, dict):
+            real_vals = {}
+            for key, real_val in value.items():
+                real_vals[key] = _get_real_obj(other_model, key, real_val)
+            value = other_model.objects.get_or_create(**real_vals)[0]
+        # it is just the primary key serialized
+        else:
+            if isinstance(value, int):
+                if Model.__name__ == "PluginConfig":
+                    value = other_model.objects.get(name=plugin["name"])
+                else:
+                    value = other_model.objects.get(pk=value)
+            else:
+                value = other_model.objects.get(name=value)
+        return value
+
+    if (
+        type(getattr(Model, field))
+        in [ForwardManyToOneDescriptor, ForwardOneToOneDescriptor]
+        and value
+    ):
+        other_model = getattr(Model, field).get_queryset().model
+        value = _get_obj(Model, other_model, value)
+    elif type(getattr(Model, field)) in [ManyToManyDescriptor] and value:
+        other_model = getattr(Model, field).rel.model
+        value = [_get_obj(Model, other_model, val) for val in value]
+    return value
+
+
+def _create_object(Model, data):
+    mtm, no_mtm = {}, {}
+    for field, value in data.items():
+        value = _get_real_obj(Model, field, value)
+        if type(getattr(Model, field)) is ManyToManyDescriptor:
+            mtm[field] = value
+        else:
+            no_mtm[field] = value
+    try:
+        o = Model.objects.get(**no_mtm)
+    except Model.DoesNotExist:
+        o = Model(**no_mtm)
+        o.full_clean()
+        o.save()
+        for field, value in mtm.items():
+            attribute = getattr(o, field)
+            if value is not None:
+                attribute.set(value)
+        return False
+    return True
+
+
+def migrate(apps, schema_editor):
+    Parameter = apps.get_model("api_app", "Parameter")
+    PluginConfig = apps.get_model("api_app", "PluginConfig")
+    python_path = plugin.pop("model")
+    Model = apps.get_model(*python_path.split("."))
+    if not Model.objects.filter(name=plugin["name"]).exists():
+        exists = _create_object(Model, plugin)
+        if not exists:
+            for param in params:
+                _create_object(Parameter, param)
+            for value in values:
+                _create_object(PluginConfig, value)
+
+
+def reverse_migrate(apps, schema_editor):
+    python_path = plugin.pop("model")
+    Model = apps.get_model(*python_path.split("."))
+    Model.objects.get(name=plugin["name"]).delete()
+
+
+class Migration(migrations.Migration):
+    atomic = False
+    dependencies = [
+        ("api_app", "0062_alter_parameter_python_module"),
+        ("analyzers_manager", "0075_adjust_greynoise"),
+    ]
+
+    operations = [migrations.RunPython(migrate, reverse_migrate)]
diff --git a/api_app/analyzers_manager/migrations/0077_analyzer_config_abusix.py b/api_app/analyzers_manager/migrations/0077_analyzer_config_abusix.py
new file mode 100644
index 0000000000..9e211aed6e
--- /dev/null
+++ b/api_app/analyzers_manager/migrations/0077_analyzer_config_abusix.py
@@ -0,0 +1,117 @@
+from django.db import migrations
+from django.db.models.fields.related_descriptors import (
+    ForwardManyToOneDescriptor,
+    ForwardOneToOneDescriptor,
+    ManyToManyDescriptor,
+)
+
+plugin = {
+    "python_module": {
+        "health_check_schedule": None,
+        "update_schedule": None,
+        "module": "abusix.Abusix",
+        "base_path": "api_app.analyzers_manager.observable_analyzers",
+    },
+    "name": "Abusix",
+    "description": "get abuse contacts of an IP from [Abusix](https://abusix.com/contact-db/)",
+    "disabled": False,
+    "soft_time_limit": 60,
+    "routing_key": "default",
+    "health_check_status": False,
+    "type": "observable",
+    "docker_based": False,
+    "maximum_tlp": "AMBER",
+    "observable_supported": ["ip"],
+    "supported_filetypes": [],
+    "run_hash": False,
+    "run_hash_type": "",
+    "not_supported_filetypes": [],
+    "model": "analyzers_manager.AnalyzerConfig",
+}
+
+params = []
+
+values = []
+
+
+def _get_real_obj(Model, field, value):
+    def _get_obj(Model, other_model, value):
+        if isinstance(value, dict):
+            real_vals = {}
+            for key, real_val in value.items():
+                real_vals[key] = _get_real_obj(other_model, key, real_val)
+            value = other_model.objects.get_or_create(**real_vals)[0]
+        # it is just the primary key serialized
+        else:
+            if isinstance(value, int):
+                if Model.__name__ == "PluginConfig":
+                    value = other_model.objects.get(name=plugin["name"])
+                else:
+                    value = other_model.objects.get(pk=value)
+            else:
+                value = other_model.objects.get(name=value)
+        return value
+
+    if (
+        type(getattr(Model, field))
+        in [ForwardManyToOneDescriptor, ForwardOneToOneDescriptor]
+        and value
+    ):
+        other_model = getattr(Model, field).get_queryset().model
+        value = _get_obj(Model, other_model, value)
+    elif type(getattr(Model, field)) in [ManyToManyDescriptor] and value:
+        other_model = getattr(Model, field).rel.model
+        value = [_get_obj(Model, other_model, val) for val in value]
+    return value
+
+
+def _create_object(Model, data):
+    mtm, no_mtm = {}, {}
+    for field, value in data.items():
+        value = _get_real_obj(Model, field, value)
+        if type(getattr(Model, field)) is ManyToManyDescriptor:
+            mtm[field] = value
+        else:
+            no_mtm[field] = value
+    try:
+        o = Model.objects.get(**no_mtm)
+    except Model.DoesNotExist:
+        o = Model(**no_mtm)
+        o.full_clean()
+        o.save()
+        for field, value in mtm.items():
+            attribute = getattr(o, field)
+            if value is not None:
+                attribute.set(value)
+        return False
+    return True
+
+
+def migrate(apps, schema_editor):
+    Parameter = apps.get_model("api_app", "Parameter")
+    PluginConfig = apps.get_model("api_app", "PluginConfig")
+    python_path = plugin.pop("model")
+    Model = apps.get_model(*python_path.split("."))
+    if not Model.objects.filter(name=plugin["name"]).exists():
+        exists = _create_object(Model, plugin)
+        if not exists:
+            for param in params:
+                _create_object(Parameter, param)
+            for value in values:
+                _create_object(PluginConfig, value)
+
+
+def reverse_migrate(apps, schema_editor):
+    python_path = plugin.pop("model")
+    Model = apps.get_model(*python_path.split("."))
+    Model.objects.get(name=plugin["name"]).delete()
+
+
+class Migration(migrations.Migration):
+    atomic = False
+    dependencies = [
+        ("api_app", "0062_alter_parameter_python_module"),
+        ("analyzers_manager", "0076_analyzer_config_greynoise_labs"),
+    ]
+
+    operations = [migrations.RunPython(migrate, reverse_migrate)]
diff --git a/api_app/analyzers_manager/migrations/0078_analyzer_config_hfinger.py b/api_app/analyzers_manager/migrations/0078_analyzer_config_hfinger.py
new file mode 100644
index 0000000000..719a890b3d
--- /dev/null
+++ b/api_app/analyzers_manager/migrations/0078_analyzer_config_hfinger.py
@@ -0,0 +1,152 @@
+from django.db import migrations
+from django.db.models.fields.related_descriptors import (
+    ForwardManyToOneDescriptor,
+    ForwardOneToOneDescriptor,
+    ManyToManyDescriptor,
+)
+
+plugin = {
+    "python_module": {
+        "health_check_schedule": None,
+        "update_schedule": None,
+        "module": "hfinger.Hfinger",
+        "base_path": "api_app.analyzers_manager.file_analyzers",
+    },
+    "name": "Hfinger",
+    "description": "create fingerprints of malware HTTP requests using [Hfinger](https://github.com/CERT-Polska/hfinger)",
+    "disabled": False,
+    "soft_time_limit": 30,
+    "routing_key": "default",
+    "health_check_status": True,
+    "type": "file",
+    "docker_based": False,
+    "maximum_tlp": "RED",
+    "observable_supported": [],
+    "supported_filetypes": ["application/vnd.tcpdump.pcap"],
+    "run_hash": False,
+    "run_hash_type": "",
+    "not_supported_filetypes": [],
+    "model": "analyzers_manager.AnalyzerConfig",
+}
+
+params = [
+    {
+        "python_module": {
+            "module": "hfinger.Hfinger",
+            "base_path": "api_app.analyzers_manager.file_analyzers",
+        },
+        "name": "fingerprint_report_mode",
+        "type": "int",
+        "description": "Fingerprint report mode. \r\n0 - similar number of collisions and fingerprints as mode 2, but using fewer features, \r\n1 - representation of all designed features, but a little more collisions than modes 0, 2, and 4, \r\n2 - optimal (the default mode), \r\n3 - the lowest number of generated fingerprints, but the highest number of collisions, \r\n4 - the highest fingerprint entropy, but slightly more fingerprints than modes 0-2",
+        "is_secret": False,
+        "required": False,
+    }
+]
+
+values = [
+    {
+        "parameter": {
+            "python_module": {
+                "module": "hfinger.Hfinger",
+                "base_path": "api_app.analyzers_manager.file_analyzers",
+            },
+            "name": "fingerprint_report_mode",
+            "type": "int",
+            "description": "Fingerprint report mode. \r\n0 - similar number of collisions and fingerprints as mode 2, but using fewer features, \r\n1 - representation of all designed features, but a little more collisions than modes 0, 2, and 4, \r\n2 - optimal (the default mode), \r\n3 - the lowest number of generated fingerprints, but the highest number of collisions, \r\n4 - the highest fingerprint entropy, but slightly more fingerprints than modes 0-2",
+            "is_secret": False,
+            "required": False,
+        },
+        "analyzer_config": "Hfinger",
+        "connector_config": None,
+        "visualizer_config": None,
+        "ingestor_config": None,
+        "pivot_config": None,
+        "for_organization": False,
+        "value": 2,
+        "updated_at": "2024-04-03T19:33:51.679066Z",
+        "owner": None,
+    }
+]
+
+
+def _get_real_obj(Model, field, value):
+    def _get_obj(Model, other_model, value):
+        if isinstance(value, dict):
+            real_vals = {}
+            for key, real_val in value.items():
+                real_vals[key] = _get_real_obj(other_model, key, real_val)
+            value = other_model.objects.get_or_create(**real_vals)[0]
+        # it is just the primary key serialized
+        else:
+            if isinstance(value, int):
+                if Model.__name__ == "PluginConfig":
+                    value = other_model.objects.get(name=plugin["name"])
+                else:
+                    value = other_model.objects.get(pk=value)
+            else:
+                value = other_model.objects.get(name=value)
+        return value
+
+    if (
+        type(getattr(Model, field))
+        in [ForwardManyToOneDescriptor, ForwardOneToOneDescriptor]
+        and value
+    ):
+        other_model = getattr(Model, field).get_queryset().model
+        value = _get_obj(Model, other_model, value)
+    elif type(getattr(Model, field)) in [ManyToManyDescriptor] and value:
+        other_model = getattr(Model, field).rel.model
+        value = [_get_obj(Model, other_model, val) for val in value]
+    return value
+
+
+def _create_object(Model, data):
+    mtm, no_mtm = {}, {}
+    for field, value in data.items():
+        value = _get_real_obj(Model, field, value)
+        if type(getattr(Model, field)) is ManyToManyDescriptor:
+            mtm[field] = value
+        else:
+            no_mtm[field] = value
+    try:
+        o = Model.objects.get(**no_mtm)
+    except Model.DoesNotExist:
+        o = Model(**no_mtm)
+        o.full_clean()
+        o.save()
+        for field, value in mtm.items():
+            attribute = getattr(o, field)
+            if value is not None:
+                attribute.set(value)
+        return False
+    return True
+
+
+def migrate(apps, schema_editor):
+    Parameter = apps.get_model("api_app", "Parameter")
+    PluginConfig = apps.get_model("api_app", "PluginConfig")
+    python_path = plugin.pop("model")
+    Model = apps.get_model(*python_path.split("."))
+    if not Model.objects.filter(name=plugin["name"]).exists():
+        exists = _create_object(Model, plugin)
+        if not exists:
+            for param in params:
+                _create_object(Parameter, param)
+            for value in values:
+                _create_object(PluginConfig, value)
+
+
+def reverse_migrate(apps, schema_editor):
+    python_path = plugin.pop("model")
+    Model = apps.get_model(*python_path.split("."))
+    Model.objects.get(name=plugin["name"]).delete()
+
+
+class Migration(migrations.Migration):
+    atomic = False
+    dependencies = [
+        ("api_app", "0062_alter_parameter_python_module"),
+        ("analyzers_manager", "0077_analyzer_config_abusix"),
+    ]
+
+    operations = [migrations.RunPython(migrate, reverse_migrate)]
diff --git a/api_app/analyzers_manager/migrations/0079_remove_dns0_rrsets_analyzer.py b/api_app/analyzers_manager/migrations/0079_remove_dns0_rrsets_analyzer.py
new file mode 100644
index 0000000000..25df9bbc4c
--- /dev/null
+++ b/api_app/analyzers_manager/migrations/0079_remove_dns0_rrsets_analyzer.py
@@ -0,0 +1,27 @@
+from django.db import migrations
+
+
+def migrate(apps, schema_editor):
+    PythonModule = apps.get_model("api_app", "PythonModule")
+    pm = PythonModule.objects.filter(
+        module="dns0.dns0_rrsets.DNS0Rrsets",
+        base_path="api_app.analyzers_manager.observable_analyzers",
+    ).first()
+    if pm:
+        pm.analyzerconfigs.all().delete()
+        pm.delete()
+
+
+def reverse_migrate(apps, schema_editor):
+    pass
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("api_app", "0062_alter_parameter_python_module"),
+        ("playbooks_manager", "0032_delete_dns0_playbook_free_to_use_analyzers"),
+        ("analyzers_manager", "0078_analyzer_config_hfinger"),
+    ]
+    operations = [
+        migrations.RunPython(migrate, reverse_migrate),
+    ]
diff --git a/api_app/analyzers_manager/migrations/0080_remove_dns0_names_analyzer.py b/api_app/analyzers_manager/migrations/0080_remove_dns0_names_analyzer.py
new file mode 100644
index 0000000000..8868a52580
--- /dev/null
+++ b/api_app/analyzers_manager/migrations/0080_remove_dns0_names_analyzer.py
@@ -0,0 +1,27 @@
+from django.db import migrations
+
+
+def migrate(apps, schema_editor):
+    PythonModule = apps.get_model("api_app", "PythonModule")
+    pm = PythonModule.objects.filter(
+        module="dns0.dns0_names.DNS0Names",
+        base_path="api_app.analyzers_manager.observable_analyzers",
+    ).first()
+    if pm:
+        pm.analyzerconfigs.all().delete()
+        pm.delete()
+
+
+def reverse_migrate(apps, schema_editor):
+    pass
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("api_app", "0062_alter_parameter_python_module"),
+        ("playbooks_manager", "0032_delete_dns0_playbook_free_to_use_analyzers"),
+        ("analyzers_manager", "0079_remove_dns0_rrsets_analyzer"),
+    ]
+    operations = [
+        migrations.RunPython(migrate, reverse_migrate),
+    ]
diff --git a/api_app/analyzers_manager/migrations/0081_adjust_abusix.py b/api_app/analyzers_manager/migrations/0081_adjust_abusix.py
new file mode 100644
index 0000000000..c32e125b9a
--- /dev/null
+++ b/api_app/analyzers_manager/migrations/0081_adjust_abusix.py
@@ -0,0 +1,23 @@
+from django.db import migrations
+
+
+def migrate(apps, schema_editor):
+    AnalyzerConfig = apps.get_model("analyzers_manager", "AnalyzerConfig")
+
+    AnalyzerConfig.objects.filter(
+        name="Abusix",
+    ).update(health_check_status=True)
+
+
+def reverse_migrate(apps, schema_editor):
+    pass
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("api_app", "0062_alter_parameter_python_module"),
+        ("analyzers_manager", "0080_remove_dns0_names_analyzer"),
+    ]
+    operations = [
+        migrations.RunPython(migrate, reverse_migrate),
+    ]
diff --git a/api_app/analyzers_manager/migrations/0082_analyzer_config_ip2whois.py b/api_app/analyzers_manager/migrations/0082_analyzer_config_ip2whois.py
new file mode 100644
index 0000000000..dba0ab1df5
--- /dev/null
+++ b/api_app/analyzers_manager/migrations/0082_analyzer_config_ip2whois.py
@@ -0,0 +1,131 @@
+from django.db import migrations
+from django.db.models.fields.related_descriptors import (
+    ForwardManyToOneDescriptor,
+    ForwardOneToOneDescriptor,
+    ManyToManyDescriptor,
+)
+
+plugin = {
+    "python_module": {
+        "health_check_schedule": None,
+        "update_schedule": None,
+        "module": "ip2whois.Ip2whois",
+        "base_path": "api_app.analyzers_manager.observable_analyzers",
+    },
+    "name": "IP2WHOIS",
+    "description": "[API Docs](https://www.ip2location.io/ip2whois-documentation)"
+    " IP2Location.io IP2WHOIS Domain WHOIS API helps users to obtain"
+    " domain information and WHOIS record by using a domain name.",
+    "disabled": False,
+    "soft_time_limit": 60,
+    "routing_key": "default",
+    "health_check_status": True,
+    "type": "observable",
+    "docker_based": False,
+    "maximum_tlp": "RED",
+    "observable_supported": ["domain"],
+    "supported_filetypes": [],
+    "run_hash": False,
+    "run_hash_type": "",
+    "not_supported_filetypes": [],
+    "model": "analyzers_manager.AnalyzerConfig",
+}
+
+params = [
+    {
+        "python_module": {
+            "module": "ip2whois.Ip2whois",
+            "base_path": "api_app.analyzers_manager.observable_analyzers",
+        },
+        "name": "api_key_name",
+        "type": "str",
+        "description": "IP2WHOIS API key name.",
+        "is_secret": True,
+        "required": True,
+    }
+]
+
+values = []
+
+
+def _get_real_obj(Model, field, value):
+    def _get_obj(Model, other_model, value):
+        if isinstance(value, dict):
+            real_vals = {}
+            for key, real_val in value.items():
+                real_vals[key] = _get_real_obj(other_model, key, real_val)
+            value = other_model.objects.get_or_create(**real_vals)[0]
+        # it is just the primary key serialized
+        else:
+            if isinstance(value, int):
+                if Model.__name__ == "PluginConfig":
+                    value = other_model.objects.get(name=plugin["name"])
+                else:
+                    value = other_model.objects.get(pk=value)
+            else:
+                value = other_model.objects.get(name=value)
+        return value
+
+    if (
+        type(getattr(Model, field))
+        in [ForwardManyToOneDescriptor, ForwardOneToOneDescriptor]
+        and value
+    ):
+        other_model = getattr(Model, field).get_queryset().model
+        value = _get_obj(Model, other_model, value)
+    elif type(getattr(Model, field)) in [ManyToManyDescriptor] and value:
+        other_model = getattr(Model, field).rel.model
+        value = [_get_obj(Model, other_model, val) for val in value]
+    return value
+
+
+def _create_object(Model, data):
+    mtm, no_mtm = {}, {}
+    for field, value in data.items():
+        value = _get_real_obj(Model, field, value)
+        if type(getattr(Model, field)) is ManyToManyDescriptor:
+            mtm[field] = value
+        else:
+            no_mtm[field] = value
+    try:
+        o = Model.objects.get(**no_mtm)
+    except Model.DoesNotExist:
+        o = Model(**no_mtm)
+        o.full_clean()
+        o.save()
+        for field, value in mtm.items():
+            attribute = getattr(o, field)
+            if value is not None:
+                attribute.set(value)
+        return False
+    return True
+
+
+def migrate(apps, schema_editor):
+    Parameter = apps.get_model("api_app", "Parameter")
+    PluginConfig = apps.get_model("api_app", "PluginConfig")
+    python_path = plugin.pop("model")
+    Model = apps.get_model(*python_path.split("."))
+    if not Model.objects.filter(name=plugin["name"]).exists():
+        exists = _create_object(Model, plugin)
+        if not exists:
+            for param in params:
+                _create_object(Parameter, param)
+            for value in values:
+                _create_object(PluginConfig, value)
+
+
+def reverse_migrate(apps, schema_editor):
+    python_path = plugin.pop("model")
+    Model = apps.get_model(*python_path.split("."))
+    Model.objects.get(name=plugin["name"]).delete()
+
+
+class Migration(migrations.Migration):
+    atomic = False
+    dependencies = [
"0062_alter_parameter_python_module"), + ("analyzers_manager", "0081_adjust_abusix"), + ] + + operations = [migrations.RunPython(migrate, reverse_migrate)] diff --git a/api_app/analyzers_manager/migrations/0083_adjust_docinfo.py b/api_app/analyzers_manager/migrations/0083_adjust_docinfo.py new file mode 100644 index 0000000000..d26b6e19be --- /dev/null +++ b/api_app/analyzers_manager/migrations/0083_adjust_docinfo.py @@ -0,0 +1,25 @@ +from django.db import migrations + + +def migrate(apps, schema_editor): + AnalyzerConfig = apps.get_model("analyzers_manager", "AnalyzerConfig") + + ac = AnalyzerConfig.objects.get( + name="Doc_Info", + ) + ac.supported_filetypes.remove("application/onenote") + ac.save() + + +def reverse_migrate(apps, schema_editor): + pass + + +class Migration(migrations.Migration): + dependencies = [ + ("api_app", "0062_alter_parameter_python_module"), + ("analyzers_manager", "0082_analyzer_config_ip2whois"), + ] + operations = [ + migrations.RunPython(migrate, reverse_migrate), + ] diff --git a/api_app/analyzers_manager/migrations/0084_alter_analyzerconfig_not_supported_filetypes_and_more.py b/api_app/analyzers_manager/migrations/0084_alter_analyzerconfig_not_supported_filetypes_and_more.py new file mode 100644 index 0000000000..53852709e9 --- /dev/null +++ b/api_app/analyzers_manager/migrations/0084_alter_analyzerconfig_not_supported_filetypes_and_more.py @@ -0,0 +1,169 @@ +# Generated by Django 4.2.11 on 2024-04-11 14:07 + +from django.db import migrations, models + +import api_app.fields + + +class Migration(migrations.Migration): + dependencies = [ + ("api_app", "0062_alter_parameter_python_module"), + ("analyzers_manager", "0083_adjust_docinfo"), + ] + + operations = [ + migrations.AlterField( + model_name="analyzerconfig", + name="not_supported_filetypes", + field=api_app.fields.ChoiceArrayField( + base_field=models.CharField( + choices=[ + ("application/w-script-file", "Wscript"), + ("application/javascript", "Javascript1"), + ("application/x-javascript", "Javascript2"), + ("text/javascript", "Javascript3"), + ("application/x-vbscript", "Vb Script"), + ("text/x-ms-iqy", "Iqy"), + ("application/vnd.android.package-archive", "Apk"), + ("application/x-dex", "Dex"), + ("application/onenote", "One Note"), + ("application/zip", "Zip1"), + ("multipart/x-zip", "Zip2"), + ("application/java-archive", "Java"), + ("text/rtf", "Rtf1"), + ("application/rtf", "Rtf2"), + ("application/x-sharedlib", "Shared Lib"), + ("application/vnd.microsoft.portable-executable", "Exe"), + ("application/x-elf", "Elf"), + ("application/octet-stream", "Octet"), + ("application/vnd.tcpdump.pcap", "Pcap"), + ("application/pdf", "Pdf"), + ("text/html", "Html"), + ("application/x-mspublisher", "Pub"), + ("application/vnd.ms-excel.addin.macroEnabled", "Excel Macro1"), + ( + "application/vnd.ms-excel.sheet.macroEnabled.12", + "Excel Macro2", + ), + ("application/vnd.ms-excel", "Excel1"), + ("application/excel", "Excel2"), + ( + "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet", + "Doc", + ), + ("application/xml", "Xml1"), + ("text/xml", "Xml2"), + ("application/encrypted", "Encrypted"), + ("text/plain", "Plain"), + ("text/csv", "Csv"), + ( + "application/vnd.openxmlformats-officedocument.presentationml.presentation", + "Pptx", + ), + ("application/msword", "Word1"), + ( + "application/vnd.openxmlformats-officedocument.wordprocessingml.document", + "Word2", + ), + ("application/vnd.ms-powerpoint", "Powerpoint"), + ("application/vnd.ms-office", "Office"), + ("application/x-binary", 
"Binary"), + ("application/x-macbinary", "Mac1"), + ("application/mac-binary", "Mac2"), + ("application/x-mach-binary", "Mac3"), + ("application/x-zip-compressed", "Compress1"), + ("application/x-compressed", "Compress2"), + ("application/vnd.ms-outlook", "Outlook"), + ("message/rfc822", "Eml"), + ("application/pkcs7-signature", "Pkcs7"), + ("application/x-pkcs7-signature", "Xpkcs7"), + ("multipart/mixed", "Mixed"), + ("text/x-shellscript", "X Shellscript"), + ("application/x-chrome-extension", "Crx"), + ("application/json", "Json"), + ], + max_length=90, + ), + blank=True, + default=list, + size=None, + ), + ), + migrations.AlterField( + model_name="analyzerconfig", + name="supported_filetypes", + field=api_app.fields.ChoiceArrayField( + base_field=models.CharField( + choices=[ + ("application/w-script-file", "Wscript"), + ("application/javascript", "Javascript1"), + ("application/x-javascript", "Javascript2"), + ("text/javascript", "Javascript3"), + ("application/x-vbscript", "Vb Script"), + ("text/x-ms-iqy", "Iqy"), + ("application/vnd.android.package-archive", "Apk"), + ("application/x-dex", "Dex"), + ("application/onenote", "One Note"), + ("application/zip", "Zip1"), + ("multipart/x-zip", "Zip2"), + ("application/java-archive", "Java"), + ("text/rtf", "Rtf1"), + ("application/rtf", "Rtf2"), + ("application/x-sharedlib", "Shared Lib"), + ("application/vnd.microsoft.portable-executable", "Exe"), + ("application/x-elf", "Elf"), + ("application/octet-stream", "Octet"), + ("application/vnd.tcpdump.pcap", "Pcap"), + ("application/pdf", "Pdf"), + ("text/html", "Html"), + ("application/x-mspublisher", "Pub"), + ("application/vnd.ms-excel.addin.macroEnabled", "Excel Macro1"), + ( + "application/vnd.ms-excel.sheet.macroEnabled.12", + "Excel Macro2", + ), + ("application/vnd.ms-excel", "Excel1"), + ("application/excel", "Excel2"), + ( + "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet", + "Doc", + ), + ("application/xml", "Xml1"), + ("text/xml", "Xml2"), + ("application/encrypted", "Encrypted"), + ("text/plain", "Plain"), + ("text/csv", "Csv"), + ( + "application/vnd.openxmlformats-officedocument.presentationml.presentation", + "Pptx", + ), + ("application/msword", "Word1"), + ( + "application/vnd.openxmlformats-officedocument.wordprocessingml.document", + "Word2", + ), + ("application/vnd.ms-powerpoint", "Powerpoint"), + ("application/vnd.ms-office", "Office"), + ("application/x-binary", "Binary"), + ("application/x-macbinary", "Mac1"), + ("application/mac-binary", "Mac2"), + ("application/x-mach-binary", "Mac3"), + ("application/x-zip-compressed", "Compress1"), + ("application/x-compressed", "Compress2"), + ("application/vnd.ms-outlook", "Outlook"), + ("message/rfc822", "Eml"), + ("application/pkcs7-signature", "Pkcs7"), + ("application/x-pkcs7-signature", "Xpkcs7"), + ("multipart/mixed", "Mixed"), + ("text/x-shellscript", "X Shellscript"), + ("application/x-chrome-extension", "Crx"), + ("application/json", "Json"), + ], + max_length=90, + ), + blank=True, + default=list, + size=None, + ), + ), + ] diff --git a/api_app/analyzers_manager/migrations/0085_analyzer_config_permhash.py b/api_app/analyzers_manager/migrations/0085_analyzer_config_permhash.py new file mode 100644 index 0000000000..be8d2394d1 --- /dev/null +++ b/api_app/analyzers_manager/migrations/0085_analyzer_config_permhash.py @@ -0,0 +1,127 @@ +from django.db import migrations +from django.db.models.fields.related_descriptors import ( + ForwardManyToOneDescriptor, + ForwardOneToOneDescriptor, + 
+    ManyToManyDescriptor,
+)
+
+plugin = {
+    "python_module": {
+        "health_check_schedule": None,
+        "update_schedule": None,
+        "module": "perm_hash.Permhash",
+        "base_path": "api_app.analyzers_manager.file_analyzers",
+    },
+    "name": "Permhash",
+    "description": "create a hash of permissions in APK, Android manifest, chrome extension, chrome extension manifest files using [permhash](https://github.com/google/permhash).",
+    "disabled": False,
+    "soft_time_limit": 20,
+    "routing_key": "default",
+    "health_check_status": True,
+    "type": "file",
+    "docker_based": False,
+    "maximum_tlp": "RED",
+    "observable_supported": [],
+    "supported_filetypes": [
+        "application/vnd.android.package-archive",
+        "application/zip",
+        "application/java-archive",
+        "application/octet-stream",
+        "text/plain",
+        "application/x-chrome-extension",
+        "application/json",
+    ],
+    "run_hash": False,
+    "run_hash_type": "",
+    "not_supported_filetypes": [],
+    "model": "analyzers_manager.AnalyzerConfig",
+}
+
+params = []
+
+values = []
+
+
+def _get_real_obj(Model, field, value):
+    def _get_obj(Model, other_model, value):
+        if isinstance(value, dict):
+            real_vals = {}
+            for key, real_val in value.items():
+                real_vals[key] = _get_real_obj(other_model, key, real_val)
+            value = other_model.objects.get_or_create(**real_vals)[0]
+        # it is just the primary key serialized
+        else:
+            if isinstance(value, int):
+                if Model.__name__ == "PluginConfig":
+                    value = other_model.objects.get(name=plugin["name"])
+                else:
+                    value = other_model.objects.get(pk=value)
+            else:
+                value = other_model.objects.get(name=value)
+        return value
+
+    if (
+        type(getattr(Model, field))
+        in [ForwardManyToOneDescriptor, ForwardOneToOneDescriptor]
+        and value
+    ):
+        other_model = getattr(Model, field).get_queryset().model
+        value = _get_obj(Model, other_model, value)
+    elif type(getattr(Model, field)) in [ManyToManyDescriptor] and value:
+        other_model = getattr(Model, field).rel.model
+        value = [_get_obj(Model, other_model, val) for val in value]
+    return value
+
+
+def _create_object(Model, data):
+    mtm, no_mtm = {}, {}
+    for field, value in data.items():
+        value = _get_real_obj(Model, field, value)
+        if type(getattr(Model, field)) is ManyToManyDescriptor:
+            mtm[field] = value
+        else:
+            no_mtm[field] = value
+    try:
+        o = Model.objects.get(**no_mtm)
+    except Model.DoesNotExist:
+        o = Model(**no_mtm)
+        o.full_clean()
+        o.save()
+        for field, value in mtm.items():
+            attribute = getattr(o, field)
+            if value is not None:
+                attribute.set(value)
+        return False
+    return True
+
+
+def migrate(apps, schema_editor):
+    Parameter = apps.get_model("api_app", "Parameter")
+    PluginConfig = apps.get_model("api_app", "PluginConfig")
+    python_path = plugin.pop("model")
+    Model = apps.get_model(*python_path.split("."))
+    if not Model.objects.filter(name=plugin["name"]).exists():
+        exists = _create_object(Model, plugin)
+        if not exists:
+            for param in params:
+                _create_object(Parameter, param)
+            for value in values:
+                _create_object(PluginConfig, value)
+
+
+def reverse_migrate(apps, schema_editor):
+    python_path = plugin.pop("model")
+    Model = apps.get_model(*python_path.split("."))
+    Model.objects.get(name=plugin["name"]).delete()
+
+
+class Migration(migrations.Migration):
+    atomic = False
+    dependencies = [
+        (
+            "analyzers_manager",
+            "0084_alter_analyzerconfig_not_supported_filetypes_and_more",
+        ),
+    ]
+
+    operations = [migrations.RunPython(migrate, reverse_migrate)]
diff --git a/api_app/analyzers_manager/migrations/0086_analyzer_config_blint.py b/api_app/analyzers_manager/migrations/0086_analyzer_config_blint.py
new file mode 100755
index 0000000000..d9192ab228
--- /dev/null
+++ b/api_app/analyzers_manager/migrations/0086_analyzer_config_blint.py
@@ -0,0 +1,128 @@
+from django.db import migrations
+from django.db.models.fields.related_descriptors import (
+    ForwardManyToOneDescriptor,
+    ForwardOneToOneDescriptor,
+    ManyToManyDescriptor,
+)
+
+plugin = {
+    "python_module": {
+        "health_check_schedule": None,
+        "update_schedule": None,
+        "module": "blint_scan.BlintAnalyzer",
+        "base_path": "api_app.analyzers_manager.file_analyzers",
+    },
+    "name": "Blint",
+    "description": "[Blint](https://github.com/owasp-dep-scan/blint) is a Binary Linter that checks the security properties and capabilities of your executables.\r\nSupported binary formats:\r\n- Android (apk, aab)\r\n- ELF (GNU, musl)\r\n- PE (exe, dll)\r\n- Mach-O (x64, arm64)",
+    "disabled": False,
+    "soft_time_limit": 60,
+    "routing_key": "default",
+    "health_check_status": True,
+    "type": "file",
+    "docker_based": False,
+    "maximum_tlp": "RED",
+    "observable_supported": [],
+    "supported_filetypes": [
+        "application/vnd.android.package-archive",
+        "application/vnd.microsoft.portable-executable",
+        "application/x-binary",
+        "application/x-macbinary",
+        "application/mac-binary",
+        "application/x-mach-binary",
+        "application/x-elf",
+        "application/x-sharedlib",
+        "application/java-archive",
+        "application/x-dex",
+        "application/zip",
+    ],
+    "run_hash": False,
+    "run_hash_type": "",
+    "not_supported_filetypes": [],
+    "model": "analyzers_manager.AnalyzerConfig",
+}
+
+params = []
+
+values = []
+
+
+def _get_real_obj(Model, field, value):
+    def _get_obj(Model, other_model, value):
+        if isinstance(value, dict):
+            real_vals = {}
+            for key, real_val in value.items():
+                real_vals[key] = _get_real_obj(other_model, key, real_val)
+            value = other_model.objects.get_or_create(**real_vals)[0]
+        # it is just the primary key serialized
+        else:
+            if isinstance(value, int):
+                if Model.__name__ == "PluginConfig":
+                    value = other_model.objects.get(name=plugin["name"])
+                else:
+                    value = other_model.objects.get(pk=value)
+            else:
+                value = other_model.objects.get(name=value)
+        return value
+
+    if (
+        type(getattr(Model, field))
+        in [ForwardManyToOneDescriptor, ForwardOneToOneDescriptor]
+        and value
+    ):
+        other_model = getattr(Model, field).get_queryset().model
+        value = _get_obj(Model, other_model, value)
+    elif type(getattr(Model, field)) in [ManyToManyDescriptor] and value:
+        other_model = getattr(Model, field).rel.model
+        value = [_get_obj(Model, other_model, val) for val in value]
+    return value
+
+
+def _create_object(Model, data):
+    mtm, no_mtm = {}, {}
+    for field, value in data.items():
+        value = _get_real_obj(Model, field, value)
+        if type(getattr(Model, field)) is ManyToManyDescriptor:
+            mtm[field] = value
+        else:
+            no_mtm[field] = value
+    try:
+        o = Model.objects.get(**no_mtm)
+    except Model.DoesNotExist:
+        o = Model(**no_mtm)
+        o.full_clean()
+        o.save()
+        for field, value in mtm.items():
+            attribute = getattr(o, field)
+            if value is not None:
+                attribute.set(value)
+        return False
+    return True
+
+
+def migrate(apps, schema_editor):
+    Parameter = apps.get_model("api_app", "Parameter")
+    PluginConfig = apps.get_model("api_app", "PluginConfig")
+    python_path = plugin.pop("model")
+    Model = apps.get_model(*python_path.split("."))
+    if not Model.objects.filter(name=plugin["name"]).exists():
+        exists = _create_object(Model, plugin)
+        if not exists:
+            for param in params:
+                _create_object(Parameter, param)
+            for value in values:
+                _create_object(PluginConfig, value)
+
+
+def reverse_migrate(apps, schema_editor):
+    python_path = plugin.pop("model")
+    Model = apps.get_model(*python_path.split("."))
+    Model.objects.get(name=plugin["name"]).delete()
+
+
+class Migration(migrations.Migration):
+    atomic = False
+    dependencies = [
+        ("analyzers_manager", "0085_analyzer_config_permhash"),
+    ]
+
+    operations = [migrations.RunPython(migrate, reverse_migrate)]
diff --git a/api_app/analyzers_manager/models.py b/api_app/analyzers_manager/models.py
index b9851ae8a9..eb1d76265c 100644
--- a/api_app/analyzers_manager/models.py
+++ b/api_app/analyzers_manager/models.py
@@ -84,6 +84,8 @@ class MimeTypes(models.TextChoices):
     XPKCS7 = "application/x-pkcs7-signature"
     MIXED = "multipart/mixed"
     X_SHELLSCRIPT = "text/x-shellscript"
+    CRX = "application/x-chrome-extension"
+    JSON = "application/json"
 
     @classmethod
     def _calculate_from_filename(cls, file_name: str) -> Optional["MimeTypes"]:
diff --git a/api_app/analyzers_manager/observable_analyzers/abusix.py b/api_app/analyzers_manager/observable_analyzers/abusix.py
new file mode 100644
index 0000000000..440a6bb0be
--- /dev/null
+++ b/api_app/analyzers_manager/observable_analyzers/abusix.py
@@ -0,0 +1,38 @@
+# This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl
+# See the file 'LICENSE' for copying permission.
+
+import logging
+
+import querycontacts
+
+from api_app.analyzers_manager import classes
+from tests.mock_utils import if_mock_connections, patch
+
+logger = logging.getLogger(__name__)
+
+
+class Abusix(classes.ObservableAnalyzer):
+    def run(self):
+        result = {}
+        ip_addr = self.observable_name
+        cf = querycontacts.ContactFinder()
+        abuse_contacts = cf.find(ip_addr)
+        if not abuse_contacts:
+            abuse_contacts = []
+        result["abuse_contacts"] = abuse_contacts
+        return result
+
+    def update(self) -> bool:
+        pass
+
+    @classmethod
+    def _monkeypatch(cls):
+        patches = [
+            if_mock_connections(
+                patch(
+                    "querycontacts.ContactFinder.find",
+                    return_value=["network-abuse@google.com"],
+                )
+            )
+        ]
+        return super()._monkeypatch(patches=patches)
diff --git a/api_app/analyzers_manager/observable_analyzers/auth0.py b/api_app/analyzers_manager/observable_analyzers/auth0.py
index 35b60ef180..0d8f26d8fb 100644
--- a/api_app/analyzers_manager/observable_analyzers/auth0.py
+++ b/api_app/analyzers_manager/observable_analyzers/auth0.py
@@ -9,13 +9,17 @@
 
 class Auth0(classes.ObservableAnalyzer):
     name: str = "Auth0"
-    base_url: str = "https://signals.api.auth0.com/v2.0/ip"
+    url: str = "https://signals.api.auth0.com/v2.0/ip"
     _api_key_name: str
 
+    @classmethod
+    def update(cls) -> bool:
+        pass
+
     def run(self):
         headers = {"X-Auth-Token": self._api_key_name}
-        url = f"{self.base_url}/{self.observable_name}"
+        url = f"{self.url}/{self.observable_name}"
 
         response = requests.get(url, headers=headers)
         response.raise_for_status()
diff --git a/api_app/analyzers_manager/observable_analyzers/binaryedge.py b/api_app/analyzers_manager/observable_analyzers/binaryedge.py
index 830a3fb80c..5db03d71e3 100644
--- a/api_app/analyzers_manager/observable_analyzers/binaryedge.py
+++ b/api_app/analyzers_manager/observable_analyzers/binaryedge.py
@@ -10,10 +10,14 @@
 
 class BinaryEdge(classes.ObservableAnalyzer):
-    base_url: str = "https://api.binaryedge.io/v2/query/"
+    url: str = "https://api.binaryedge.io/v2/query/"
     _api_key_name: str
 
+    @classmethod
+    def update(cls) -> bool:
+        pass
+
     def config(self, runtime_configuration: Dict):
         super().config(runtime_configuration)
         self.headers = {"X-Key": self._api_key_name}
 
@@ -23,12 +27,12 @@ def run(self):
         if self.observable_classification == self.ObservableTypes.IP:
             try:
                 response_recent_ip_info = requests.get(
-                    self.base_url + "ip/" + self.observable_name, headers=self.headers
+                    self.url + "ip/" + self.observable_name, headers=self.headers
                 )
                 response_recent_ip_info.raise_for_status()
 
                 response_query_ip = requests.get(
-                    self.base_url + "search?query=ip:" + self.observable_name,
+                    self.url + "search?query=ip:" + self.observable_name,
                     headers=self.headers,
                 )
                 response_query_ip.raise_for_status()
@@ -43,7 +47,7 @@ def run(self):
         elif self.observable_classification == self.ObservableTypes.DOMAIN:
             try:
                 response_domain_report = requests.get(
-                    self.base_url + "domains/subdomain/" + self.observable_name,
+                    self.url + "domains/subdomain/" + self.observable_name,
                     headers=self.headers,
                 )
                 results = response_domain_report.json()
diff --git a/api_app/analyzers_manager/observable_analyzers/censys.py b/api_app/analyzers_manager/observable_analyzers/censys.py
index fe0508c8d4..fa6bfbf78f 100644
--- a/api_app/analyzers_manager/observable_analyzers/censys.py
+++ b/api_app/analyzers_manager/observable_analyzers/censys.py
@@ -18,7 +18,7 @@ class Censys(classes.ObservableAnalyzer):
     def update(self):
         pass
 
-    base_url = "https://search.censys.io/api/v2"
+    url = "https://search.censys.io/api/v2"
     censys_analysis: str
     _api_id_name: str
@@ -33,7 +33,7 @@ def run(self):
             "Supported is IP"
         )
         response = requests.get(
-            self.base_url + uri,
+            self.url + uri,
             auth=(self._api_id_name, self._api_secret_name),
             headers={
                 "Accept": "application/json",
diff --git a/api_app/analyzers_manager/observable_analyzers/checkphish.py b/api_app/analyzers_manager/observable_analyzers/checkphish.py
index f0b0204c6c..dc64fb8cf0 100644
--- a/api_app/analyzers_manager/observable_analyzers/checkphish.py
+++ b/api_app/analyzers_manager/observable_analyzers/checkphish.py
@@ -11,21 +11,25 @@
 
 class CheckPhish(classes.ObservableAnalyzer):
-    base_url: str = "https://developers.checkphish.ai/api/neo/scan"
-    status_url: str = base_url + "/status"
+    url: str = "https://developers.checkphish.ai/api/neo/scan"
+    status_url: str = url + "/status"
 
     polling_tries: int
     polling_time: float
     _api_key_name: str
 
+    @classmethod
+    def update(cls) -> bool:
+        pass
+
     def run(self):
         json_data = {
             "apiKey": self._api_key_name,
             "urlInfo": {"url": self.observable_name},
         }
 
-        response = requests.post(CheckPhish.base_url, json=json_data)
+        response = requests.post(CheckPhish.url, json=json_data)
         response.raise_for_status()
 
         job_id = response.json().get("jobID")
diff --git a/api_app/analyzers_manager/observable_analyzers/crowdsec.py b/api_app/analyzers_manager/observable_analyzers/crowdsec.py
index f94ba94f5f..20f9c081b0 100644
--- a/api_app/analyzers_manager/observable_analyzers/crowdsec.py
+++ b/api_app/analyzers_manager/observable_analyzers/crowdsec.py
@@ -10,13 +10,18 @@
 
 class Crowdsec(ObservableAnalyzer):
     _api_key_name: str
+    url: str = "https://cti.api.crowdsec.net"
+
+    @classmethod
+    def update(cls) -> bool:
+        pass
 
     def run(self):
         headers = {
             "x-api-key": self._api_key_name,
             "User-Agent": f"crowdsec-intelowl/{settings.VERSION}",
         }
-        url = f"https://cti.api.crowdsec.net/v2/smoke/{self.observable_name}"
+        url = f"{self.url}/v2/smoke/{self.observable_name}"
         response = requests.get(url, headers=headers)
         if response.status_code == 404:
             result = {"not_found": True}
diff --git a/api_app/analyzers_manager/observable_analyzers/crxcavator.py b/api_app/analyzers_manager/observable_analyzers/crxcavator.py
index 2e1b1400d6..7c4e057d24 100644
--- a/api_app/analyzers_manager/observable_analyzers/crxcavator.py
+++ b/api_app/analyzers_manager/observable_analyzers/crxcavator.py
@@ -10,11 +10,15 @@
 
 class CRXcavator(classes.ObservableAnalyzer):
     name: str = "CRXcavator"
-    base_url: str = "https://api.crxcavator.io/v1/report/"
+    url: str = "https://api.crxcavator.io/v1/report/"
+
+    @classmethod
+    def update(cls) -> bool:
+        pass
 
     def run(self):
         try:
-            response = requests.get(self.base_url + self.observable_name)
+            response = requests.get(self.url + self.observable_name)
             response.raise_for_status()
         except requests.RequestException as e:
             raise AnalyzerRunException(e)
diff --git a/api_app/analyzers_manager/observable_analyzers/dns/dns_malicious_detectors/quad9_malicious_detector.py b/api_app/analyzers_manager/observable_analyzers/dns/dns_malicious_detectors/quad9_malicious_detector.py
index 561691df2d..bb94d9a793 100644
--- a/api_app/analyzers_manager/observable_analyzers/dns/dns_malicious_detectors/quad9_malicious_detector.py
+++ b/api_app/analyzers_manager/observable_analyzers/dns/dns_malicious_detectors/quad9_malicious_detector.py
@@ -10,7 +10,6 @@
 import requests
 
 from api_app.analyzers_manager import classes
-from api_app.analyzers_manager.exceptions import AnalyzerRunException
 from tests.mock_utils import MockUpResponse, if_mock_connections, patch
 
 from ..dns_responses import malicious_detector_response
@@ -28,6 +27,13 @@ class Quad9MaliciousDetector(classes.ObservableAnalyzer):
     we can guess that the domain was in the Quad9 blacklist.
     """
 
+    HEADERS = {"Accept": "application/dns-json"}
+    QUAD9_URL = "https://dns.quad9.net:5053/dns-query"
+    GOOGLE_URL = "https://dns.google.com/resolve"
+
+    def update(self) -> bool:
+        pass
+
     def run(self):
         observable = self.observable_name
         # for URLs we are checking the relative domain
@@ -57,32 +63,27 @@ def _quad9_dns_query(self, observable) -> Tuple[bool, bool]:
         """
         answer_found = False
         timeout = False
-        try:
-            headers = {"Accept": "application/dns-json"}
-            url = "https://dns.quad9.net:5053/dns-query"
-            params = {"name": observable}
-
-            quad9_response = requests.get(url, headers=headers, params=params)
-            if quad9_response.status_code == 503:
-                msg = (
-                    "503 status code! "
-                    "It may be normal for this service to"
-                    " happen from time to time"
-                )
-                logger.info(msg)
-                self.report.errors.append(msg)
-                timeout = True
-                return answer_found, timeout
-            quad9_response.raise_for_status()
-        except requests.RequestException as e:
-            raise AnalyzerRunException(e)
-        else:
-            answer_found = bool(quad9_response.json().get("Answer", None))
+        params = {"name": observable}
+
+        quad9_response = requests.get(
+            self.QUAD9_URL, headers=self.HEADERS, params=params
+        )
+        if quad9_response.status_code == 503:
+            msg = (
+                "503 status code! "
+                "It may be normal for this service to"
+                " happen from time to time"
+            )
+            logger.info(msg)
+            self.report.errors.append(msg)
+            timeout = True
+            return answer_found, timeout
+        quad9_response.raise_for_status()
+        answer_found = bool(quad9_response.json().get("Answer", None))
 
         return answer_found, timeout
 
-    @staticmethod
-    def _google_dns_query(observable) -> bool:
+    def _google_dns_query(self, observable) -> bool:
         """Perform a DNS query with Google service,
         return True if Google answer the DNS query.
 
@@ -91,14 +92,9 @@ def _google_dns_query(observable) -> bool:
         :return: True in case of answer for the DNS query else False.
:rtype: bool """ - try: - params = {"name": observable} - google_response = requests.get( - "https://dns.google.com/resolve", params=params - ) - google_response.raise_for_status() - except requests.RequestException as e: - raise AnalyzerRunException(e) + params = {"name": observable} + google_response = requests.get(self.GOOGLE_URL, params=params) + google_response.raise_for_status() return bool(google_response.json().get("Answer", None)) diff --git a/api_app/analyzers_manager/observable_analyzers/dns0/__init__.py b/api_app/analyzers_manager/observable_analyzers/dns0/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/api_app/analyzers_manager/observable_analyzers/dns0/dns0_base.py b/api_app/analyzers_manager/observable_analyzers/dns0/dns0_base.py deleted file mode 100644 index e80dd98983..0000000000 --- a/api_app/analyzers_manager/observable_analyzers/dns0/dns0_base.py +++ /dev/null @@ -1,133 +0,0 @@ -import re -import typing -from abc import ABCMeta -from logging import getLogger - -import dateparser - -from api_app.analyzers_manager.classes import BaseAnalyzerMixin -from api_app.analyzers_manager.exceptions import ( - AnalyzerConfigurationException, - AnalyzerRunException, -) -from certego_saas.apps.user.models import User - -_supported_sort_types = [ - "first_seen", - "last_seen", -] - -_min_limit_value = 0 -_max_limit_value = 50000 - -_min_offset_value = 0 - -logger = getLogger(__name__) - - -class DNS0Mixin(BaseAnalyzerMixin, metaclass=ABCMeta): - base_url: str = "https://api.dns0.eu/" - - _api_key: str - from_date: str = "-1M" - sort: str - format: str - limit: int = 100 - offset: int - - def config(self, runtime_configuration: typing.Dict): - super().config(runtime_configuration) - # workaround to not being able to use "from" as variable name - if not hasattr(self, "from"): - setattr(self, "from", self.from_date) - - def _create_headers(self): - headers = {"Accept": "application/json", "User-Agent": "IntelOwl"} - if hasattr(self, "_api_key") and self._api_key: - headers["Authorization"] = f"Bearer {self._api_key}" - return headers - - @staticmethod - def convert_date_type(date_string): - if not date_string: - return False - - date_parsed = ( - DNS0Mixin.convert_unix_timestamp(date_string) - or DNS0Mixin.convert_relative_date(date_string) - or DNS0Mixin.convert_date(date_string) - ) - if not date_parsed: - raise AnalyzerRunException("Error in date format!") - return date_parsed - - @staticmethod - def convert_relative_date(date): - # accepts string matching the format: - # - at the beginning - # a number - # a character indicating Year, Month or Day - pattern = re.compile(r"-\d+[YMD]") - if match := pattern.match(date): - return match.group() - return False - - @staticmethod - def convert_date(date): - pattern = re.compile(r"^(\d{4}-\d{2}-\d{2})$") - if match := pattern.match(date): - return dateparser.parse(match.group()) - return False - - @staticmethod - def convert_unix_timestamp(timestamp): - try: - return str(int(timestamp)) - except Exception: - return False - - def _validate_params(self): - if ( - hasattr(self, "sort") - and self.sort - and self.sort not in _supported_sort_types - ): - raise AnalyzerConfigurationException( - f"Sort type {self.sort} not supported! " - f"Available sort types are: {_supported_sort_types}" - ) - - if ( - hasattr(self, "limit") - and self.limit - and not _min_limit_value < self.limit <= _max_limit_value - ): - raise AnalyzerConfigurationException( - f"{self.limit} is out of bound! 
" - f"Max value is {_max_limit_value}, min value is {_min_limit_value}" - ) - - if hasattr(self, "offset") and self.offset and self.offset < _min_offset_value: - raise AnalyzerConfigurationException( - f"{self.offset} can't be below {_min_offset_value}" - ) - - def _create_params(self): - params = {} - # convert dates to correct format - dates = ["from", "to", "not_before"] - parameters = ["sort", "format", "limit", "offset"] - - for date in dates: - if getattr(self, date, None): - if result := self.convert_date_type(getattr(self, date)): - params[date] = result - - for p in parameters: - if getattr(self, p, None): - params[p] = getattr(self, p) - - return params - - def _get_health_check_url(self, user: User = None) -> typing.Optional[str]: - return self.base_url diff --git a/api_app/analyzers_manager/observable_analyzers/dns0/dns0_names.py b/api_app/analyzers_manager/observable_analyzers/dns0/dns0_names.py deleted file mode 100644 index b62681ad74..0000000000 --- a/api_app/analyzers_manager/observable_analyzers/dns0/dns0_names.py +++ /dev/null @@ -1,209 +0,0 @@ -from logging import getLogger -from typing import Dict -from urllib.parse import urlparse - -import requests - -from api_app.analyzers_manager import classes -from api_app.analyzers_manager.exceptions import AnalyzerConfigurationException -from api_app.analyzers_manager.models import AnalyzerConfig -from api_app.analyzers_manager.observable_analyzers.dns0.dns0_base import DNS0Mixin -from api_app.models import Parameter, PluginConfig -from tests.mock_utils import MockUpResponse, if_mock_connections, patch - -logger = getLogger(__name__) - -_supported_fuzzy_params = [ - "swap", - "omit", - "repeat", - "add", - "typo", - "bitflip", - "hyphen", - "fatfinger", - "subdomain", - "vowels", - "homoglyph", - "all", -] - -_supported_format_types = [ - "json", - "dig", -] - - -class DNS0Names(classes.ObservableAnalyzer, DNS0Mixin): - endpoint: str = "names" - - root: bool - fuzzy: list[str] - - def config(self, runtime_configuration: Dict): - super().config(runtime_configuration) - self._validate_params() - - def run(self): - params = self._create_params() - headers = self._create_headers() - - response = requests.get( - self.base_url + self.endpoint, params=params, headers=headers - ) - response.raise_for_status() - - return response.json() - - def update(self) -> bool: - pass - - def _validate_params(self): - super()._validate_params() - if ( - hasattr(self, "fuzzy") - and self.fuzzy - and any( - fuzzy_params not in _supported_fuzzy_params - for fuzzy_params in self.fuzzy - ) - ): - raise AnalyzerConfigurationException( - "Fuzzy type not supported! " - "The list of supported fuzzy is at: " - "https://docs.dns0.eu/dns-api/names#fuzziness" - ) - - if ( - hasattr(self, "format") - and self.format - and self.format not in _supported_format_types - ): - raise AnalyzerConfigurationException( - f"Format type {self.format} not supported! 
" - f"Available format types are: {_supported_format_types}" - ) - - def _create_params(self): - params = super()._create_params() - target_observable = self.observable_name - if self.observable_classification == self.ObservableTypes.URL: - target_observable = urlparse(self.observable_name).hostname - params["q"] = target_observable - - # convert root parameter into 1 or 0 - if hasattr(self, "root") and self.root: - params["root"] = int(self.root) - - # pass list of fuzzy parameter - if hasattr(self, "fuzzy") and self.fuzzy: - params["fuzzy"] = self.fuzzy - - return params - - @classmethod - def _monkeypatch(cls): - ac = AnalyzerConfig.objects.get(name="DNS0_names") - PluginConfig.objects.get_or_create( - analyzer_config=ac, - parameter=Parameter.objects.get( - name="from", python_module__pk=ac.python_module_id - ), - for_organization=False, - owner=None, - value="-1M", - ) - PluginConfig.objects.get_or_create( - analyzer_config=ac, - parameter=Parameter.objects.get( - name="to", python_module__pk=ac.python_module_id - ), - for_organization=False, - owner=None, - value="", - ) - PluginConfig.objects.get_or_create( - analyzer_config=ac, - parameter=Parameter.objects.get( - name="not_before", python_module__pk=ac.python_module_id - ), - for_organization=False, - owner=None, - value="", - ) - PluginConfig.objects.get_or_create( - analyzer_config=ac, - parameter=Parameter.objects.get( - name="sort", python_module__pk=ac.python_module_id - ), - for_organization=False, - owner=None, - value="first_seen", - ) - PluginConfig.objects.get_or_create( - analyzer_config=ac, - parameter=Parameter.objects.get( - name="format", python_module__pk=ac.python_module_id - ), - for_organization=False, - owner=None, - value="json", - ) - PluginConfig.objects.get_or_create( - analyzer_config=ac, - parameter=Parameter.objects.get( - name="limit", python_module__pk=ac.python_module_id - ), - for_organization=False, - owner=None, - value=100, - ) - PluginConfig.objects.get_or_create( - analyzer_config=ac, - parameter=Parameter.objects.get( - name="offset", python_module__pk=ac.python_module_id - ), - for_organization=False, - owner=None, - value=0, - ) - PluginConfig.objects.get_or_create( - analyzer_config=ac, - parameter=Parameter.objects.get( - name="fuzzy", python_module__pk=ac.python_module_id - ), - for_organization=False, - owner=None, - value=[], - ) - PluginConfig.objects.get_or_create( - analyzer_config=ac, - parameter=Parameter.objects.get( - name="root", python_module__pk=ac.python_module_id - ), - for_organization=False, - owner=None, - value=True, - ) - - patches = [ - if_mock_connections( - patch( - "requests.get", - return_value=MockUpResponse( - { - "data": [ - { - "first_seen": "2023-12-14T16:37:44.000Z", - "last_seen": "2023-12-14T16:37:44.000Z", - "name": "gcfr2.example.opentlc.com.", - } - ], - "meta": {"results": 834824}, - }, - 200, - ), - ), - ) - ] - return super()._monkeypatch(patches=patches) diff --git a/api_app/analyzers_manager/observable_analyzers/dns0/dns0_rrsets.py b/api_app/analyzers_manager/observable_analyzers/dns0/dns0_rrsets.py deleted file mode 100644 index e2b12f185e..0000000000 --- a/api_app/analyzers_manager/observable_analyzers/dns0/dns0_rrsets.py +++ /dev/null @@ -1,225 +0,0 @@ -from logging import getLogger -from typing import Dict - -import requests - -from api_app.analyzers_manager import classes -from api_app.analyzers_manager.exceptions import AnalyzerConfigurationException -from api_app.analyzers_manager.models import AnalyzerConfig -from 
api_app.analyzers_manager.observable_analyzers.dns0.dns0_base import DNS0Mixin -from api_app.models import Parameter, PluginConfig -from tests.mock_utils import MockUpResponse, if_mock_connections, patch - -logger = getLogger(__name__) - -_supported_format_types = [ - "json", - "cof", - "dig", -] - -_supported_directions = [ - "right", - "left", -] - - -class DNS0Rrsets(classes.ObservableAnalyzer, DNS0Mixin): - endpoint: str = "rrsets" - - direction: str - name: str - data: str - type: list[str] - include_subdomain: bool - - def config(self, runtime_configuration: Dict): - super().config(runtime_configuration) - self._validate_params() - - def run(self): - params = self._create_params() - headers = self._create_headers() - - response = requests.get( - self.base_url + self.endpoint, params=params, headers=headers - ) - response.raise_for_status() - - return response.json() - - def update(self) -> bool: - pass - - def _validate_params(self): - super()._validate_params() - if ( - hasattr(self, "direction") - and self.direction - and self.direction not in _supported_directions - ): - raise AnalyzerConfigurationException("Matching direction not specified!") - - if ( - hasattr(self, "format") - and self.format - and self.format not in _supported_format_types - ): - raise AnalyzerConfigurationException( - f"Format type {self.format} not supported! " - f"Available format types are: {_supported_format_types}" - ) - - def _create_params(self): - params = super()._create_params() - query_type = None - if hasattr(self, "direction") and self.direction: - if self.direction == "left": - query_type = "name" - elif self.direction == "right": - query_type = "data" - - query = self.observable_name - if hasattr(self, "include_subdomain") and self.include_subdomain: - query = "." 
+ query - params[query_type] = query - - # pass list of dns types parameter - if hasattr(self, "type") and self.type: - # convert the element that are int - res = [int(elem) if elem.isdigit() else elem for elem in self.type] - params["type"] = res - - return params - - @classmethod - def _monkeypatch(cls): - for config in ["DNS0_rrsets_data", "DNS0_rrsets_name"]: - ac = AnalyzerConfig.objects.get(name=config) - PluginConfig.objects.get_or_create( - analyzer_config=ac, - parameter=Parameter.objects.get( - name="from", python_module__pk=ac.python_module_id - ), - for_organization=False, - owner=None, - value="-1M", - ) - PluginConfig.objects.get_or_create( - analyzer_config=ac, - parameter=Parameter.objects.get( - name="to", python_module__pk=ac.python_module_id - ), - for_organization=False, - owner=None, - value="", - ) - PluginConfig.objects.get_or_create( - analyzer_config=ac, - parameter=Parameter.objects.get( - name="not_before", python_module__pk=ac.python_module_id - ), - for_organization=False, - owner=None, - value="", - ) - PluginConfig.objects.get_or_create( - analyzer_config=ac, - parameter=Parameter.objects.get( - name="sort", python_module__pk=ac.python_module_id - ), - for_organization=False, - owner=None, - value="first_seen", - ) - PluginConfig.objects.get_or_create( - analyzer_config=ac, - parameter=Parameter.objects.get( - name="format", python_module__pk=ac.python_module_id - ), - for_organization=False, - owner=None, - value="json", - ) - PluginConfig.objects.get_or_create( - analyzer_config=ac, - parameter=Parameter.objects.get( - name="limit", python_module__pk=ac.python_module_id - ), - for_organization=False, - owner=None, - value=100, - ) - PluginConfig.objects.get_or_create( - analyzer_config=ac, - parameter=Parameter.objects.get( - name="offset", python_module__pk=ac.python_module_id - ), - for_organization=False, - owner=None, - value=0, - ) - PluginConfig.objects.get_or_create( - analyzer_config=ac, - parameter=Parameter.objects.get( - name="type", python_module__pk=ac.python_module_id - ), - for_organization=False, - owner=None, - value=[], - ) - PluginConfig.objects.get_or_create( - analyzer_config=ac, - parameter=Parameter.objects.get( - name="include_subdomain", python_module__pk=ac.python_module_id - ), - for_organization=False, - owner=None, - value=False, - ) - - ac = AnalyzerConfig.objects.get(name="DNS0_rrsets_name") - PluginConfig.objects.get_or_create( - analyzer_config=ac, - parameter=Parameter.objects.get( - name="direction", python_module__pk=ac.python_module_id - ), - for_organization=False, - owner=None, - value="left", - ) - - ac = AnalyzerConfig.objects.get(name="DNS0_rrsets_data") - PluginConfig.objects.get_or_create( - analyzer_config=ac, - parameter=Parameter.objects.get( - name="direction", python_module__pk=ac.python_module_id - ), - for_organization=False, - owner=None, - value="right", - ) - - patches = [ - if_mock_connections( - patch( - "requests.get", - return_value=MockUpResponse( - { - "data": [ - { - "first_seen": "2023-04-15T16:50:52.000Z", - "last_seen": "2023-12-14T00:23:52.000Z", - "name": "example.com.", - "type": "A", - "data": ["93.184.216.34"], - } - ], - "meta": {"results": 6}, - }, - 200, - ), - ), - ) - ] - return super()._monkeypatch(patches=patches) diff --git a/api_app/analyzers_manager/observable_analyzers/docguard_get.py b/api_app/analyzers_manager/observable_analyzers/docguard_get.py index 4438aab706..b938bc58ac 100644 --- a/api_app/analyzers_manager/observable_analyzers/docguard_get.py +++ 
b/api_app/analyzers_manager/observable_analyzers/docguard_get.py @@ -13,10 +13,14 @@ class DocGuard_Hash(classes.ObservableAnalyzer): - base_url: str = "https://api.docguard.net:8443/api/FileAnalyzing/GetByHash/" + url: str = "https://api.docguard.net:8443/api/FileAnalyzing/GetByHash/" _api_key_name: str + @classmethod + def update(cls) -> bool: + pass + @property def hash_type(self): hash_lengths = {32: "md5", 64: "sha256"} @@ -43,7 +47,7 @@ def run(self): uri = f"{self.observable_name}" if self.observable_classification == self.ObservableTypes.HASH: try: - response = requests.get(self.base_url + uri, headers=headers) + response = requests.get(self.url + uri, headers=headers) response.raise_for_status() except requests.RequestException as e: raise AnalyzerRunException(e) diff --git a/api_app/analyzers_manager/observable_analyzers/emailrep.py b/api_app/analyzers_manager/observable_analyzers/emailrep.py index a0b7408780..4d7d46cae3 100644 --- a/api_app/analyzers_manager/observable_analyzers/emailrep.py +++ b/api_app/analyzers_manager/observable_analyzers/emailrep.py @@ -9,10 +9,14 @@ class EmailRep(classes.ObservableAnalyzer): - base_url: str = "https://emailrep.io/{}" + url: str = "https://emailrep.io/{}" _api_key_name: str + @classmethod + def update(cls) -> bool: + pass + def run(self): """ API key is not mandatory, emailrep supports requests with no key: @@ -32,7 +36,7 @@ def run(self): f" Supported: generic" ) - url = self.base_url.format(self.observable_name) + url = self.url.format(self.observable_name) response = requests.get(url, headers=headers) response.raise_for_status() diff --git a/api_app/analyzers_manager/observable_analyzers/filescan_search.py b/api_app/analyzers_manager/observable_analyzers/filescan_search.py index 3b925ec9fb..c74749b7ce 100644 --- a/api_app/analyzers_manager/observable_analyzers/filescan_search.py +++ b/api_app/analyzers_manager/observable_analyzers/filescan_search.py @@ -13,16 +13,20 @@ class FileScanSearch(ObservableAnalyzer): """FileScan_Search analyzer""" - base_url: str = "https://www.filescan.io/api/reports/search" + url: str = "https://www.filescan.io/api/reports/search" _api_key: str + @classmethod + def update(cls) -> bool: + pass + def run(self): """Runs the FileScan_Search analyzer""" observable_name_base64 = base64.b64encode( self.observable_name.encode() ).decode() endpoint = "?query={input}" - url = f"{self.base_url}/{endpoint.format(input=observable_name_base64)}" + url = f"{self.url}/{endpoint.format(input=observable_name_base64)}" try: response = requests.get(url, headers={"X-Api-Key": self._api_key}) response.raise_for_status() diff --git a/api_app/analyzers_manager/observable_analyzers/greynoise_labs.py b/api_app/analyzers_manager/observable_analyzers/greynoise_labs.py new file mode 100644 index 0000000000..16b90c1ed4 --- /dev/null +++ b/api_app/analyzers_manager/observable_analyzers/greynoise_labs.py @@ -0,0 +1,216 @@ +# This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl +# See the file 'LICENSE' for copying permission. 
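The DocGuard, EmailRep and FileScan_Search hunks above repeat the two mechanical changes of this release: the `base_url` attribute becomes `url`, and each analyzer gains a stub `update()` classmethod when it has no local artifact to refresh. A minimal sketch of the resulting shape (the base class here is a hypothetical stand-in, not the real `classes.ObservableAnalyzer`):

import requests

class ObservableAnalyzer:
    # hypothetical stand-in for the real IntelOwl base class
    url: str = ""

    @classmethod
    def update(cls) -> bool:
        raise NotImplementedError

class ExampleAnalyzer(ObservableAnalyzer):
    url = "https://example.invalid/api/"  # placeholder endpoint

    @classmethod
    def update(cls) -> bool:
        # analyzers with nothing to refresh stub this out, as in the hunks above
        pass

    def run(self, observable_name: str) -> dict:
        response = requests.get(self.url + observable_name)
        response.raise_for_status()
        return response.json()
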
+ +import logging +import os + +import requests +from django.conf import settings + +from api_app.analyzers_manager.classes import ObservableAnalyzer +from api_app.models import PluginConfig +from tests.mock_utils import MockUpResponse, if_mock_connections, patch + +logger = logging.getLogger(__name__) + +url = "https://api.labs.greynoise.io/1/query" +db_name = "topc2s_ips.txt" +db_location = f"{settings.MEDIA_ROOT}/{db_name}" + +queries = { + "noiserank": { + "query_string": "query NoiseRank($ip: String) { noiseRank(ip: $ip) \ + { queryInfo { resultsAvailable resultsLimit } ips { ip noise_score \ + sensor_pervasiveness country_pervasiveness payload_diversity \ + port_diversity request_rate } } }", + "ip_required": True, + }, + "topknocks": { + "query_string": "query TopKnocks($ip: String) { topKnocks(ip: $ip) \ + { queryInfo { resultsAvailable resultsLimit } knock { last_crawled \ + last_seen source_ip knock_port title favicon_mmh3_32 \ + favicon_mmh3_128 jarm ips emails links tor_exit headers apps } } } ", + "ip_required": True, + }, + "topc2s": { + "query_string": "query TopC2s { topC2s { queryInfo \ + { resultsAvailable resultsLimit } c2s { source_ip c2_ips \ + c2_domains payload hits pervasiveness } } } ", + "ip_required": False, + "db_location": db_location, + }, +} + + +class GreynoiseLabs(ObservableAnalyzer): + _auth_token: str + + def run(self): + result = {} + headers = { + "Content-Type": "application/json", + "Authorization": f"Bearer {self._auth_token}", + } + + for key, value in queries.items(): + if not value["ip_required"]: + if not os.path.isfile(value["db_location"]) and not self.update(): + error_message = f"Failed extraction from {key} db" + self.report.errors.append(error_message) + self.report.save() + logger.error(error_message) + continue + + with open(value["db_location"], "r", encoding="utf-8") as f: + db = f.read() + + db_list = db.split("\n") + if self.observable_name in db_list: + result[key] = {"found": True} + else: + result[key] = {"found": False} + continue + + json_body = { + "query": value["query_string"], + "variables": {"ip": f"{self.observable_name}"}, + } + response = requests.post(headers=headers, json=json_body, url=url) + response.raise_for_status() + result[key] = response.json() + + return result + + @classmethod + def _get_auth_token(cls): + for plugin in PluginConfig.objects.filter( + parameter__python_module=cls.python_module, + parameter__is_secret=True, + parameter__name="auth_token", + ): + if plugin.value: + return plugin.value + return None + + @classmethod + def _update_db(cls, auth_token: str): + headers = { + "Content-Type": "application/json", + "Authorization": f"Bearer {auth_token}", + } + + try: + logger.info("Fetching data from greynoise API (Greynoise_Labs).....") + response = requests.post( + headers=headers, + json={"query": queries["topc2s"]["query_string"]}, + url=url, + ) + response.raise_for_status() + topc2s_data = response.json() + + with open(db_location, "w", encoding="utf-8") as f: + for value in topc2s_data["data"]["topC2s"]["c2s"]: + ip = value["source_ip"] + if ip: + f.write(f"{ip}\n") + + if not os.path.exists(db_location): + return False + + logger.info("Data fetched from greynoise API (Greynoise_Labs).....") + return True + except Exception as e: + logger.exception(e) + + @classmethod + def update(cls): + auth_token = cls._get_auth_token() + if auth_token: + return cls._update_db(auth_token=auth_token) + return False + + @classmethod + def _monkeypatch(cls): + patches = [ + if_mock_connections( + patch( + 
"requests.post", + side_effect=[ + MockUpResponse( + { + "data": { + "noiseRank": { + "queryInfo": { + "resultsAvailable": 1, + "resultsLimit": 1, + }, + "ips": [ + { + "ip": "20.235.249.22", + "noise_score": 12, + "sensor_pervasiveness": "very low", + "country_pervasiveness": "low", + "payload_diversity": "very low", + "port_diversity": "very low", + "request_rate": "low", + } + ], + } + } + }, + 200, + ), + MockUpResponse( + { + "data": { + "topKnocks": { + "queryInfo": { + "resultsAvailable": 1, + "resultsLimit": 1, + }, + } + } + }, + 200, + ), + MockUpResponse( + { + "data": { + "topC2s": { + "queryInfo": { + "resultsAvailable": 1914, + "resultsLimit": 191, + }, + "c2s": [ + { + "source_ip": "91.92.247.12", + "c2_ips": ["103.245.236.120"], + "c2_domains": [], + "hits": 11608, + }, + { + "source_ip": "14.225.208.190", + "c2_ips": ["14.225.213.142"], + "c2_domains": [], + "hits": 2091, + "pervasiveness": 26, + }, + { + "source_ip": "157.10.53.101", + "c2_ips": ["14.225.208.190"], + "c2_domains": [], + "hits": 1193, + "pervasiveness": 23, + }, + ], + }, + }, + }, + 200, + ), + ], + ) + ) + ] + return super()._monkeypatch(patches=patches) diff --git a/api_app/analyzers_manager/observable_analyzers/greynoiseintel.py b/api_app/analyzers_manager/observable_analyzers/greynoiseintel.py index 572b6ebb5c..75b1d95dd3 100644 --- a/api_app/analyzers_manager/observable_analyzers/greynoiseintel.py +++ b/api_app/analyzers_manager/observable_analyzers/greynoiseintel.py @@ -17,31 +17,41 @@ class GreyNoiseAnalyzer(classes.ObservableAnalyzer): greynoise_api_version: str max_records_to_retrieve: int - _api_key_name: str + _api_key_name: str = None + + @classmethod + def update(cls) -> bool: + pass + + @property + def integration_name(self): + if self.greynoise_api_version == "v2": + return "greynoise-intelowl-v1.0" + elif self.greynoise_api_version == "v3": + return "greynoise-community-intelowl-v1.0" + raise RuntimeError(f"Version {self.greynoise_api_version} not configured") def run(self): response = {} + if self.greynoise_api_version == "v2": + session = GreyNoise( + api_key=self._api_key_name, + integration_name=self.integration_name, + ) + elif self.greynoise_api_version == "v3": + session = GreyNoise( + api_key=self._api_key_name, + integration_name=self.integration_name, + offering="Community", + ) + else: + raise AnalyzerRunException( + "Invalid API Version. Supported are: v2 (paid), v3 (community)" + ) try: + response = session.ip(self.observable_name) if self.greynoise_api_version == "v2": - session = GreyNoise( - api_key=self._api_key_name, - integration_name="greynoise-intelowl-v1.0", - ) - response = session.ip(self.observable_name) response |= session.riot(self.observable_name) - - elif self.greynoise_api_version == "v3": - # this allows to use this service without an API key set - session = GreyNoise( - api_key=self._api_key_name, - integration_name="greynoise-community-intelowl-v1.0", - offering="Community", - ) - response = session.ip(self.observable_name) - else: - raise AnalyzerRunException( - "Invalid API Version. Supported are: v2 (paid), v3 (community)" - ) # greynoise library does provide empty messages in case of these errors... 
# so it's better to catch them and create custom management except RateLimitError as e: diff --git a/api_app/analyzers_manager/observable_analyzers/ha_get.py b/api_app/analyzers_manager/observable_analyzers/ha_get.py index 40ffdf7acc..4146675f5a 100644 --- a/api_app/analyzers_manager/observable_analyzers/ha_get.py +++ b/api_app/analyzers_manager/observable_analyzers/ha_get.py @@ -9,12 +9,16 @@ class HybridAnalysisGet(ObservableAnalyzer): - base_url: str = "https://www.hybrid-analysis.com" - api_url: str = f"{base_url}/api/v2/" - sample_url: str = f"{base_url}/sample" + url: str = "https://www.hybrid-analysis.com" + api_url: str = f"{url}/api/v2/" + sample_url: str = f"{url}/sample" _api_key_name: str + @classmethod + def update(cls) -> bool: + pass + def run(self): headers = { "api-key": self._api_key_name, diff --git a/api_app/analyzers_manager/observable_analyzers/haveibeenpwned.py b/api_app/analyzers_manager/observable_analyzers/haveibeenpwned.py index 52859493af..b4dee7fc24 100644 --- a/api_app/analyzers_manager/observable_analyzers/haveibeenpwned.py +++ b/api_app/analyzers_manager/observable_analyzers/haveibeenpwned.py @@ -8,13 +8,17 @@ class HaveIBeenPwned(classes.ObservableAnalyzer): - base_url: str = "https://haveibeenpwned.com/api/v3/breachedaccount/" + url: str = "https://haveibeenpwned.com/api/v3/breachedaccount/" truncate_response: bool include_unverified: bool domain: str _api_key_name: str + @classmethod + def update(cls) -> bool: + pass + def run(self): params = { "truncateResponse": self.truncate_response, @@ -26,7 +30,7 @@ def run(self): headers = {"hibp-api-key": self._api_key_name} response = requests.get( - self.base_url + self.observable_name, params=params, headers=headers + self.url + self.observable_name, params=params, headers=headers ) response.raise_for_status() diff --git a/api_app/analyzers_manager/observable_analyzers/honeydb.py b/api_app/analyzers_manager/observable_analyzers/honeydb.py index 87e2cb152d..e16565435a 100644 --- a/api_app/analyzers_manager/observable_analyzers/honeydb.py +++ b/api_app/analyzers_manager/observable_analyzers/honeydb.py @@ -14,12 +14,16 @@ class HoneyDB(classes.ObservableAnalyzer): - base_url = "https://honeydb.io/api" + url = "https://honeydb.io/api" # set secrets _api_key_name: str _api_id_name: str honeydb_analysis: str + @classmethod + def update(cls) -> bool: + pass + def config(self, runtime_configuration: Dict): super().config(runtime_configuration) self.headers = { @@ -53,15 +57,15 @@ def run(self): def _request_analysis(self, endpoint): if endpoint == "scan_twitter": - url = f"{self.base_url}/twitter-threat-feed/{self.observable_name}" + url = f"{self.url}/twitter-threat-feed/{self.observable_name}" elif endpoint == "ip_query": - url = f"{self.base_url}/netinfo/lookup/{self.observable_name}" + url = f"{self.url}/netinfo/lookup/{self.observable_name}" elif endpoint == "ip_history": - url = f"{self.base_url}/ip-history/{self.observable_name}" + url = f"{self.url}/ip-history/{self.observable_name}" elif endpoint == "internet_scanner": - url = f"{self.base_url}/internet-scanner/info/{self.observable_name}" + url = f"{self.url}/internet-scanner/info/{self.observable_name}" elif endpoint == "ip_info": - url = f"{self.base_url}/ipinfo/{self.observable_name}" + url = f"{self.url}/ipinfo/{self.observable_name}" else: logger.error(f"endpoint {endpoint} not supported") return diff --git a/api_app/analyzers_manager/observable_analyzers/hunter_io.py b/api_app/analyzers_manager/observable_analyzers/hunter_io.py index 
f266187c48..e86d265c0c 100644 --- a/api_app/analyzers_manager/observable_analyzers/hunter_io.py +++ b/api_app/analyzers_manager/observable_analyzers/hunter_io.py @@ -8,14 +8,16 @@ class Hunter_Io(classes.ObservableAnalyzer): - base_url: str = "https://api.hunter.io/v2/domain-search?" + url: str = "https://api.hunter.io/v2/domain-search?" _api_key_name: str + @classmethod + def update(cls) -> bool: + pass + def run(self): - url = ( - f"{self.base_url}domain={self.observable_name}&api_key={self._api_key_name}" - ) + url = f"{self.url}domain={self.observable_name}&api_key={self._api_key_name}" response = requests.get(url) response.raise_for_status() diff --git a/api_app/analyzers_manager/observable_analyzers/inquest.py b/api_app/analyzers_manager/observable_analyzers/inquest.py index 4244a6f0ff..ad2e95ef7b 100644 --- a/api_app/analyzers_manager/observable_analyzers/inquest.py +++ b/api_app/analyzers_manager/observable_analyzers/inquest.py @@ -18,11 +18,15 @@ class InQuest(ObservableAnalyzer): - base_url: str = "https://labs.inquest.net" + url: str = "https://labs.inquest.net" _api_key_name: str inquest_analysis: str + @classmethod + def update(cls) -> bool: + pass + def config(self, runtime_configuration: Dict): super().config(runtime_configuration) self.generic_identifier_mode = "user-defined" # Or auto @@ -104,7 +108,7 @@ def run(self): "Supported are: 'dfi_search', 'iocdb_search', 'repdb_search'." ) - response = requests.get(self.base_url + uri, headers=headers, timeout=30) + response = requests.get(self.url + uri, headers=headers, timeout=30) response.raise_for_status() result = response.json() if ( diff --git a/api_app/analyzers_manager/observable_analyzers/ip2location.py b/api_app/analyzers_manager/observable_analyzers/ip2location.py index 6fe7f1efaa..ac4966bfde 100644 --- a/api_app/analyzers_manager/observable_analyzers/ip2location.py +++ b/api_app/analyzers_manager/observable_analyzers/ip2location.py @@ -9,12 +9,16 @@ class Ip2location(classes.ObservableAnalyzer): - base_url: str = "https://api.ip2location.io/" + url: str = "https://api.ip2location.io/" _api_key_name: str api_version: str + @classmethod + def update(cls) -> bool: + pass + def get_response(self, payload): - return requests.get(self.base_url, params=payload) + return requests.get(self.url, params=payload) def run(self): try: diff --git a/api_app/analyzers_manager/observable_analyzers/ip2whois.py b/api_app/analyzers_manager/observable_analyzers/ip2whois.py new file mode 100644 index 0000000000..50c9c5e7b3 --- /dev/null +++ b/api_app/analyzers_manager/observable_analyzers/ip2whois.py @@ -0,0 +1,114 @@ +# This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl +# See the file 'LICENSE' for copying permission. 
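The new Ip2whois analyzer that follows wraps a single GET against the IP2WHOIS v2 endpoint. A minimal sketch of that call (the endpoint and the `key`/`domain` query parameters are taken from the hunk below; the helper name is illustrative):

import requests

def ip2whois_lookup(api_key: str, domain: str) -> dict:
    # same endpoint and query parameters the analyzer below uses
    response = requests.get(
        "https://api.ip2whois.com/v2",
        params={"key": api_key, "domain": domain},
    )
    response.raise_for_status()
    return response.json()
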
+ +import requests + +from api_app.analyzers_manager import classes +from api_app.analyzers_manager.exceptions import AnalyzerRunException +from tests.mock_utils import MockUpResponse, if_mock_connections, patch + + +class Ip2whois(classes.ObservableAnalyzer): + base_url: str = "https://api.ip2whois.com/v2" + _api_key_name: str + + def update(self): + pass + + def get_response(self, payload): + return requests.get(self.base_url, params=payload) + + def run(self): + try: + params = { + "key": self._api_key_name, + "domain": self.observable_name, + } + + location_info = self.get_response(params) + location_info.raise_for_status() + + except requests.RequestException as e: + raise AnalyzerRunException(e) + + response = location_info.json() + return response + + @classmethod + def _monkeypatch(cls): + sample_response = { + "domain": "msn.com", + "domain_id": "4569290_DOMAIN_COM-VRSN", + "status": "client delete prohibited", + "create_date": "1994-11-10T05:00:00Z", + "update_date": "2023-05-03T11:39:17Z", + "expire_date": "2024-06-04T16:44:29Z", + "domain_age": 10766, + "whois_server": "", + "registrar": {"iana_id": "292", "name": "MarkMonitor Inc.", "url": ""}, + "registrant": { + "name": "", + "organization": "", + "street_address": "", + "city": "", + "region": "", + "zip_code": "", + "country": "", + "phone": "", + "fax": "", + "email": "", + }, + "admin": { + "name": "", + "organization": "", + "street_address": "", + "city": "", + "region": "", + "zip_code": "", + "country": "", + "phone": "", + "fax": "", + "email": "", + }, + "tech": { + "name": "", + "organization": "", + "street_address": "", + "city": "", + "region": "", + "zip_code": "", + "country": "", + "phone": "", + "fax": "", + "email": "", + }, + "billing": { + "name": "", + "organization": "", + "street_address": "", + "city": "", + "region": "", + "zip_code": "", + "country": "", + "phone": "", + "fax": "", + "email": "", + }, + "nameservers": [ + "dns1.p09.nsone.net", + "ns1-204.azure-dns.com", + "ns2-204.azure-dns.net", + "ns3-204.azure-dns.org", + "ns4-204.azure-dns.info", + ], + } + + patches = [ + if_mock_connections( + patch( + "requests.get", + return_value=MockUpResponse(sample_response, 200), + ), + ) + ] + return super()._monkeypatch(patches=patches) diff --git a/api_app/analyzers_manager/observable_analyzers/ipinfo.py b/api_app/analyzers_manager/observable_analyzers/ipinfo.py index 31f09563fe..abae077044 100644 --- a/api_app/analyzers_manager/observable_analyzers/ipinfo.py +++ b/api_app/analyzers_manager/observable_analyzers/ipinfo.py @@ -9,14 +9,18 @@ class IPInfo(classes.ObservableAnalyzer): - base_url: str = "https://ipinfo.io/" + url: str = "https://ipinfo.io/" _api_key_name: str + @classmethod + def update(cls) -> bool: + pass + def run(self): try: response = requests.get( - self.base_url + self.observable_name, + self.url + self.observable_name, params={"token": self._api_key_name}, ) response.raise_for_status() diff --git a/api_app/analyzers_manager/observable_analyzers/koodous.py b/api_app/analyzers_manager/observable_analyzers/koodous.py index 9fefb8894c..8fcc6a0d12 100644 --- a/api_app/analyzers_manager/observable_analyzers/koodous.py +++ b/api_app/analyzers_manager/observable_analyzers/koodous.py @@ -8,18 +8,22 @@ class Koodous(classes.ObservableAnalyzer): - base_url: str = "https://developer.koodous.com/apks/" + url: str = "https://developer.koodous.com/apks/" query_analysis = "/analysis" _api_key_name: str + @classmethod + def update(cls) -> bool: + pass + def get_response(self, url): return 
requests.get( url, headers={"Authorization": f"Token {self._api_key_name}"} ) def run(self): - common_url = self.base_url + self.observable_name + common_url = self.url + self.observable_name apk_info = self.get_response(common_url) apk_info.raise_for_status() diff --git a/api_app/analyzers_manager/observable_analyzers/maxmind.py b/api_app/analyzers_manager/observable_analyzers/maxmind.py index 3b502d49c7..639acf198f 100644 --- a/api_app/analyzers_manager/observable_analyzers/maxmind.py +++ b/api_app/analyzers_manager/observable_analyzers/maxmind.py @@ -6,11 +6,13 @@ import os import shutil import tarfile -from typing import Optional import maxminddb import requests from django.conf import settings +from geoip2.database import Reader +from geoip2.errors import AddressNotFoundError, GeoIP2Error +from geoip2.models import ASN, City, Country from api_app.analyzers_manager import classes from api_app.analyzers_manager.exceptions import ( @@ -22,53 +24,83 @@ logger = logging.getLogger(__name__) -db_names = ["GeoLite2-Country.mmdb", "GeoLite2-City.mmdb"] +class MaxmindDBManager: + _supported_dbs: [str] = ["GeoLite2-Country", "GeoLite2-City", "GeoLite2-ASN"] + _default_db_extension: str = ".mmdb" -class Maxmind(classes.ObservableAnalyzer): - _api_key_name: str + @classmethod + def get_supported_dbs(cls) -> [str]: + return [db_name + cls._default_db_extension for db_name in cls._supported_dbs] - def run(self): - maxmind_final_result = {} - for db in db_names: - try: - db_location = _get_db_location(db) - if not os.path.isfile(db_location) and not self._update_db( - db, self._api_key_name - ): - raise AnalyzerRunException( - f"failed extraction of maxmind db {db}," - " reached max number of attempts" - ) - if not os.path.exists(db_location): - raise maxminddb.InvalidDatabaseError( - "database location does not exist" - ) - reader = maxminddb.open_database(db_location) - maxmind_result = reader.get(self.observable_name) - reader.close() - except maxminddb.InvalidDatabaseError as e: - error_message = f"Invalid database error: {e}" - logger.exception(error_message) - maxmind_result = {"error": error_message} - logger.info(f"maxmind result: {maxmind_result}") - if maxmind_result: + @classmethod + def update_all_dbs(cls, api_key: str) -> bool: + return all(cls._update_db(db, api_key) for db in cls._supported_dbs) + + def query_all_dbs(self, observable_query: str, api_key: str) -> (dict, dict): + maxmind_final_result: {} = {} + maxmind_errors: [] = [] + for db in self._supported_dbs: + maxmind_result, maxmind_error = self._query_single_db( + observable_query, db, api_key + ) + + if maxmind_error: + maxmind_errors.append(maxmind_error["error"]) + elif maxmind_result: + logger.info(f"maxmind result: {maxmind_result} in {db=}") maxmind_final_result.update(maxmind_result) else: - logger.warning("maxmind result not available") + logger.warning(f"maxmind result not available in {db=}") - return maxmind_final_result + return maxmind_final_result, maxmind_errors @classmethod - def _get_api_key(cls) -> Optional[str]: - for plugin in PluginConfig.objects.filter( - parameter__python_module=cls.python_module, - parameter__is_secret=True, - parameter__name="api_key_name", - ): - if plugin.value: - return plugin.value - return None + def _get_physical_location(cls, db: str) -> str: + return f"{settings.MEDIA_ROOT}/{db}{cls._default_db_extension}" + + def _query_single_db( + self, query_ip: str, db_name: str, api_key: str + ) -> (dict, dict): + result: ASN | City | Country + db_path: str = 
self._get_physical_location(db_name) + self._check_and_update_db(api_key, db_name) + + logger.info(f"Query {db_name=} for {query_ip=}") + with Reader(db_path) as reader: + try: + if "ASN" in db_name: + result = reader.asn(query_ip) + elif "Country" in db_name: + result = reader.country(query_ip) + elif "City" in db_name: + result = reader.city(query_ip) + except AddressNotFoundError: + reader.close() + logger.info( + f"Query for observable '{query_ip}' " + "didn't produce any results in any db." + ) + return {}, {} + except (GeoIP2Error, maxminddb.InvalidDatabaseError) as e: + error_message = f"GeoIP2 database error: {e}" + logger.exception(error_message) + return {}, {"error": error_message} + else: + reader.close() + return result.raw, {} + + def _check_and_update_db(self, api_key: str, db_name: str): + db_path = self._get_physical_location(db_name) + if not os.path.isfile(db_path) and not self._update_db(db_name, api_key): + raise AnalyzerRunException( + f"failed extraction of maxmind db {db_name}," + " reached max number of attempts" + ) + if not os.path.exists(db_path): + raise maxminddb.InvalidDatabaseError( + f"database location '{db_path}' does not exist" + ) @classmethod def _update_db(cls, db: str, api_key: str) -> bool: @@ -77,78 +109,122 @@ def _update_db(cls, db: str, api_key: str) -> bool: f"Unable to find api key for {cls.__name__}" ) - db_location = _get_db_location(db) try: - db_name_wo_ext = db[:-5] - logger.info(f"starting download of db {db_name_wo_ext} from maxmind") - url = ( - "https://download.maxmind.com/app/geoip_download?edition_id=" - f"{db_name_wo_ext}&license_key={api_key}&suffix=tar.gz" - ) - r = requests.get(url) - if r.status_code >= 300: - raise AnalyzerRunException( - f"failed request for new maxmind db {db_name_wo_ext}." 
- f" Status code: {r.status_code}" - ) + logger.info(f"starting download of {db=} from maxmind") - tar_db_path = f"/tmp/{db_name_wo_ext}.tar.gz" - with open(tar_db_path, "wb") as f: - f.write(r.content) # lgtm [py/clear-text-storage-sensitive-data] - - tf = tarfile.open(tar_db_path) - directory_to_extract_files = settings.MEDIA_ROOT - tf.extractall(str(directory_to_extract_files)) - - today = datetime.datetime.now().date() - counter = 0 - directory_found = False - downloaded_db_path = "" - # this is because we do not know the exact date of the db we downloaded - while counter < 10 or not directory_found: - date_to_check = today - datetime.timedelta(days=counter) - formatted_date = date_to_check.strftime("%Y%m%d") - downloaded_db_path = ( - f"{directory_to_extract_files}/" - f"{db_name_wo_ext}_{formatted_date}/{db}" - ) - try: - os.rename(downloaded_db_path, db_location) - except FileNotFoundError: - logger.debug( - f"{downloaded_db_path} not found move to the day before" - ) - counter += 1 - else: - directory_found = True - shutil.rmtree( - f"{directory_to_extract_files}/" - f"{db_name_wo_ext}_{formatted_date}" - ) - - if directory_found: - logger.info(f"maxmind directory found {downloaded_db_path}") - else: + tar_db_path = cls._download_db(db, api_key) + cls._extract_db_to_media_root(tar_db_path) + directory_found = cls._remove_old_db(db) + + if not directory_found: return False - logger.info(f"ended download of db {db_name_wo_ext} from maxmind") + logger.info(f"ended download of {db=} from maxmind") return True except Exception as e: logger.exception(e) return False + @classmethod + def _download_db(cls, db_name: str, api_key: str) -> str: + url = ( + "https://download.maxmind.com/app/geoip_download?edition_id=" + f"{db_name}&license_key={api_key}&suffix=tar.gz" + ) + response = requests.get(url) + if response.status_code >= 300: + raise AnalyzerRunException( + f"failed request for new maxmind db {db_name}." 
+ f" Status code: {response.status_code}" + f"\nResponse: {response.raw}" + ) + + return cls._write_db_to_filesystem(db_name, response.content) + + @classmethod + def _write_db_to_filesystem(cls, db_name: str, content: bytes) -> str: + tar_db_path = f"/tmp/{db_name}.tar.gz" + logger.info( + f"starting writing db {db_name} downloaded from maxmind to {tar_db_path}" + ) + with open(tar_db_path, "wb") as f: + f.write(content) + + return tar_db_path + + @classmethod + def _extract_db_to_media_root(cls, tar_db_path: str): + logger.info(f"Started extracting {tar_db_path} to {settings.MEDIA_ROOT}.") + tf = tarfile.open(tar_db_path) + tf.extractall(str(settings.MEDIA_ROOT)) + logger.info(f"Finished extracting {tar_db_path} to {settings.MEDIA_ROOT}.") + + @classmethod + def _remove_old_db(cls, db: str) -> bool: + physical_db_location = cls._get_physical_location(db) + today = datetime.datetime.now().date() + counter = 0 + directory_found = False + # this is because we do not know the exact date of the db we downloaded + while counter < 10 or not directory_found: + formatted_date = (today - datetime.timedelta(days=counter)).strftime( + "%Y%m%d" + ) + downloaded_db_path = ( + f"{settings.MEDIA_ROOT}/" + f"{db}_{formatted_date}/{db}{cls._default_db_extension}" + ) + try: + os.rename(downloaded_db_path, physical_db_location) + except FileNotFoundError: + logger.debug(f"{downloaded_db_path} not found move to the day before") + counter += 1 + else: + directory_found = True + shutil.rmtree(f"{settings.MEDIA_ROOT}/" f"{db}_{formatted_date}") + logger.info(f"maxmind directory found {downloaded_db_path}") + return directory_found + + +class Maxmind(classes.ObservableAnalyzer): + _api_key_name: str + _maxmind_db_manager: "MaxmindDBManager" = MaxmindDBManager() + + def run(self): + maxmind_final_result, maxmind_errors = self._maxmind_db_manager.query_all_dbs( + self.observable_name, self._api_key_name + ) + if maxmind_errors: + for error_msg in maxmind_errors: + self.report.errors.append(error_msg) + self.report.save() + return maxmind_final_result + + @classmethod + def get_db_names(cls) -> [str]: + return cls._maxmind_db_manager.get_supported_dbs() + + @classmethod + def _get_api_key(cls): + for plugin in PluginConfig.objects.filter( + parameter__python_module=cls.python_module, + parameter__is_secret=True, + parameter__name="_api_key_name", + ): + if plugin.value: + return plugin.value + return None + @classmethod def update(cls) -> bool: - api_key = cls._get_api_key() - return all(cls._update_db(db, api_key) for db in db_names) + auth_token = cls._get_api_key() + if auth_token: + return cls._maxmind_db_manager.update_all_dbs(cls._api_key_name) + return False @classmethod def _monkeypatch(cls): # completely skip because does not work without connection. 
patches = [if_mock_connections(patch.object(cls, "run", return_value={}))] return super()._monkeypatch(patches=patches) - - -def _get_db_location(db): - return f"{settings.MEDIA_ROOT}/{db}" diff --git a/api_app/analyzers_manager/observable_analyzers/mnemonic_pdns.py b/api_app/analyzers_manager/observable_analyzers/mnemonic_pdns.py index ef37bc16c2..d92b33a8fb 100644 --- a/api_app/analyzers_manager/observable_analyzers/mnemonic_pdns.py +++ b/api_app/analyzers_manager/observable_analyzers/mnemonic_pdns.py @@ -10,16 +10,20 @@ class MnemonicPassiveDNS(classes.ObservableAnalyzer): - base_url: str = "https://api.mnemonic.no/pdns/v3/" + url: str = "https://api.mnemonic.no/pdns/v3/" cof_format: bool limit: int + @classmethod + def update(cls) -> bool: + pass + def run(self): if self.cof_format: - self.base_url += "cof/" + self.url += "cof/" response = requests.get( - self.base_url + self.observable_name, data={"limit": self.limit} + self.url + self.observable_name, data={"limit": self.limit} ) response.raise_for_status() diff --git a/api_app/analyzers_manager/observable_analyzers/netlas.py b/api_app/analyzers_manager/observable_analyzers/netlas.py index e1e45c484d..c7afe7d0d9 100644 --- a/api_app/analyzers_manager/observable_analyzers/netlas.py +++ b/api_app/analyzers_manager/observable_analyzers/netlas.py @@ -10,10 +10,14 @@ class Netlas(classes.ObservableAnalyzer): - base_url: str = "https://app.netlas.io/api/whois_ip/" + url: str = "https://app.netlas.io/api/whois_ip/" _api_key_name: str + @classmethod + def update(cls) -> bool: + pass + def config(self, runtime_configuration: Dict): super().config(runtime_configuration) self.query = self.observable_name @@ -25,7 +29,7 @@ def config(self, runtime_configuration: Dict): def run(self): try: response = requests.get( - self.base_url, params=self.parameters, headers=self.headers + self.url, params=self.parameters, headers=self.headers ) response.raise_for_status() except requests.RequestException as e: diff --git a/api_app/analyzers_manager/observable_analyzers/onyphe.py b/api_app/analyzers_manager/observable_analyzers/onyphe.py index aec616e067..d6f08d3806 100644 --- a/api_app/analyzers_manager/observable_analyzers/onyphe.py +++ b/api_app/analyzers_manager/observable_analyzers/onyphe.py @@ -9,10 +9,14 @@ class Onyphe(classes.ObservableAnalyzer): - base_url: str = "https://www.onyphe.io/api/v2/summary/" + url: str = "https://www.onyphe.io/api/v2/summary/" _api_key_name: str + @classmethod + def update(cls) -> bool: + pass + def run(self): headers = { "Authorization": f"apikey {self._api_key_name}", @@ -33,7 +37,7 @@ def run(self): ) try: - response = requests.get(self.base_url + uri, headers=headers) + response = requests.get(self.url + uri, headers=headers) response.raise_for_status() except requests.RequestException as e: raise AnalyzerRunException(e) diff --git a/api_app/analyzers_manager/observable_analyzers/phishstats.py b/api_app/analyzers_manager/observable_analyzers/phishstats.py index f83744707e..5ee7b77cff 100644 --- a/api_app/analyzers_manager/observable_analyzers/phishstats.py +++ b/api_app/analyzers_manager/observable_analyzers/phishstats.py @@ -16,7 +16,11 @@ class PhishStats(ObservableAnalyzer): Analyzer that uses PhishStats API to check if the observable is a phishing site. 
""" - base_url: str = "https://phishstats.info:2096/api" + url: str = "https://phishstats.info:2096/api" + + @classmethod + def update(cls) -> bool: + pass def __build_phishstats_url(self) -> str: to_analyze_observable_classification = self.observable_classification @@ -47,7 +51,7 @@ def __build_phishstats_url(self) -> str: raise AnalyzerRunException( "Phishstats require either of IP, URL, Domain or Generic" ) - return f"{self.base_url}/{endpoint}" + return f"{self.url}/{endpoint}" def run(self): api_url = self.__build_phishstats_url() diff --git a/api_app/analyzers_manager/observable_analyzers/phoneinfoga_scan.py b/api_app/analyzers_manager/observable_analyzers/phoneinfoga_scan.py index 882547c20c..1f7692f254 100644 --- a/api_app/analyzers_manager/observable_analyzers/phoneinfoga_scan.py +++ b/api_app/analyzers_manager/observable_analyzers/phoneinfoga_scan.py @@ -25,9 +25,10 @@ def update(self) -> bool: _NUMVERIFY_API_KEY: str = "" _GOOGLECSE_CX: str = "" _GOOGLE_API_KEY: str = "" + url = "http://phoneinfoga:5000" def run(self): - url: str = f"http://phoneinfoga:5000/api/v2/scanners/{self.scanner_name}/run" + url: str = f"{self.url}/api/v2/scanners/{self.scanner_name}/run" response = requests.post( url, headers={ diff --git a/api_app/analyzers_manager/observable_analyzers/pulsedive.py b/api_app/analyzers_manager/observable_analyzers/pulsedive.py index 6d9110d853..ae6c6baf23 100644 --- a/api_app/analyzers_manager/observable_analyzers/pulsedive.py +++ b/api_app/analyzers_manager/observable_analyzers/pulsedive.py @@ -18,13 +18,17 @@ class Pulsedive(ObservableAnalyzer): - base_url: str = "https://pulsedive.com/api" + url: str = "https://pulsedive.com/api" max_tries: int = 10 poll_distance: int = 10 scan_mode: str _api_key_name: str + @classmethod + def update(cls) -> bool: + pass + def config(self, runtime_configuration: Dict): super().config(runtime_configuration) supported_scan_values = ["basic", "passive", "active"] @@ -53,7 +57,7 @@ def run(self): params = f"indicator={self.observable_name}" if hasattr(self, "_api_key_name"): params += self.default_param - resp = requests.get(f"{self.base_url}/info.php?{params}") + resp = requests.get(f"{self.url}/info.php?{params}") # handle 404 case, submit for analysis if resp.status_code == 404 and self.scan_mode != "basic": @@ -70,9 +74,7 @@ def __submit_for_analysis(self) -> dict: if hasattr(self, "_api_key_name"): params += self.default_param headers = {"Content-Type": "application/x-www-form-urlencoded; charset=UTF-8"} - resp = requests.post( - f"{self.base_url}/analyze.php", data=params, headers=headers - ) + resp = requests.post(f"{self.url}/analyze.php", data=params, headers=headers) resp.raise_for_status() qid = resp.json().get("qid", None) # 3. 
retrieve result using qid after waiting for 10 seconds @@ -87,7 +89,7 @@ def __submit_for_analysis(self) -> dict: def __poll_for_result(self, params): result = {} - url = f"{self.base_url}/analyze.php?{params}" + url = f"{self.url}/analyze.php?{params}" obj_repr = self.__repr__() for chance in range(self.max_tries): logger.info( diff --git a/api_app/analyzers_manager/observable_analyzers/robtex.py b/api_app/analyzers_manager/observable_analyzers/robtex.py index fb126c7aba..9c5458a142 100644 --- a/api_app/analyzers_manager/observable_analyzers/robtex.py +++ b/api_app/analyzers_manager/observable_analyzers/robtex.py @@ -12,7 +12,11 @@ class Robtex(classes.ObservableAnalyzer): - base_url = "https://freeapi.robtex.com/" + url = "https://freeapi.robtex.com/" + + @classmethod + def update(cls) -> bool: + pass def run(self): if self.observable_classification == self.ObservableTypes.IP: @@ -36,7 +40,7 @@ def run(self): loaded_results = [] for uri in uris: - response = requests.get(self.base_url + uri) + response = requests.get(self.url + uri) response.raise_for_status() result = response.text.split("\r\n") for item in result: diff --git a/api_app/analyzers_manager/observable_analyzers/securitytrails.py b/api_app/analyzers_manager/observable_analyzers/securitytrails.py index d1c01e6edc..fb135350fd 100644 --- a/api_app/analyzers_manager/observable_analyzers/securitytrails.py +++ b/api_app/analyzers_manager/observable_analyzers/securitytrails.py @@ -9,12 +9,16 @@ class SecurityTrails(classes.ObservableAnalyzer): - base_url: str = "https://api.securitytrails.com/v1/" + url: str = "https://api.securitytrails.com/v1/" securitytrails_analysis: str securitytrails_current_type: str securitytrails_history_analysis: str _api_key_name: str + @classmethod + def update(cls) -> bool: + pass + def run(self): headers = {"apikey": self._api_key_name, "Content-Type": "application/json"} @@ -54,7 +58,7 @@ def run(self): ) try: - response = requests.get(self.base_url + uri, headers=headers) + response = requests.get(self.url + uri, headers=headers) response.raise_for_status() except requests.RequestException as e: raise AnalyzerRunException(e) diff --git a/api_app/analyzers_manager/observable_analyzers/shodan.py b/api_app/analyzers_manager/observable_analyzers/shodan.py index aa9874f353..4090de9089 100644 --- a/api_app/analyzers_manager/observable_analyzers/shodan.py +++ b/api_app/analyzers_manager/observable_analyzers/shodan.py @@ -12,11 +12,15 @@ class Shodan(classes.ObservableAnalyzer): - base_url: str = "https://api.shodan.io/" + url: str = "https://api.shodan.io/" shodan_analysis: str _api_key_name: str + @classmethod + def update(cls) -> bool: + pass + def run(self): if self.shodan_analysis == "search": params = {"key": self._api_key_name, "minify": True} @@ -33,7 +37,7 @@ def run(self): ) try: - response = requests.get(self.base_url + uri, params=params) + response = requests.get(self.url + uri, params=params) response.raise_for_status() except requests.RequestException as e: raise AnalyzerRunException(e) diff --git a/api_app/analyzers_manager/observable_analyzers/spyse.py b/api_app/analyzers_manager/observable_analyzers/spyse.py index 30e6c8057c..d5bf9e05fc 100644 --- a/api_app/analyzers_manager/observable_analyzers/spyse.py +++ b/api_app/analyzers_manager/observable_analyzers/spyse.py @@ -12,10 +12,14 @@ class Spyse(classes.ObservableAnalyzer): - base_url: str = "https://api.spyse.com/v4/data/" + url: str = "https://api.spyse.com/v4/data/" _api_key_name: str + @classmethod + def update(cls) -> bool: + pass + 
def __build_spyse_api_uri(self) -> str: if self.observable_classification == self.ObservableTypes.DOMAIN: endpoint = "domain" @@ -37,7 +41,7 @@ def __build_spyse_api_uri(self) -> str: f"{self.observable_classification} not supported." "Supported are: IP, domain and generic." ) - return f"{self.base_url}/{endpoint}/{self.observable_name}" + return f"{self.url}/{endpoint}/{self.observable_name}" def run(self): headers = { diff --git a/api_app/analyzers_manager/observable_analyzers/ss_api_net.py b/api_app/analyzers_manager/observable_analyzers/ss_api_net.py index b32be8da39..a79806446c 100644 --- a/api_app/analyzers_manager/observable_analyzers/ss_api_net.py +++ b/api_app/analyzers_manager/observable_analyzers/ss_api_net.py @@ -14,7 +14,7 @@ class SSAPINet(classes.ObservableAnalyzer): - base_url: str = "https://shot.screenshotapi.net/screenshot" + url: str = "https://shot.screenshotapi.net/screenshot" _api_key_name: str use_proxy: bool @@ -23,6 +23,10 @@ class SSAPINet(classes.ObservableAnalyzer): # for other params provided by the API extra_api_params: dict + @classmethod + def update(cls) -> bool: + pass + def run(self): if self.use_proxy and not self.proxy: raise AnalyzerConfigurationException( @@ -45,7 +49,7 @@ def run(self): if self.use_proxy: params["proxy"] = self.proxy - resp = requests.get(self.base_url, params=params) + resp = requests.get(self.url, params=params) resp.raise_for_status() except requests.RequestException as e: raise AnalyzerRunException(e) diff --git a/api_app/analyzers_manager/observable_analyzers/stalkphish.py b/api_app/analyzers_manager/observable_analyzers/stalkphish.py index 4ad9b513e9..93b5505edf 100644 --- a/api_app/analyzers_manager/observable_analyzers/stalkphish.py +++ b/api_app/analyzers_manager/observable_analyzers/stalkphish.py @@ -9,10 +9,14 @@ class Stalkphish(classes.ObservableAnalyzer): - base_url: str = "https://api.stalkphish.io/api/v1/" + url: str = "https://api.stalkphish.io/api/v1/" _api_key_name: str + @classmethod + def update(cls) -> bool: + pass + def run(self): headers = { "User-Agent": "Stalkphish/IntelOwl", @@ -35,7 +39,7 @@ def run(self): ) try: - response = requests.get(self.base_url + uri, headers=headers) + response = requests.get(self.url + uri, headers=headers) response.raise_for_status() except requests.RequestException as e: raise AnalyzerRunException(e) diff --git a/api_app/analyzers_manager/observable_analyzers/threatfox.py b/api_app/analyzers_manager/observable_analyzers/threatfox.py index 642644926e..512a1e6960 100644 --- a/api_app/analyzers_manager/observable_analyzers/threatfox.py +++ b/api_app/analyzers_manager/observable_analyzers/threatfox.py @@ -10,7 +10,7 @@ class ThreatFox(classes.ObservableAnalyzer): - base_url: str = "https://threatfox-api.abuse.ch/api/v1/" + url: str = "https://threatfox-api.abuse.ch/api/v1/" disable: bool = False # optional def update(self) -> bool: @@ -22,15 +22,18 @@ def run(self): payload = {"query": "search_ioc", "search_term": self.observable_name} - response = requests.post(self.base_url, data=json.dumps(payload)) + response = requests.post(self.url, data=json.dumps(payload)) response.raise_for_status() result = response.json() - data = result.get("data", {}) - if isinstance(data, dict): - ioc_id = data.get("id", "") - if ioc_id: - result["link"] = f"https://threatfox.abuse.ch/ioc/{ioc_id}" + data = result.get("data", []) + if data and isinstance(data, list): + for index, element in enumerate(data): + ioc_id = element.get("id", "") + if ioc_id: + result["data"][index][ + "link" + ] = 
f"https://threatfox.abuse.ch/ioc/{ioc_id}" return result @classmethod @@ -39,7 +42,18 @@ def _monkeypatch(cls): if_mock_connections( patch( "requests.post", - return_value=MockUpResponse({}, 200), + return_value=MockUpResponse( + { + "query_status": "ok", + "data": [ + { + "id": "12", + "ioc": "139.180.203.104:443", + }, + ], + }, + 200, + ), ), ) ] diff --git a/api_app/analyzers_manager/observable_analyzers/threatminer.py b/api_app/analyzers_manager/observable_analyzers/threatminer.py index 19aa7e5157..39b9db9b47 100644 --- a/api_app/analyzers_manager/observable_analyzers/threatminer.py +++ b/api_app/analyzers_manager/observable_analyzers/threatminer.py @@ -9,9 +9,13 @@ class Threatminer(classes.ObservableAnalyzer): - base_url = "https://api.threatminer.org/v2/" + url = "https://api.threatminer.org/v2/" rt_value: str + @classmethod + def update(cls) -> bool: + pass + def run(self): params = {"q": self.observable_name} if self.rt_value: @@ -30,7 +34,7 @@ def run(self): ) try: - response = requests.get(self.base_url + uri, params=params) + response = requests.get(self.url + uri, params=params) response.raise_for_status() except requests.RequestException as e: raise AnalyzerRunException(e) diff --git a/api_app/analyzers_manager/observable_analyzers/threatstream.py b/api_app/analyzers_manager/observable_analyzers/threatstream.py index cd3a8e7db9..cfef6ce509 100644 --- a/api_app/analyzers_manager/observable_analyzers/threatstream.py +++ b/api_app/analyzers_manager/observable_analyzers/threatstream.py @@ -12,7 +12,7 @@ class Threatstream(classes.ObservableAnalyzer): - base_url: str = "https://api.threatstream.com/api/" + url: str = "https://api.threatstream.com/api/" threatstream_analysis: str limit: str @@ -23,6 +23,10 @@ class Threatstream(classes.ObservableAnalyzer): _api_key_name: str _api_user_name: str + @classmethod + def update(cls) -> bool: + pass + def run(self): params = {} uri = "" @@ -60,9 +64,7 @@ def run(self): api_header = { "Authorization": f"apikey {self._api_user_name}:{self._api_key_name}" } - response = requests.get( - self.base_url + uri, params=params, headers=api_header - ) + response = requests.get(self.url + uri, params=params, headers=api_header) response.raise_for_status() except requests.RequestException as e: raise AnalyzerRunException(e) diff --git a/api_app/analyzers_manager/observable_analyzers/tranco.py b/api_app/analyzers_manager/observable_analyzers/tranco.py index 288d913f1f..f5e3840789 100644 --- a/api_app/analyzers_manager/observable_analyzers/tranco.py +++ b/api_app/analyzers_manager/observable_analyzers/tranco.py @@ -10,14 +10,18 @@ class Tranco(classes.ObservableAnalyzer): - base_url: str = "https://tranco-list.eu/api/ranks/domain/" + url: str = "https://tranco-list.eu/api/ranks/domain/" + + @classmethod + def update(cls) -> bool: + pass def run(self): observable_to_analyze = self.observable_name if self.observable_classification == self.ObservableTypes.URL: observable_to_analyze = urlparse(self.observable_name).hostname - url = self.base_url + observable_to_analyze + url = self.url + observable_to_analyze response = requests.get(url) response.raise_for_status() diff --git a/api_app/analyzers_manager/observable_analyzers/urlhaus.py b/api_app/analyzers_manager/observable_analyzers/urlhaus.py index 1fc90bb316..fa9dddc8ff 100644 --- a/api_app/analyzers_manager/observable_analyzers/urlhaus.py +++ b/api_app/analyzers_manager/observable_analyzers/urlhaus.py @@ -9,7 +9,7 @@ class URLHaus(classes.ObservableAnalyzer): - base_url = 
"https://urlhaus-api.abuse.ch/v1/" + url = "https://urlhaus-api.abuse.ch/v1/" disable: bool = False # optional def update(self) -> bool: @@ -34,7 +34,7 @@ def run(self): f"not supported observable type {self.observable_classification}." ) - response = requests.post(self.base_url + uri, data=post_data, headers=headers) + response = requests.post(self.url + uri, data=post_data, headers=headers) response.raise_for_status() return response.json() diff --git a/api_app/analyzers_manager/observable_analyzers/urlscan.py b/api_app/analyzers_manager/observable_analyzers/urlscan.py index 5481ef272c..acca95d4e3 100644 --- a/api_app/analyzers_manager/observable_analyzers/urlscan.py +++ b/api_app/analyzers_manager/observable_analyzers/urlscan.py @@ -14,13 +14,17 @@ class UrlScan(ObservableAnalyzer): - base_url: str = "https://urlscan.io/api/v1" + url: str = "https://urlscan.io/api/v1" urlscan_analysis: str visibility: str search_size: int _api_key_name: str + @classmethod + def update(cls) -> bool: + pass + def run(self): headers = {"Content-Type": "application/json", "User-Agent": "IntelOwl/v1.x"} if not hasattr(self, "_api_key_name") and self.urlscan_analysis == "search": @@ -45,7 +49,7 @@ def run(self): def __urlscan_submit(self) -> str: data = {"url": self.observable_name, "visibility": self.visibility} uri = "/scan/" - response = self.session.post(self.base_url + uri, json=data) + response = self.session.post(self.url + uri, json=data) # catch error description to help users to understand why it did not work if response.status_code == 400: error_description = response.json().get("description", "") @@ -78,7 +82,7 @@ def __urlscan_search(self): } if self.observable_classification == self.ObservableTypes.URL: params["q"] = "page." + params["q"] - resp = self.session.get(self.base_url + "/search/", params=params) + resp = self.session.get(self.url + "/search/", params=params) resp.raise_for_status() result = resp.json() return result diff --git a/api_app/analyzers_manager/observable_analyzers/validin.py b/api_app/analyzers_manager/observable_analyzers/validin.py index 1b2e74a132..f281a0b1ae 100644 --- a/api_app/analyzers_manager/observable_analyzers/validin.py +++ b/api_app/analyzers_manager/observable_analyzers/validin.py @@ -20,7 +20,7 @@ class Validin(classes.ObservableAnalyzer): def update(self) -> bool: pass - base_url: str = "https://app.validin.com" + url: str = "https://app.validin.com" observable_classification: str observable_name: str scan_choice: str @@ -34,7 +34,7 @@ def _run_all_queries(self, endpoints, headers): ).items(): logger.info(f"Executing query {query_name}") try: - response = requests.get(self.base_url + query_url, headers=headers) + response = requests.get(self.url + query_url, headers=headers) if response.status_code != 200: logger.error(f"Query {query_name} failed") @@ -50,7 +50,7 @@ def _run_specific_query(self, endpoints, headers): if self.observable_classification in endpoints: try: query_url = endpoints[self.observable_classification][self.scan_choice] - response = requests.get(self.base_url + query_url, headers=headers) + response = requests.get(self.url + query_url, headers=headers) return response.json() except KeyError: raise AnalyzerRunException( diff --git a/api_app/analyzers_manager/observable_analyzers/virushee.py b/api_app/analyzers_manager/observable_analyzers/virushee.py index 27bdb9f3c1..528bbb76ea 100644 --- a/api_app/analyzers_manager/observable_analyzers/virushee.py +++ b/api_app/analyzers_manager/observable_analyzers/virushee.py @@ -9,14 +9,18 @@ class 
VirusheeCheckHash(ObservableAnalyzer): - base_url: str = "https://api.virushee.com/file/hash/{input}" + url: str = "https://api.virushee.com/file/hash/{input}" _api_key_name: str + @classmethod + def update(cls) -> bool: + pass + def run(self): self.__session = requests.Session() if hasattr(self, "_api_key_name"): self.__session.headers["X-API-Key"] = self._api_key_name - url = self.base_url.format(input=self.observable_name) + url = self.url.format(input=self.observable_name) try: response = self.__session.get(url) diff --git a/api_app/analyzers_manager/observable_analyzers/vt/vt3_base.py b/api_app/analyzers_manager/observable_analyzers/vt/vt3_base.py index 77451e2fa7..85f0b99c19 100644 --- a/api_app/analyzers_manager/observable_analyzers/vt/vt3_base.py +++ b/api_app/analyzers_manager/observable_analyzers/vt/vt3_base.py @@ -229,24 +229,22 @@ def _vt_get_report( self._vt_get_relationships( observable_name, relationships_requested, uri, result ) - uri_prefix, uri_postfix = self._get_url_prefix_postfix() + uri_prefix, uri_postfix = self._get_url_prefix_postfix(result) result["link"] = f"https://www.virustotal.com/gui/{uri_prefix}/{uri_postfix}" return result - def _get_url_prefix_postfix(self) -> Tuple[str, str]: + def _get_url_prefix_postfix(self, result: Dict) -> Tuple[str, str]: + uri_postfix = self._job.observable_name if self._job.observable_classification == ObservableClassification.DOMAIN.value: uri_prefix = "domain" - uri_postfix = self._job.observable_name elif self._job.observable_classification == ObservableClassification.IP.value: uri_prefix = "ip-address" - uri_postfix = self._job.observable_name elif self._job.observable_classification == ObservableClassification.URL.value: uri_prefix = "url" - uri_postfix = self._job.sha256 + uri_postfix = result.get("data", {}).get("id", self._job.sha256) else: # hash uri_prefix = "search" - uri_postfix = self._job.observable_name return uri_prefix, uri_postfix def _vt_scan_file(self, md5: str, rescan_instead: bool = False) -> dict: diff --git a/api_app/analyzers_manager/observable_analyzers/xforce.py b/api_app/analyzers_manager/observable_analyzers/xforce.py index cde4f9c66e..e3510865c9 100644 --- a/api_app/analyzers_manager/observable_analyzers/xforce.py +++ b/api_app/analyzers_manager/observable_analyzers/xforce.py @@ -12,7 +12,7 @@ class XForce(classes.ObservableAnalyzer): - base_url: str = "https://exchange.xforce.ibmcloud.com/api" + url: str = "https://exchange.xforce.ibmcloud.com/api" web_url: str = "https://exchange.xforce.ibmcloud.com" _api_key_name: str @@ -20,6 +20,10 @@ class XForce(classes.ObservableAnalyzer): malware_only: bool timeout: int = 5 + @classmethod + def update(cls) -> bool: + pass + def run(self): auth = HTTPBasicAuth(self._api_key_name, self._api_password_name) headers = {"Accept": "application/json"} @@ -31,7 +35,7 @@ def run(self): observable_to_check = quote_plus(self.observable_name) else: observable_to_check = self.observable_name - url = f"{self.base_url}/{endpoint}/{observable_to_check}" + url = f"{self.url}/{endpoint}/{observable_to_check}" response = requests.get( url, auth=auth, headers=headers, timeout=self.timeout ) diff --git a/api_app/analyzers_manager/observable_analyzers/zoomeye.py b/api_app/analyzers_manager/observable_analyzers/zoomeye.py index 5111365110..bc3d0fed34 100644 --- a/api_app/analyzers_manager/observable_analyzers/zoomeye.py +++ b/api_app/analyzers_manager/observable_analyzers/zoomeye.py @@ -12,7 +12,7 @@ class ZoomEye(classes.ObservableAnalyzer): - base_url: str = 
"https://api.zoomeye.org/" + url: str = "https://api.zoomeye.org/" search_type: str query: str @@ -21,6 +21,10 @@ class ZoomEye(classes.ObservableAnalyzer): history: bool _api_key_name: str + @classmethod + def update(cls) -> bool: + pass + def __build_zoomeye_url(self): if self.observable_classification == self.ObservableTypes.IP: self.query += f" ip:{self.observable_name}" @@ -29,20 +33,20 @@ def __build_zoomeye_url(self): self.search_type = "host" if self.search_type in ["host", "web"]: - self.url = self.base_url + self.search_type + "/search?query=" - self.url += self.query + self.final_url = self.url + self.search_type + "/search?query=" + self.final_url += self.query if self.page: - self.url += f"&page={self.page}" + self.final_url += f"&page={self.page}" if self.facets: - self.url += f"&facet={','.join(self.facets)}" + self.final_url += f"&facet={','.join(self.facets)}" elif self.search_type == "both": - self.url = self.base_url + "both/search?" + self.final_url = self.url + "both/search?" if self.history: - self.url += f"history={self.history}&" - self.url += f"ip={self.observable_name}" + self.final_url += f"history={self.history}&" + self.final_url += f"ip={self.observable_name}" else: raise AnalyzerConfigurationException( f"search type: '{self.search_type}' not supported." @@ -53,7 +57,9 @@ def run(self): self.__build_zoomeye_url() try: - response = requests.get(self.url, headers={"API-KEY": self._api_key_name}) + response = requests.get( + self.final_url, headers={"API-KEY": self._api_key_name} + ) response.raise_for_status() except requests.RequestException as e: raise AnalyzerRunException(e) diff --git a/api_app/analyzers_manager/signals.py b/api_app/analyzers_manager/signals.py index 0786028a2b..e574bdc409 100644 --- a/api_app/analyzers_manager/signals.py +++ b/api_app/analyzers_manager/signals.py @@ -1,13 +1,31 @@ -from django.db.models.signals import post_migrate +import logging +import uuid + +from django.conf import settings from django.dispatch import receiver -from api_app.analyzers_manager.apps import AnalyzersManagerConfig from api_app.analyzers_manager.models import AnalyzerConfig +from api_app.signals import migrate_finished +from intel_owl.celery import get_queue_name + +logger = logging.getLogger(__name__) -@receiver(post_migrate, sender=AnalyzersManagerConfig) -def post_migrate_analyzer( - sender, app_config, verbosity, interactive, stdout, using, plan, apps, **kwargs +@receiver(migrate_finished) +def post_migrate_analyzers_manager( + sender, + *args, + check_unapplied: bool = False, + **kwargs, ): - if plan: - AnalyzerConfig.delete_class_cache_keys() + logger.info(f"Post migrate {args} {kwargs}") + if check_unapplied: + return + from intel_owl.tasks import refresh_cache + + refresh_cache.apply_async( + queue=get_queue_name(settings.CONFIG_QUEUE), + MessageGroupId=str(uuid.uuid4()), + priority=3, + args=[AnalyzerConfig.python_path], + ) diff --git a/api_app/classes.py b/api_app/classes.py index 239652752f..051a7354a8 100644 --- a/api_app/classes.py +++ b/api_app/classes.py @@ -84,7 +84,7 @@ def __str__(self): try: return f"({self.__class__.__name__}, job: #{self.job_id})" except AttributeError: - return f"({self.__class__.__name__}" + return f"{self.__class__.__name__}" def config(self, runtime_configuration: typing.Dict): self.__parameters = self._config.read_configured_params( @@ -128,7 +128,9 @@ def after_run_success(self, content: typing.Any): self.report.save(update_fields=["status", "report"]) def log_error(self, e): - if isinstance(e, 
(*self.get_exceptions_to_catch(), SoftTimeLimitExceeded)): + if isinstance( + e, (*self.get_exceptions_to_catch(), SoftTimeLimitExceeded, HTTPError) + ): error_message = self.get_error_message(e) logger.error(error_message) else: @@ -137,15 +139,17 @@ def log_error(self, e): logger.exception(error_message) def after_run_failed(self, e: Exception): - self.log_error(e) self.report.errors.append(str(e)) self.report.status = self.report.Status.FAILED self.report.save(update_fields=["status", "errors"]) - if isinstance(e, HTTPError): - if "429 Client Error" in str(e): - self.disable_for_rate_limit() - else: - logger.info(f"Http error is {str(e)}") + if isinstance(e, HTTPError) and ( + hasattr(e, "response") + and hasattr(e.response, "status_code") + and e.response.status_code == 429 + ): + self.disable_for_rate_limit() + else: + self.log_error(e) if settings.STAGE_CI: raise e @@ -177,7 +181,7 @@ def get_exceptions_to_catch(self) -> list: def get_error_message(self, err, is_base_err=False): """ Returns error message for - *_handle_analyzer_exception* and *_handle_base_exception* fn + *_handle_analyzer_exception* fn """ return ( f"{self}." @@ -209,19 +213,14 @@ def start( # add end time of process self.after_run() - def _handle_exception(self, exc) -> None: - error_message = self.get_error_message(exc) + def _handle_exception(self, exc, is_base_err: bool = False) -> None: + if not is_base_err: + traceback.print_exc() + error_message = self.get_error_message(exc, is_base_err=is_base_err) logger.error(error_message) self.report.errors.append(str(exc)) self.report.status = self.report.Status.FAILED - def _handle_base_exception(self, exc) -> None: - traceback.print_exc() - error_message = self.get_error_message(exc, is_base_err=True) - logger.exception(error_message) - self.report.errors.append(str(exc)) - self.report.status = self.report.Status.FAILED - @classmethod def _monkeypatch(cls, patches: list = None) -> None: """ @@ -265,18 +264,18 @@ def health_check(self, user: User = None) -> bool: if url and url.startswith("http"): if settings.STAGE_CI or settings.MOCK_CONNECTIONS: return True - logger.info(f"Checking url {url} for {self}") + logger.info(f"healthcheck url {url} for {self}") try: # momentarily set this to False to # avoid fails for https services - requests.head(url, timeout=10, verify=False) + response = requests.head(url, timeout=10, verify=False) + response.raise_for_status() except ( requests.exceptions.ConnectionError, requests.exceptions.Timeout, + requests.exceptions.HTTPError, ) as e: - logger.info( - f"Health check failed: url {url}" f" for {self}. Error: {e}" - ) + logger.info(f"healthcheck failed: url {url}" f" for {self}. 
Error: {e}") return False else: return True @@ -293,7 +292,12 @@ def disable_for_rate_limit(self): name__contains="api_key" ).first() # if we do not have api keys OR the api key was org based - if not api_key_parameter or api_key_parameter.is_from_org: + # OR if the api key is not actually required and we do not have it set + if ( + not api_key_parameter + or api_key_parameter.is_from_org + or (not api_key_parameter.required and not api_key_parameter.value) + ): org_configuration.disable_for_rate_limit() else: logger.warning( diff --git a/api_app/connectors_manager/connectors/abuse_submitter.py b/api_app/connectors_manager/connectors/abuse_submitter.py new file mode 100644 index 0000000000..98846e683f --- /dev/null +++ b/api_app/connectors_manager/connectors/abuse_submitter.py @@ -0,0 +1,18 @@ +from api_app.connectors_manager.connectors.email_sender import EmailSender + + +class AbuseSubmitter(EmailSender): + @property + def subject(self) -> str: + return ( + "Takedown domain request for " + f"{self._job.parent_job.parent_job.observable_name}" + ) + + @property + def body(self) -> str: + return ( + f"Domain {self._job.parent_job.parent_job.observable_name} " + "has been detected as malicious by our team. We kindly request you to take " + "it down as soon as possible." + ) diff --git a/api_app/connectors_manager/connectors/email_sender.py b/api_app/connectors_manager/connectors/email_sender.py new file mode 100644 index 0000000000..e55c02a551 --- /dev/null +++ b/api_app/connectors_manager/connectors/email_sender.py @@ -0,0 +1,52 @@ +from django.core.mail import EmailMessage + +from api_app.connectors_manager.classes import Connector +from intel_owl.settings import DEFAULT_FROM_EMAIL +from tests.mock_utils import if_mock_connections, patch + + +class EmailSender(Connector): + sender: str + subject: str + header: str + body: str + footer: str + + def run(self) -> dict: + if self.sender: + sender = self.sender + else: + sender = DEFAULT_FROM_EMAIL + body = self.body + if hasattr(self, "header") and self.header: + body = self.header + "\n\n" + body + if hasattr(self, "footer") and self.footer: + body = body + "\n\n" + self.footer + base_eml = EmailMessage( + subject=self.subject, + from_email=sender, + to=[self._job.observable_name], + body=body, + ) + base_eml.send() + return { + "subject": base_eml.subject, + "from": base_eml.from_email, + "to": base_eml.to, + "body": base_eml.body, + } + + def update(self) -> bool: + pass + + @classmethod + def _monkeypatch(cls): + patches = [ + if_mock_connections( + patch( + "django.core.mail.EmailMessage.send", + return_value="Email sent", + ) + ) + ] + return super()._monkeypatch(patches=patches) diff --git a/api_app/connectors_manager/migrations/0030_connector_config_emailsender.py b/api_app/connectors_manager/migrations/0030_connector_config_emailsender.py new file mode 100644 index 0000000000..08d89f3783 --- /dev/null +++ b/api_app/connectors_manager/migrations/0030_connector_config_emailsender.py @@ -0,0 +1,167 @@ +from django.db import migrations +from django.db.models.fields.related_descriptors import ( + ForwardManyToOneDescriptor, + ForwardOneToOneDescriptor, + ManyToManyDescriptor, +) + +plugin = { + "python_module": { + "health_check_schedule": None, + "update_schedule": None, + "module": "email_sender.EmailSender", + "base_path": "api_app.connectors_manager.connectors", + }, + "name": "EmailSender", + "description": "Send a generic email", + "disabled": False, + "soft_time_limit": 60, + "routing_key": "default", + "health_check_status": True, 
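In the `EmailSender` connector above, `subject` and `body` are required while `sender`, `header` and `footer` are optional; the header and footer are wrapped around the body, and the recipient is the job's observable, i.e. the email address under analysis. A standalone sketch of the composition rule, with made-up values:

    def compose_body(body: str, header: str = "", footer: str = "") -> str:
        # mirrors EmailSender.run(): optional greeting above, optional sign-off below
        if header:
            body = header + "\n\n" + body
        if footer:
            body = body + "\n\n" + footer
        return body


    print(compose_body(
        "Domain evil.example.com has been detected as malicious by our team.",
        header="Hello abuse team,",
        footer="Regards",
    ))
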
+ "maximum_tlp": "AMBER", + "run_on_failure": True, + "model": "connectors_manager.ConnectorConfig", +} + +params = [ + { + "python_module": { + "module": "email_sender.EmailSender", + "base_path": "api_app.connectors_manager.connectors", + }, + "name": "subject", + "type": "str", + "description": "Email subject", + "is_secret": False, + "required": True, + }, + { + "python_module": { + "module": "email_sender.EmailSender", + "base_path": "api_app.connectors_manager.connectors", + }, + "name": "header", + "type": "str", + "description": "Email header for initial greetings", + "is_secret": False, + "required": False, + }, + { + "python_module": { + "module": "email_sender.EmailSender", + "base_path": "api_app.connectors_manager.connectors", + }, + "name": "body", + "type": "str", + "description": "Email body", + "is_secret": False, + "required": True, + }, + { + "python_module": { + "module": "email_sender.EmailSender", + "base_path": "api_app.connectors_manager.connectors", + }, + "name": "footer", + "type": "str", + "description": "Email footer for final greetings", + "is_secret": False, + "required": False, + }, + { + "python_module": { + "module": "email_sender.EmailSender", + "base_path": "api_app.connectors_manager.connectors", + }, + "name": "sender", + "type": "str", + "description": "Email sender", + "is_secret": False, + "required": False, + }, +] + +values = [] + + +def _get_real_obj(Model, field, value): + def _get_obj(Model, other_model, value): + if isinstance(value, dict): + real_vals = {} + for key, real_val in value.items(): + real_vals[key] = _get_real_obj(other_model, key, real_val) + value = other_model.objects.get_or_create(**real_vals)[0] + # it is just the primary key serialized + else: + if isinstance(value, int): + if Model.__name__ == "PluginConfig": + value = other_model.objects.get(name=plugin["name"]) + else: + value = other_model.objects.get(pk=value) + else: + value = other_model.objects.get(name=value) + return value + + if ( + type(getattr(Model, field)) + in [ForwardManyToOneDescriptor, ForwardOneToOneDescriptor] + and value + ): + other_model = getattr(Model, field).get_queryset().model + value = _get_obj(Model, other_model, value) + elif type(getattr(Model, field)) in [ManyToManyDescriptor] and value: + other_model = getattr(Model, field).rel.model + value = [_get_obj(Model, other_model, val) for val in value] + return value + + +def _create_object(Model, data): + mtm, no_mtm = {}, {} + for field, value in data.items(): + value = _get_real_obj(Model, field, value) + if type(getattr(Model, field)) is ManyToManyDescriptor: + mtm[field] = value + else: + no_mtm[field] = value + try: + o = Model.objects.get(**no_mtm) + except Model.DoesNotExist: + o = Model(**no_mtm) + o.full_clean() + o.save() + for field, value in mtm.items(): + attribute = getattr(o, field) + if value is not None: + attribute.set(value) + return False + return True + + +def migrate(apps, schema_editor): + Parameter = apps.get_model("api_app", "Parameter") + PluginConfig = apps.get_model("api_app", "PluginConfig") + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + if not Model.objects.filter(name=plugin["name"]).exists(): + exists = _create_object(Model, plugin) + if not exists: + for param in params: + _create_object(Parameter, param) + for value in values: + _create_object(PluginConfig, value) + + +def reverse_migrate(apps, schema_editor): + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + 
Model.objects.get(name=plugin["name"]).delete() + + +class Migration(migrations.Migration): + atomic = False + dependencies = [ + ("api_app", "0062_alter_parameter_python_module"), + ("connectors_manager", "0029_4_change_primary_key"), + ] + + operations = [migrations.RunPython(migrate, reverse_migrate)] diff --git a/api_app/connectors_manager/migrations/0031_connector_config_abusesubmitter.py b/api_app/connectors_manager/migrations/0031_connector_config_abusesubmitter.py new file mode 100644 index 0000000000..dc20e9ae98 --- /dev/null +++ b/api_app/connectors_manager/migrations/0031_connector_config_abusesubmitter.py @@ -0,0 +1,145 @@ +from django.db import migrations +from django.db.models.fields.related_descriptors import ( + ForwardManyToOneDescriptor, + ForwardOneToOneDescriptor, + ManyToManyDescriptor, +) + +plugin = { + "python_module": { + "health_check_schedule": None, + "update_schedule": None, + "module": "abuse_submitter.AbuseSubmitter", + "base_path": "api_app.connectors_manager.connectors", + }, + "name": "AbuseSubmitter", + "description": "Send an email to request to take down a malicious domain.", + "disabled": False, + "soft_time_limit": 60, + "routing_key": "default", + "health_check_status": True, + "maximum_tlp": "AMBER", + "run_on_failure": True, + "model": "connectors_manager.ConnectorConfig", +} + +params = [ + { + "python_module": { + "module": "abuse_submitter.AbuseSubmitter", + "base_path": "api_app.connectors_manager.connectors", + }, + "name": "header", + "type": "str", + "description": "Email header for initial greetings", + "is_secret": False, + "required": False, + }, + { + "python_module": { + "module": "abuse_submitter.AbuseSubmitter", + "base_path": "api_app.connectors_manager.connectors", + }, + "name": "footer", + "type": "str", + "description": "Email footer for final greetings", + "is_secret": False, + "required": False, + }, + { + "python_module": { + "module": "abuse_submitter.AbuseSubmitter", + "base_path": "api_app.connectors_manager.connectors", + }, + "name": "sender", + "type": "str", + "description": "Email sender", + "is_secret": False, + "required": False, + }, +] + +values = [] + + +def _get_real_obj(Model, field, value): + def _get_obj(Model, other_model, value): + if isinstance(value, dict): + real_vals = {} + for key, real_val in value.items(): + real_vals[key] = _get_real_obj(other_model, key, real_val) + value = other_model.objects.get_or_create(**real_vals)[0] + # it is just the primary key serialized + else: + if isinstance(value, int): + if Model.__name__ == "PluginConfig": + value = other_model.objects.get(name=plugin["name"]) + else: + value = other_model.objects.get(pk=value) + else: + value = other_model.objects.get(name=value) + return value + + if ( + type(getattr(Model, field)) + in [ForwardManyToOneDescriptor, ForwardOneToOneDescriptor] + and value + ): + other_model = getattr(Model, field).get_queryset().model + value = _get_obj(Model, other_model, value) + elif type(getattr(Model, field)) in [ManyToManyDescriptor] and value: + other_model = getattr(Model, field).rel.model + value = [_get_obj(Model, other_model, val) for val in value] + return value + + +def _create_object(Model, data): + mtm, no_mtm = {}, {} + for field, value in data.items(): + value = _get_real_obj(Model, field, value) + if type(getattr(Model, field)) is ManyToManyDescriptor: + mtm[field] = value + else: + no_mtm[field] = value + try: + o = Model.objects.get(**no_mtm) + except Model.DoesNotExist: + o = Model(**no_mtm) + o.full_clean() + o.save() + for 
field, value in mtm.items(): + attribute = getattr(o, field) + if value is not None: + attribute.set(value) + return False + return True + + +def migrate(apps, schema_editor): + Parameter = apps.get_model("api_app", "Parameter") + PluginConfig = apps.get_model("api_app", "PluginConfig") + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + if not Model.objects.filter(name=plugin["name"]).exists(): + exists = _create_object(Model, plugin) + if not exists: + for param in params: + _create_object(Parameter, param) + for value in values: + _create_object(PluginConfig, value) + + +def reverse_migrate(apps, schema_editor): + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + Model.objects.get(name=plugin["name"]).delete() + + +class Migration(migrations.Migration): + atomic = False + dependencies = [ + ("api_app", "0062_alter_parameter_python_module"), + ("connectors_manager", "0030_connector_config_emailsender"), + ] + + operations = [migrations.RunPython(migrate, reverse_migrate)] diff --git a/api_app/connectors_manager/signals.py b/api_app/connectors_manager/signals.py index 99a32d2055..789d966d03 100644 --- a/api_app/connectors_manager/signals.py +++ b/api_app/connectors_manager/signals.py @@ -1,13 +1,31 @@ -from django.db.models.signals import post_migrate +import logging +import uuid + +from django.conf import settings from django.dispatch import receiver -from api_app.connectors_manager.apps import ConnectorsManagerConfig from api_app.connectors_manager.models import ConnectorConfig +from api_app.signals import migrate_finished +from intel_owl.celery import get_queue_name + +logger = logging.getLogger(__name__) -@receiver(post_migrate, sender=ConnectorsManagerConfig) -def post_migrate_connector( - sender, app_config, verbosity, interactive, stdout, using, plan, apps, **kwargs +@receiver(migrate_finished) +def post_migrate_connectors_manager( + sender, + *args, + check_unapplied: bool = False, + **kwargs, ): - if plan: - ConnectorConfig.delete_class_cache_keys() + logger.info(f"Post migrate {args} {kwargs}") + if check_unapplied: + return + from intel_owl.tasks import refresh_cache + + refresh_cache.apply_async( + queue=get_queue_name(settings.CONFIG_QUEUE), + MessageGroupId=str(uuid.uuid4()), + priority=3, + args=[ConnectorConfig.python_path], + ) diff --git a/api_app/ingestors_manager/serializers.py b/api_app/ingestors_manager/serializers.py index 8148d311ee..76c85a177c 100644 --- a/api_app/ingestors_manager/serializers.py +++ b/api_app/ingestors_manager/serializers.py @@ -32,7 +32,7 @@ def to_internal_value(self, data): class IngestorConfigSerializerForMigration(PythonConfigSerializerForMigration): schedule = CrontabScheduleSerializer(read_only=True) periodic_task = PeriodicTaskSerializer(read_only=True) - user = UserSerializer(read_only=True) + user = UserSerializer(read_only=True, omit=["full_name"]) class Meta: model = IngestorConfig diff --git a/api_app/ingestors_manager/signals.py b/api_app/ingestors_manager/signals.py index 6ceeb177d7..69b9a4325a 100644 --- a/api_app/ingestors_manager/signals.py +++ b/api_app/ingestors_manager/signals.py @@ -1,27 +1,37 @@ import json +import logging +import uuid -from django.db.models.signals import post_delete, post_migrate, pre_save +from django.conf import settings +from django.db.models.signals import post_delete, pre_save from django.dispatch import receiver from django_celery_beat.models import PeriodicTask -from api_app.ingestors_manager.apps import IngestorsManagerConfig 
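The signal rewiring shown here for the connectors and ingestors managers is the same one applied to the analyzers and pivots managers elsewhere in this PR: the per-app Django `post_migrate` receiver that deleted cache keys inline is replaced by a receiver on the custom `migrate_finished` signal, which instead enqueues the `refresh_cache` Celery task for that manager's config class. A condensed sketch of the shared shape, where `ExampleConfig` stands in for the concrete config model:

    import uuid

    from django.conf import settings
    from django.dispatch import receiver

    from api_app.signals import migrate_finished
    from intel_owl.celery import get_queue_name


    @receiver(migrate_finished)
    def post_migrate_example_manager(sender, *args, check_unapplied: bool = False, **kwargs):
        if check_unapplied:
            # a `migrate --check` run: nothing was applied, so nothing to refresh
            return
        from intel_owl.tasks import refresh_cache

        refresh_cache.apply_async(
            queue=get_queue_name(settings.CONFIG_QUEUE),
            MessageGroupId=str(uuid.uuid4()),
            priority=3,
            args=[ExampleConfig.python_path],  # e.g. IngestorConfig.python_path
        )
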
from api_app.ingestors_manager.models import IngestorConfig +from api_app.signals import migrate_finished from certego_saas.apps.user.models import User +from intel_owl.celery import get_queue_name + +logger = logging.getLogger(__name__) @receiver(pre_save, sender=IngestorConfig) def pre_save_ingestor_config(sender, instance: IngestorConfig, *args, **kwargs): - instance.user = User.objects.get_or_create( - username=f"{instance.name.title()}Ingestor" - )[0] + from intel_owl.tasks import execute_ingestor + + user = User.objects.get_or_create(username=f"{instance.name.title()}Ingestor")[0] + user.profile.task_priority = 7 + user.profile.is_robot = True + user.profile.save() + instance.user = user periodic_task = PeriodicTask.objects.update_or_create( name=f"{instance.name.title()}Ingestor", - task="intel_owl.tasks.execute_ingestor", + task=f"{execute_ingestor.__module__}.{execute_ingestor.__name__}", defaults={ "crontab": instance.schedule, "queue": instance.queue, - "kwargs": json.dumps({"config_pk": instance.name}), + "kwargs": json.dumps({"config_name": instance.name}), "enabled": not instance.disabled, }, )[0] @@ -37,9 +47,21 @@ def post_delete_ingestor_config( instance.user.delete() -@receiver(post_migrate, sender=IngestorsManagerConfig) -def post_migrate_ingestor( - sender, app_config, verbosity, interactive, stdout, using, plan, apps, **kwargs +@receiver(migrate_finished) +def post_migrate_ingestors_manager( + sender, + *args, + check_unapplied: bool = False, + **kwargs, ): - if plan: - IngestorConfig.delete_class_cache_keys() + logger.info(f"Post migrate {args} {kwargs}") + if check_unapplied: + return + from intel_owl.tasks import refresh_cache + + refresh_cache.apply_async( + queue=get_queue_name(settings.CONFIG_QUEUE), + MessageGroupId=str(uuid.uuid4()), + priority=3, + args=[IngestorConfig.python_path], + ) diff --git a/api_app/management/commands/celery_reload.py b/api_app/management/commands/celery_reload.py index 8389edddf0..5dc59480a9 100644 --- a/api_app/management/commands/celery_reload.py +++ b/api_app/management/commands/celery_reload.py @@ -1,3 +1,4 @@ +import logging import shlex import subprocess @@ -5,6 +6,8 @@ from django.core.management.base import BaseCommand from django.utils import autoreload +logger = logging.getLogger(__name__) + class Command(BaseCommand): @staticmethod @@ -15,13 +18,15 @@ def add_arguments(parser): def handle(self, *args, **options): if not settings.DEBUG: - self.stdout.write(self.style.ERROR("Not runnable if in production mode")) - + self.stdout.write(self.style.ERROR("Not runnable in production mode")) + return + logger.info("Starting celery with autoreload") autoreload.run_with_reloader(self._restart_celery, argument=options["command"]) def _restart_celery(self, argument): self.run("pkill celery") self.run(f"/usr/local/bin/celery {argument}") - def run(self, cmd): + @staticmethod + def run(cmd): subprocess.run(shlex.split(cmd), check=True) diff --git a/api_app/management/commands/migrate.py b/api_app/management/commands/migrate.py index f5edf9b9fb..04233578a1 100644 --- a/api_app/management/commands/migrate.py +++ b/api_app/management/commands/migrate.py @@ -8,4 +8,4 @@ def handle(self, *args, **options): super().handle(*args, **options) from api_app.signals import migrate_finished - migrate_finished.send(self) + migrate_finished.send(self, **options) diff --git a/api_app/migrations/0061_job_depth_analysis.py b/api_app/migrations/0061_job_depth_analysis.py index 454d961d2d..85d8667a4a 100644 --- 
a/api_app/migrations/0061_job_depth_analysis.py +++ b/api_app/migrations/0061_job_depth_analysis.py @@ -61,9 +61,6 @@ class Migration(migrations.Migration): operations = [ migrations.SeparateDatabaseAndState( database_operations=[ - migrations.RunSQL( - "CREATE UNIQUE INDEX IF NOT EXISTS api_app_job_path_f745a786_uniq ON api_app_job USING btree (path)" - ), migrations.RunPython(migrate, migrations.RunPython.noop), ], state_operations=[ diff --git a/api_app/migrations/0062_alter_parameter_python_module.py b/api_app/migrations/0062_alter_parameter_python_module.py index cdb14ec0f7..c07d000670 100644 --- a/api_app/migrations/0062_alter_parameter_python_module.py +++ b/api_app/migrations/0062_alter_parameter_python_module.py @@ -19,4 +19,9 @@ class Migration(migrations.Migration): to="api_app.pythonmodule", ), ), + migrations.AlterField( + model_name="job", + name="path", + field=models.CharField(max_length=255, unique=True), + ), ] diff --git a/api_app/mixins.py b/api_app/mixins.py index bb5c67d658..24c28453ec 100644 --- a/api_app/mixins.py +++ b/api_app/mixins.py @@ -19,9 +19,7 @@ def list(self, request, *args, **kwargs): page = self.paginate_queryset(queryset) if page is not None: - objects = self.serializer_class.Meta.model.objects.filter( - pk__in=[plugin.pk for plugin in page] - ) + objects = queryset.filter(pk__in=[plugin.pk for plugin in page]) if "page" in request.query_params and "page_size" in request.query_params: cache_name += ( f"_{request.query_params['page']}_" diff --git a/api_app/models.py b/api_app/models.py index 234d50b28b..545b137d5b 100644 --- a/api_app/models.py +++ b/api_app/models.py @@ -354,14 +354,7 @@ def sha256(self) -> str: @cached_property def parent_job(self) -> Optional["Job"]: - from api_app.pivots_manager.models import PivotMap - - try: - pm = PivotMap.objects.get(ending_job=self) - except PivotMap.DoesNotExist: - return None - else: - return pm.starting_job + return self.get_parent() @cached_property def sha1(self) -> str: @@ -400,6 +393,7 @@ def retry(self): runner.apply_async( queue=get_queue_name(settings.CONFIG_QUEUE), MessageGroupId=str(uuid.uuid4()), + priority=self.priority, ) def set_final_status(self) -> None: @@ -540,8 +534,13 @@ def _final_status_signature(self) -> Signature: queue=get_queue_name(settings.CONFIG_QUEUE), immutable=True, MessageGroupId=str(uuid.uuid4()), + priority=self.priority, ) + @property + def priority(self): + return self.user.profile.task_priority + def _get_pipeline( self, analyzers: PythonConfigQuerySet, @@ -903,7 +902,7 @@ def disable_for_rate_limit(self): self.rate_limit_enable_task.clocked = clock_schedule self.rate_limit_enable_task.enabled = True self.rate_limit_enable_task.save() - logger.info(f"Disabling {self} for rate limit") + logger.warning(f"Disabling {self} for rate limit") self.save() def disable_manually(self, user: User): @@ -924,7 +923,9 @@ def enable_manually(self, user: User): self.enable() def enable(self): + logger.info(f"Enabling back {self}") self.disabled = False + self.disabled_comment = "" self.save() if self.rate_limit_enable_task: self.rate_limit_enable_task.delete() @@ -941,6 +942,11 @@ def delete_class_cache_keys(cls, user: User = None): logger.debug(f"Deleting cache key {key}") cache.delete(key) + @classmethod + @property + def python_path(cls) -> str: + return f"{cls.__module__}.{cls.__name__}" + class AbstractConfig(ListCachable): objects = AbstractConfigQuerySet.as_manager() @@ -1197,6 +1203,7 @@ def _signature_pipeline_status(cls, job, status: str) -> Signature: 
queue=get_queue_name(settings.CONFIG_QUEUE), immutable=True, MessageGroupId=str(uuid.uuid4()), + priority=job.priority, ) @property @@ -1268,3 +1275,4 @@ def generate_health_check_periodic_task(self): }, )[0] self.health_check_task = periodic_task + self.save() diff --git a/api_app/pivots_manager/classes.py b/api_app/pivots_manager/classes.py index 7f2d9c0ba9..0a37671b94 100644 --- a/api_app/pivots_manager/classes.py +++ b/api_app/pivots_manager/classes.py @@ -47,10 +47,9 @@ def config_model(cls) -> Type[PivotConfig]: def should_run(self) -> Tuple[bool, Optional[str]]: # by default, the pivot run IF every report attached to it was success - result = all( - x.status == self.report_model.Status.SUCCESS.value - for x in self.related_reports - ) + result = not self.related_reports.exclude( + status=self.report_model.Status.SUCCESS.value + ).exists() return ( result, f"All necessary reports{'' if result else ' do not'} have success status", diff --git a/api_app/pivots_manager/migrations/0002_000_self_analyzable.py b/api_app/pivots_manager/migrations/0002_000_self_analyzable.py index a55a6e6d99..dfa4a5df1e 100644 --- a/api_app/pivots_manager/migrations/0002_000_self_analyzable.py +++ b/api_app/pivots_manager/migrations/0002_000_self_analyzable.py @@ -3,7 +3,7 @@ def migrate_python_module_pivot(apps, schema_editor): PythonModule = apps.get_model("api_app", "PythonModule") - PythonModule.objects.create( + PythonModule.objects.update_or_create( module="self_analyzable.SelfAnalyzable", base_path="api_app.pivots_manager.pivots", ) diff --git a/api_app/pivots_manager/migrations/0002_001_compare.py b/api_app/pivots_manager/migrations/0002_001_compare.py index c600a3af18..0bbdf74499 100644 --- a/api_app/pivots_manager/migrations/0002_001_compare.py +++ b/api_app/pivots_manager/migrations/0002_001_compare.py @@ -3,7 +3,7 @@ def migrate_python_module_pivot(apps, schema_editor): PythonModule = apps.get_model("api_app", "PythonModule") - pm = PythonModule.objects.create( + pm, _ = PythonModule.objects.update_or_create( module="compare.Compare", base_path="api_app.pivots_manager.pivots", ) diff --git a/api_app/pivots_manager/migrations/0026_pivot_config_abuseiptosubmission.py b/api_app/pivots_manager/migrations/0026_pivot_config_abuseiptosubmission.py new file mode 100644 index 0000000000..a033750bd0 --- /dev/null +++ b/api_app/pivots_manager/migrations/0026_pivot_config_abuseiptosubmission.py @@ -0,0 +1,150 @@ +from django.db import migrations +from django.db.models.fields.related_descriptors import ( + ForwardManyToOneDescriptor, + ForwardOneToOneDescriptor, + ManyToManyDescriptor, +) + +plugin = { + "python_module": { + "health_check_schedule": None, + "update_schedule": None, + "module": "compare.Compare", + "base_path": "api_app.pivots_manager.pivots", + }, + "related_analyzer_configs": ["Abusix"], + "related_connector_configs": [], + "playbook_to_execute": "Send_Abuse_Email", + "name": "AbuseIpToSubmission", + "description": "This Plugin leverages results from the Abusix analyzer to " + "extract the abuse contacts of an IP address to pivot to " + "the AbuseSubmitter connector.", + "disabled": False, + "soft_time_limit": 60, + "routing_key": "default", + "health_check_status": True, + "model": "pivots_manager.PivotConfig", +} + +params = [ + { + "python_module": { + "module": "compare.Compare", + "base_path": "api_app.pivots_manager.pivots", + }, + "name": "field_to_compare", + "type": "str", + "description": "Dotted path to the field", + "is_secret": False, + "required": True, + }, +] + +values = [ + { 
+ "parameter": { + "python_module": { + "module": "compare.Compare", + "base_path": "api_app.pivots_manager.pivots", + }, + "name": "field_to_compare", + "type": "str", + "description": "Dotted path to the field", + "is_secret": False, + "required": True, + }, + "analyzer_config": None, + "connector_config": None, + "visualizer_config": None, + "ingestor_config": None, + "pivot_config": "AbuseIpToSubmission", + "for_organization": False, + "value": "abuse_contacts.0", + "updated_at": "2024-04-22T14:08:49.711495Z", + "owner": None, + }, +] + + +def _get_real_obj(Model, field, value): + def _get_obj(Model, other_model, value): + if isinstance(value, dict): + real_vals = {} + for key, real_val in value.items(): + real_vals[key] = _get_real_obj(other_model, key, real_val) + value = other_model.objects.get_or_create(**real_vals)[0] + # it is just the primary key serialized + else: + if isinstance(value, int): + if Model.__name__ == "PluginConfig": + value = other_model.objects.get(name=plugin["name"]) + else: + value = other_model.objects.get(pk=value) + else: + value = other_model.objects.get(name=value) + return value + + if ( + type(getattr(Model, field)) + in [ForwardManyToOneDescriptor, ForwardOneToOneDescriptor] + and value + ): + other_model = getattr(Model, field).get_queryset().model + value = _get_obj(Model, other_model, value) + elif type(getattr(Model, field)) in [ManyToManyDescriptor] and value: + other_model = getattr(Model, field).rel.model + value = [_get_obj(Model, other_model, val) for val in value] + return value + + +def _create_object(Model, data): + mtm, no_mtm = {}, {} + for field, value in data.items(): + value = _get_real_obj(Model, field, value) + if type(getattr(Model, field)) is ManyToManyDescriptor: + mtm[field] = value + else: + no_mtm[field] = value + try: + o = Model.objects.get(**no_mtm) + except Model.DoesNotExist: + o = Model(**no_mtm) + o.full_clean() + o.save() + for field, value in mtm.items(): + attribute = getattr(o, field) + if value is not None: + attribute.set(value) + return False + return True + + +def migrate(apps, schema_editor): + Parameter = apps.get_model("api_app", "Parameter") + PluginConfig = apps.get_model("api_app", "PluginConfig") + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + if not Model.objects.filter(name=plugin["name"]).exists(): + exists = _create_object(Model, plugin) + if not exists: + for param in params: + _create_object(Parameter, param) + for value in values: + _create_object(PluginConfig, value) + + +def reverse_migrate(apps, schema_editor): + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + Model.objects.get(name=plugin["name"]).delete() + + +class Migration(migrations.Migration): + atomic = False + dependencies = [ + ("api_app", "0062_alter_parameter_python_module"), + ("pivots_manager", "0025_alter_pivotmap_ending_job"), + ("playbooks_manager", "0033_playbook_config_send_abuse_email"), + ] + + operations = [migrations.RunPython(migrate, reverse_migrate)] diff --git a/api_app/pivots_manager/migrations/0027_pivot_config_takedownrequesttoabuseip.py b/api_app/pivots_manager/migrations/0027_pivot_config_takedownrequesttoabuseip.py new file mode 100644 index 0000000000..0fb56ec01b --- /dev/null +++ b/api_app/pivots_manager/migrations/0027_pivot_config_takedownrequesttoabuseip.py @@ -0,0 +1,155 @@ +from django.db import migrations +from django.db.models.fields.related_descriptors import ( + ForwardManyToOneDescriptor, + ForwardOneToOneDescriptor, + 
ManyToManyDescriptor, +) + +plugin = { + "python_module": { + "health_check_schedule": None, + "update_schedule": None, + "module": "any_compare.AnyCompare", + "base_path": "api_app.pivots_manager.pivots", + }, + "related_analyzer_configs": [ + "Classic_DNS", + "CloudFlare_DNS", + "DNS0_EU", + "Google_DNS", + "Quad9_DNS", + ], + "related_connector_configs": [], + "playbook_to_execute": "Abuse_IP", + "name": "TakedownRequestToAbuseIp", + "description": "This Plugin leverages results from DNS resolver analyzers " + "to extract a valid IP address to pivot to the Abusix analyzer.", + "disabled": False, + "soft_time_limit": 60, + "routing_key": "default", + "health_check_status": True, + "model": "pivots_manager.PivotConfig", +} + +params = [ + { + "python_module": { + "module": "any_compare.AnyCompare", + "base_path": "api_app.pivots_manager.pivots", + }, + "name": "field_to_compare", + "type": "str", + "description": "Dotted path to the field", + "is_secret": False, + "required": True, + }, +] + +values = [ + { + "parameter": { + "python_module": { + "module": "any_compare.AnyCompare", + "base_path": "api_app.pivots_manager.pivots", + }, + "name": "field_to_compare", + "type": "str", + "description": "Dotted path to the field", + "is_secret": False, + "required": True, + }, + "analyzer_config": None, + "connector_config": None, + "visualizer_config": None, + "ingestor_config": None, + "pivot_config": "TakedownRequestToAbuseIp", + "for_organization": False, + "value": "resolutions.0.data", + "updated_at": "2024-04-22T14:08:49.711495Z", + "owner": None, + }, +] + + +def _get_real_obj(Model, field, value): + def _get_obj(Model, other_model, value): + if isinstance(value, dict): + real_vals = {} + for key, real_val in value.items(): + real_vals[key] = _get_real_obj(other_model, key, real_val) + value = other_model.objects.get_or_create(**real_vals)[0] + # it is just the primary key serialized + else: + if isinstance(value, int): + if Model.__name__ == "PluginConfig": + value = other_model.objects.get(name=plugin["name"]) + else: + value = other_model.objects.get(pk=value) + else: + value = other_model.objects.get(name=value) + return value + + if ( + type(getattr(Model, field)) + in [ForwardManyToOneDescriptor, ForwardOneToOneDescriptor] + and value + ): + other_model = getattr(Model, field).get_queryset().model + value = _get_obj(Model, other_model, value) + elif type(getattr(Model, field)) in [ManyToManyDescriptor] and value: + other_model = getattr(Model, field).rel.model + value = [_get_obj(Model, other_model, val) for val in value] + return value + + +def _create_object(Model, data): + mtm, no_mtm = {}, {} + for field, value in data.items(): + value = _get_real_obj(Model, field, value) + if type(getattr(Model, field)) is ManyToManyDescriptor: + mtm[field] = value + else: + no_mtm[field] = value + try: + o = Model.objects.get(**no_mtm) + except Model.DoesNotExist: + o = Model(**no_mtm) + o.full_clean() + o.save() + for field, value in mtm.items(): + attribute = getattr(o, field) + if value is not None: + attribute.set(value) + return False + return True + + +def migrate(apps, schema_editor): + Parameter = apps.get_model("api_app", "Parameter") + PluginConfig = apps.get_model("api_app", "PluginConfig") + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + if not Model.objects.filter(name=plugin["name"]).exists(): + exists = _create_object(Model, plugin) + if not exists: + for param in params: + _create_object(Parameter, param) + for value in values: + 
_create_object(PluginConfig, value) + + +def reverse_migrate(apps, schema_editor): + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + Model.objects.get(name=plugin["name"]).delete() + + +class Migration(migrations.Migration): + atomic = False + dependencies = [ + ("api_app", "0062_alter_parameter_python_module"), + ("pivots_manager", "0026_pivot_config_abuseiptosubmission"), + ("playbooks_manager", "0034_playbook_config_abuse_ip"), + ] + + operations = [migrations.RunPython(migrate, reverse_migrate)] diff --git a/api_app/pivots_manager/pivots/any_compare.py b/api_app/pivots_manager/pivots/any_compare.py new file mode 100644 index 0000000000..c0a23ff64e --- /dev/null +++ b/api_app/pivots_manager/pivots/any_compare.py @@ -0,0 +1,24 @@ +import logging +from typing import Optional, Tuple + +from api_app.pivots_manager.pivots.compare import Compare + +logger = logging.getLogger(__name__) + + +class AnyCompare(Compare): + def should_run(self) -> Tuple[bool, Optional[str]]: + if result := self.related_reports.filter( + status=self.report_model.Status.SUCCESS.value + ).first(): + try: + self._value = self._get_value(self.field_to_compare) + except (RuntimeError, ValueError) as e: + return False, str(e) + return ( + bool(result), + f"All necessary reports{'' if result else ' do not'} have success status", + ) + + def update(self) -> bool: + pass diff --git a/api_app/pivots_manager/pivots/compare.py b/api_app/pivots_manager/pivots/compare.py index 1532470cc6..5df88133e0 100644 --- a/api_app/pivots_manager/pivots/compare.py +++ b/api_app/pivots_manager/pivots/compare.py @@ -6,8 +6,14 @@ class Compare(Pivot): field_to_compare: str + @classmethod + def update(cls) -> bool: + pass + def _get_value(self, field: str) -> Any: - report = self.related_reports.first() + report = self.related_reports.filter( + status=self.report_model.Status.SUCCESS.value + ).first() if not report: raise RuntimeError("No report found") content = report.report @@ -16,7 +22,7 @@ def _get_value(self, field: str) -> Any: try: content = content[key] except TypeError: - if isinstance(content, list): + if isinstance(content, list) and len(content) > 0: content = content[int(key)] else: raise RuntimeError(f"Not found {field}") @@ -26,7 +32,7 @@ def _get_value(self, field: str) -> Any: return content def should_run(self) -> Tuple[bool, Optional[str]]: - if len(list(self.related_reports)) != 1: + if self.related_reports.count() != 1: return ( False, f"Unable to run pivot {self._config.name} " diff --git a/api_app/pivots_manager/signals.py b/api_app/pivots_manager/signals.py index 33b1b187a6..5824c7045b 100644 --- a/api_app/pivots_manager/signals.py +++ b/api_app/pivots_manager/signals.py @@ -1,9 +1,36 @@ +import logging +import uuid + +from django.conf import settings from django.core.exceptions import ValidationError -from django.db.models.signals import m2m_changed, post_migrate, pre_save +from django.db.models.signals import m2m_changed, pre_save from django.dispatch import receiver -from api_app.pivots_manager.apps import PivotsManagerConfig from api_app.pivots_manager.models import PivotConfig +from api_app.signals import migrate_finished +from intel_owl.celery import get_queue_name + +logger = logging.getLogger(__name__) + + +@receiver(migrate_finished) +def post_migrate_pivots_manager( + sender, + *args, + check_unapplied: bool = False, + **kwargs, +): + logger.info(f"Post migrate {args} {kwargs}") + if check_unapplied: + return + from intel_owl.tasks import refresh_cache + + 
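The `Compare._get_value` change above keeps the dotted-path traversal but now reads only from successful reports and raises a clean error on empty lists. The path syntax is what the pivot migrations in this PR configure through `field_to_compare`, e.g. `abuse_contacts.0` and `resolutions.0.data`. A standalone sketch of the traversal logic:

    def get_value(content, field: str):
        # e.g. field="resolutions.0.data" -> content["resolutions"][0]["data"]
        for key in field.split("."):
            try:
                content = content[key]
            except TypeError:
                # lists raise TypeError on string keys: retry with an integer index
                if isinstance(content, list) and len(content) > 0:
                    content = content[int(key)]
                else:
                    raise RuntimeError(f"Not found {field}")
        return content


    report = {"resolutions": [{"data": "203.0.113.10"}]}
    assert get_value(report, "resolutions.0.data") == "203.0.113.10"
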
refresh_cache.apply_async( + queue=get_queue_name(settings.CONFIG_QUEUE), + MessageGroupId=str(uuid.uuid4()), + priority=3, + args=[PivotConfig.python_path], + ) @receiver(pre_save, sender=PivotConfig) @@ -24,14 +51,6 @@ def pre_save_pivot_config( return instance -@receiver(post_migrate, sender=PivotsManagerConfig) -def post_migrate_pivot( - sender, app_config, verbosity, interactive, stdout, using, plan, apps, **kwargs -): - if plan: - PivotConfig.delete_class_cache_keys() - - @receiver(m2m_changed, sender=PivotConfig.related_analyzer_configs.through) def m2m_changed_pivot_config_analyzer_config( sender, diff --git a/api_app/playbooks_manager/admin.py b/api_app/playbooks_manager/admin.py index 046cc18927..64f3a2dfd6 100644 --- a/api_app/playbooks_manager/admin.py +++ b/api_app/playbooks_manager/admin.py @@ -21,9 +21,12 @@ class PlaybookConfigAdminView(AbstractConfigAdminView, ModelWithOwnershipAdminVi "get_pivots", "get_visualizers", "scan_mode", + "starting", ) + ModelWithOwnershipAdminView.list_display list_filter = ( - AbstractConfigAdminView.list_filter + ModelWithOwnershipAdminView.list_filter + AbstractConfigAdminView.list_filter + + ("starting",) + + ModelWithOwnershipAdminView.list_filter ) @staticmethod diff --git a/api_app/playbooks_manager/migrations/0031_add_hfinger_analyzer_free_to_use.py b/api_app/playbooks_manager/migrations/0031_add_hfinger_analyzer_free_to_use.py new file mode 100644 index 0000000000..dfba5cbfe7 --- /dev/null +++ b/api_app/playbooks_manager/migrations/0031_add_hfinger_analyzer_free_to_use.py @@ -0,0 +1,34 @@ +# This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl +# See the file 'LICENSE' for copying permission. + + +from django.db import migrations + + +def migrate(apps, schema_editor): + playbook_config = apps.get_model("playbooks_manager", "PlaybookConfig") + AnalyzerConfig = apps.get_model("analyzers_manager", "AnalyzerConfig") + pc = playbook_config.objects.get(name="FREE_TO_USE_ANALYZERS") + pc.analyzers.add(AnalyzerConfig.objects.get(name="Hfinger").id) + pc.full_clean() + pc.save() + + +def reverse_migrate(apps, schema_editor): + playbook_config = apps.get_model("playbooks_manager", "PlaybookConfig") + AnalyzerConfig = apps.get_model("analyzers_manager", "AnalyzerConfig") + pc = playbook_config.objects.get(name="FREE_TO_USE_ANALYZERS") + pc.analyzers.remove(AnalyzerConfig.objects.get(name="Hfinger").id) + pc.full_clean() + pc.save() + + +class Migration(migrations.Migration): + dependencies = [ + ("playbooks_manager", "0030_add_tweetfeeds_to_free_analyzers"), + ("analyzers_manager", "0078_analyzer_config_hfinger"), + ] + + operations = [ + migrations.RunPython(migrate, reverse_migrate), + ] diff --git a/api_app/playbooks_manager/migrations/0032_delete_dns0_playbook_free_to_use_analyzers.py b/api_app/playbooks_manager/migrations/0032_delete_dns0_playbook_free_to_use_analyzers.py new file mode 100644 index 0000000000..7745a42b94 --- /dev/null +++ b/api_app/playbooks_manager/migrations/0032_delete_dns0_playbook_free_to_use_analyzers.py @@ -0,0 +1,45 @@ +# This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl +# See the file 'LICENSE' for copying permission. 
+ + +from django.db import migrations + + +def migrate(apps, schema_editor): + playbook_config = apps.get_model("playbooks_manager", "PlaybookConfig") + AnalyzerConfig = apps.get_model("analyzers_manager", "AnalyzerConfig") + pc = playbook_config.objects.filter(name="FREE_TO_USE_ANALYZERS").first() + if pc: + for analyzer_config_name in ["DNS0_rrsets_name", "DNS0_names"]: + analyzer_config = AnalyzerConfig.objects.filter( + name=analyzer_config_name + ).first() + if analyzer_config: + pc.analyzers.remove(analyzer_config.id) + pc.full_clean() + pc.save() + + +def reverse_migrate(apps, schema_editor): + playbook_config = apps.get_model("playbooks_manager", "PlaybookConfig") + AnalyzerConfig = apps.get_model("analyzers_manager", "AnalyzerConfig") + pc = playbook_config.objects.filter(name="FREE_TO_USE_ANALYZERS").first() + if pc: + for analyzer_config_name in ["DNS0_rrsets_name", "DNS0_names"]: + analyzer_config = AnalyzerConfig.objects.filter( + name=analyzer_config_name + ).first() + if analyzer_config: + pc.analyzers.add(analyzer_config.id) + pc.full_clean() + pc.save() + + +class Migration(migrations.Migration): + dependencies = [ + ("playbooks_manager", "0031_add_hfinger_analyzer_free_to_use"), + ] + + operations = [ + migrations.RunPython(migrate, reverse_migrate), + ] diff --git a/api_app/playbooks_manager/migrations/0033_playbook_config_send_abuse_email.py b/api_app/playbooks_manager/migrations/0033_playbook_config_send_abuse_email.py new file mode 100644 index 0000000000..2decf60ebb --- /dev/null +++ b/api_app/playbooks_manager/migrations/0033_playbook_config_send_abuse_email.py @@ -0,0 +1,119 @@ +from django.db import migrations +from django.db.models.fields.related_descriptors import ( + ForwardManyToOneDescriptor, + ForwardOneToOneDescriptor, + ManyToManyDescriptor, +) + +plugin = { + "name": "Send_Abuse_Email", + "analyzers": [], + "connectors": ["AbuseSubmitter"], + "pivots": [], + "for_organization": False, + "description": "Playbook containing the AbuseSubmitter connector to send an email " + "to request to take down a malicious domain. 
" + "It is executed after the Abuse_IP playbook.", + "disabled": False, + "type": ["generic"], + "runtime_configuration": { + "pivots": {}, + "analyzers": {}, + "connectors": {}, + "visualizers": {}, + }, + "scan_mode": 1, + "scan_check_time": None, + "tlp": "AMBER", + "owner": None, + "tags": [], + "model": "playbooks_manager.PlaybookConfig", +} + +params = [] + +values = [] + + +def _get_real_obj(Model, field, value): + def _get_obj(Model, other_model, value): + if isinstance(value, dict): + real_vals = {} + for key, real_val in value.items(): + real_vals[key] = _get_real_obj(other_model, key, real_val) + value = other_model.objects.get_or_create(**real_vals)[0] + # it is just the primary key serialized + else: + if isinstance(value, int): + if Model.__name__ == "PluginConfig": + value = other_model.objects.get(name=plugin["name"]) + else: + value = other_model.objects.get(pk=value) + else: + value = other_model.objects.get(name=value) + return value + + if ( + type(getattr(Model, field)) + in [ForwardManyToOneDescriptor, ForwardOneToOneDescriptor] + and value + ): + other_model = getattr(Model, field).get_queryset().model + value = _get_obj(Model, other_model, value) + elif type(getattr(Model, field)) in [ManyToManyDescriptor] and value: + other_model = getattr(Model, field).rel.model + value = [_get_obj(Model, other_model, val) for val in value] + return value + + +def _create_object(Model, data): + mtm, no_mtm = {}, {} + for field, value in data.items(): + value = _get_real_obj(Model, field, value) + if type(getattr(Model, field)) is ManyToManyDescriptor: + mtm[field] = value + else: + no_mtm[field] = value + try: + o = Model.objects.get(**no_mtm) + except Model.DoesNotExist: + o = Model(**no_mtm) + o.full_clean() + o.save() + for field, value in mtm.items(): + attribute = getattr(o, field) + if value is not None: + attribute.set(value) + return False + return True + + +def migrate(apps, schema_editor): + Parameter = apps.get_model("api_app", "Parameter") + PluginConfig = apps.get_model("api_app", "PluginConfig") + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + if not Model.objects.filter(name=plugin["name"]).exists(): + exists = _create_object(Model, plugin) + if not exists: + for param in params: + _create_object(Parameter, param) + for value in values: + _create_object(PluginConfig, value) + + +def reverse_migrate(apps, schema_editor): + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + Model.objects.get(name=plugin["name"]).delete() + + +class Migration(migrations.Migration): + atomic = False + dependencies = [ + ("api_app", "0062_alter_parameter_python_module"), + ("playbooks_manager", "0032_delete_dns0_playbook_free_to_use_analyzers"), + ("connectors_manager", "0031_connector_config_abusesubmitter"), + ] + + operations = [migrations.RunPython(migrate, reverse_migrate)] diff --git a/api_app/playbooks_manager/migrations/0034_playbook_config_abuse_ip.py b/api_app/playbooks_manager/migrations/0034_playbook_config_abuse_ip.py new file mode 100644 index 0000000000..f3b96850b9 --- /dev/null +++ b/api_app/playbooks_manager/migrations/0034_playbook_config_abuse_ip.py @@ -0,0 +1,119 @@ +from django.db import migrations +from django.db.models.fields.related_descriptors import ( + ForwardManyToOneDescriptor, + ForwardOneToOneDescriptor, + ManyToManyDescriptor, +) + +plugin = { + "name": "Abuse_IP", + "analyzers": ["Abusix"], + "connectors": [], + "pivots": ["AbuseIpToSubmission"], + "for_organization": False, + 
"description": "Playbook containing the Abusix analyzer. " + "It is executed after the Takedown_Request playbook.", + "disabled": False, + "type": ["ip"], + "runtime_configuration": { + "pivots": {}, + "analyzers": {}, + "connectors": {}, + "visualizers": {}, + }, + "scan_mode": 1, + "scan_check_time": None, + "tlp": "AMBER", + "owner": None, + "tags": [], + "model": "playbooks_manager.PlaybookConfig", +} + +params = [] + +values = [] + + +def _get_real_obj(Model, field, value): + def _get_obj(Model, other_model, value): + if isinstance(value, dict): + real_vals = {} + for key, real_val in value.items(): + real_vals[key] = _get_real_obj(other_model, key, real_val) + value = other_model.objects.get_or_create(**real_vals)[0] + # it is just the primary key serialized + else: + if isinstance(value, int): + if Model.__name__ == "PluginConfig": + value = other_model.objects.get(name=plugin["name"]) + else: + value = other_model.objects.get(pk=value) + else: + value = other_model.objects.get(name=value) + return value + + if ( + type(getattr(Model, field)) + in [ForwardManyToOneDescriptor, ForwardOneToOneDescriptor] + and value + ): + other_model = getattr(Model, field).get_queryset().model + value = _get_obj(Model, other_model, value) + elif type(getattr(Model, field)) in [ManyToManyDescriptor] and value: + other_model = getattr(Model, field).rel.model + value = [_get_obj(Model, other_model, val) for val in value] + return value + + +def _create_object(Model, data): + mtm, no_mtm = {}, {} + for field, value in data.items(): + value = _get_real_obj(Model, field, value) + if type(getattr(Model, field)) is ManyToManyDescriptor: + mtm[field] = value + else: + no_mtm[field] = value + try: + o = Model.objects.get(**no_mtm) + except Model.DoesNotExist: + o = Model(**no_mtm) + o.full_clean() + o.save() + for field, value in mtm.items(): + attribute = getattr(o, field) + if value is not None: + attribute.set(value) + return False + return True + + +def migrate(apps, schema_editor): + Parameter = apps.get_model("api_app", "Parameter") + PluginConfig = apps.get_model("api_app", "PluginConfig") + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + if not Model.objects.filter(name=plugin["name"]).exists(): + exists = _create_object(Model, plugin) + if not exists: + for param in params: + _create_object(Parameter, param) + for value in values: + _create_object(PluginConfig, value) + + +def reverse_migrate(apps, schema_editor): + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + Model.objects.get(name=plugin["name"]).delete() + + +class Migration(migrations.Migration): + atomic = False + dependencies = [ + ("api_app", "0062_alter_parameter_python_module"), + ("playbooks_manager", "0033_playbook_config_send_abuse_email"), + ("analyzers_manager", "0081_adjust_abusix"), + ("pivots_manager", "0026_pivot_config_abuseiptosubmission"), + ] + + operations = [migrations.RunPython(migrate, reverse_migrate)] diff --git a/api_app/playbooks_manager/migrations/0035_playbook_config_takedown_request.py b/api_app/playbooks_manager/migrations/0035_playbook_config_takedown_request.py new file mode 100644 index 0000000000..7e5cd10229 --- /dev/null +++ b/api_app/playbooks_manager/migrations/0035_playbook_config_takedown_request.py @@ -0,0 +1,124 @@ +from django.db import migrations +from django.db.models.fields.related_descriptors import ( + ForwardManyToOneDescriptor, + ForwardOneToOneDescriptor, + ManyToManyDescriptor, +) + +plugin = { + "name": 
"Takedown_Request", + "analyzers": [ + "Classic_DNS", + "CloudFlare_DNS", + "DNS0_EU", + "Google_DNS", + "Quad9_DNS", + ], + "connectors": [], + "pivots": ["TakedownRequestToAbuseIp"], + "for_organization": False, + "description": "Start investigation to request to take down a malicious domain. " + "A mail will be sent to the domain's abuse contacts found.", + "disabled": False, + "type": ["domain"], + "runtime_configuration": { + "pivots": {}, + "analyzers": {}, + "connectors": {}, + "visualizers": {}, + }, + "scan_mode": 1, + "scan_check_time": None, + "tlp": "AMBER", + "owner": None, + "tags": [], + "model": "playbooks_manager.PlaybookConfig", +} + +params = [] + +values = [] + + +def _get_real_obj(Model, field, value): + def _get_obj(Model, other_model, value): + if isinstance(value, dict): + real_vals = {} + for key, real_val in value.items(): + real_vals[key] = _get_real_obj(other_model, key, real_val) + value = other_model.objects.get_or_create(**real_vals)[0] + # it is just the primary key serialized + else: + if isinstance(value, int): + if Model.__name__ == "PluginConfig": + value = other_model.objects.get(name=plugin["name"]) + else: + value = other_model.objects.get(pk=value) + else: + value = other_model.objects.get(name=value) + return value + + if ( + type(getattr(Model, field)) + in [ForwardManyToOneDescriptor, ForwardOneToOneDescriptor] + and value + ): + other_model = getattr(Model, field).get_queryset().model + value = _get_obj(Model, other_model, value) + elif type(getattr(Model, field)) in [ManyToManyDescriptor] and value: + other_model = getattr(Model, field).rel.model + value = [_get_obj(Model, other_model, val) for val in value] + return value + + +def _create_object(Model, data): + mtm, no_mtm = {}, {} + for field, value in data.items(): + value = _get_real_obj(Model, field, value) + if type(getattr(Model, field)) is ManyToManyDescriptor: + mtm[field] = value + else: + no_mtm[field] = value + try: + o = Model.objects.get(**no_mtm) + except Model.DoesNotExist: + o = Model(**no_mtm) + o.full_clean() + o.save() + for field, value in mtm.items(): + attribute = getattr(o, field) + if value is not None: + attribute.set(value) + return False + return True + + +def migrate(apps, schema_editor): + Parameter = apps.get_model("api_app", "Parameter") + PluginConfig = apps.get_model("api_app", "PluginConfig") + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + if not Model.objects.filter(name=plugin["name"]).exists(): + exists = _create_object(Model, plugin) + if not exists: + for param in params: + _create_object(Parameter, param) + for value in values: + _create_object(PluginConfig, value) + + +def reverse_migrate(apps, schema_editor): + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + Model.objects.get(name=plugin["name"]).delete() + + +class Migration(migrations.Migration): + atomic = False + dependencies = [ + ("api_app", "0062_alter_parameter_python_module"), + ("playbooks_manager", "0034_playbook_config_abuse_ip"), + ("pivots_manager", "0027_pivot_config_takedownrequesttoabuseip"), + ] + + operations = [migrations.RunPython(migrate, reverse_migrate)] diff --git a/api_app/playbooks_manager/migrations/0036_static_analyzer_remove_visualizer.py b/api_app/playbooks_manager/migrations/0036_static_analyzer_remove_visualizer.py new file mode 100644 index 0000000000..e923825799 --- /dev/null +++ b/api_app/playbooks_manager/migrations/0036_static_analyzer_remove_visualizer.py @@ -0,0 +1,40 @@ +# This file is 
a part of IntelOwl https://github.com/intelowlproject/IntelOwl +# See the file 'LICENSE' for copying permission. + + +from django.db import migrations + + +def migrate(apps, schema_editor): + playbook_config = apps.get_model("playbooks_manager", "PlaybookConfig") + visualizer_config = apps.get_model("visualizers_manager", "VisualizerConfig") + vc = visualizer_config.objects.filter(name="Yara").first() + if vc: + pc = playbook_config.objects.filter(name="Sample_Static_Analysis").first() + if pc: + vc.playbooks.remove(pc.id) + vc.full_clean() + vc.save() + + +def reverse_migrate(apps, schema_editor): + playbook_config = apps.get_model("playbooks_manager", "PlaybookConfig") + visualizer_config = apps.get_model("visualizers_manager", "VisualizerConfig") + vc = visualizer_config.objects.filter(name="Yara").first() + if vc: + pc = playbook_config.objects.filter(name="Sample_Static_Analysis").first() + if pc: + vc.playbooks.add(pc.id) + vc.full_clean() + vc.save() + + +class Migration(migrations.Migration): + dependencies = [ + ("playbooks_manager", "0035_playbook_config_takedown_request"), + ("visualizers_manager", "0037_4_change_primary_key"), + ] + + operations = [ + migrations.RunPython(migrate, reverse_migrate), + ] diff --git a/api_app/playbooks_manager/migrations/0037_playbookconfig_starting.py b/api_app/playbooks_manager/migrations/0037_playbookconfig_starting.py new file mode 100644 index 0000000000..843cd3db65 --- /dev/null +++ b/api_app/playbooks_manager/migrations/0037_playbookconfig_starting.py @@ -0,0 +1,20 @@ +# Generated by Django 4.2.11 on 2024-05-07 13:54 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("playbooks_manager", "0036_static_analyzer_remove_visualizer"), + ] + + operations = [ + migrations.AddField( + model_name="playbookconfig", + name="starting", + field=models.BooleanField( + default=True, + help_text="If False, the playbook can only be executed by pivots", + ), + ), + ] diff --git a/api_app/playbooks_manager/migrations/0038_playbooks_not_starting.py b/api_app/playbooks_manager/migrations/0038_playbooks_not_starting.py new file mode 100644 index 0000000000..4099087e6e --- /dev/null +++ b/api_app/playbooks_manager/migrations/0038_playbooks_not_starting.py @@ -0,0 +1,29 @@ +# Generated by Django 4.2.11 on 2024-05-07 13:54 + +from django.db import migrations + +playbooks_to_change = ["Abuse_IP"] + + +def migrate(apps, schema_editor): + PlaybookConfig = apps.get_model("playbooks_manager", "PlaybookConfig") + for pc in PlaybookConfig.objects.filter(name__in=playbooks_to_change): + pc.starting = False + pc.save() + + +def reverse_migrate(apps, schema_editor): + PlaybookConfig = apps.get_model("playbooks_manager", "PlaybookConfig") + for pc in PlaybookConfig.objects.filter(name__in=playbooks_to_change): + pc.starting = True + pc.save() + + +class Migration(migrations.Migration): + dependencies = [ + ("playbooks_manager", "0037_playbookconfig_starting"), + ] + + operations = [ + migrations.RunPython(migrate, reverse_migrate), + ] diff --git a/api_app/playbooks_manager/migrations/0039_alter_playbookconfig_scan_check_time_and_more.py b/api_app/playbooks_manager/migrations/0039_alter_playbookconfig_scan_check_time_and_more.py new file mode 100644 index 0000000000..fdd67c6e3e --- /dev/null +++ b/api_app/playbooks_manager/migrations/0039_alter_playbookconfig_scan_check_time_and_more.py @@ -0,0 +1,41 @@ +# Generated by Django 4.2.11 on 2024-05-09 12:55 + +import datetime + +from django.db import migrations, models + + 
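+# Data migration: non-starting playbooks must always force a new analysis
+# (see PlaybookConfig.clean_starting in models.py), so existing rows with
+# starting=False are normalized to scan_mode=1 and a cleared scan_check_time.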
+def migrate(apps, schema_editor):
+    PlaybookConfig = apps.get_model("playbooks_manager", "PlaybookConfig")
+    PlaybookConfig.objects.filter(starting=False).update(
+        scan_mode=1, scan_check_time=None
+    )
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("playbooks_manager", "0038_playbooks_not_starting"),
+    ]
+
+    operations = [
+        migrations.RunPython(migrate, migrations.RunPython.noop),
+        migrations.AlterField(
+            model_name="playbookconfig",
+            name="scan_check_time",
+            field=models.DurationField(
+                blank=True,
+                default=datetime.timedelta(days=1),
+                help_text="Time range checked if the scan_mode is set to `check_previous_analysis`",
+                null=True,
+            ),
+        ),
+        migrations.AlterField(
+            model_name="playbookconfig",
+            name="scan_mode",
+            field=models.IntegerField(
+                choices=[(1, "Force New Analysis"), (2, "Check Previous Analysis")],
+                default=2,
+                help_text="If it's not a starting playbook, this must be set to `force_new_analysis`",
+            ),
+        ),
+    ]
diff --git a/api_app/playbooks_manager/migrations/0040_alter_domain_reputation_playbook.py b/api_app/playbooks_manager/migrations/0040_alter_domain_reputation_playbook.py
new file mode 100644
index 0000000000..53b73bf948
--- /dev/null
+++ b/api_app/playbooks_manager/migrations/0040_alter_domain_reputation_playbook.py
@@ -0,0 +1,35 @@
+from django.db import migrations
+
+
+def migrate(apps, schema_editor):
+    playbook_config = apps.get_model("playbooks_manager", "PlaybookConfig")
+    analyzer_config = apps.get_model("analyzers_manager", "AnalyzerConfig")
+    pc = playbook_config.objects.filter(name="Popular_URL_Reputation_Services").first()
+    if pc:
+        ac = analyzer_config.objects.filter(name="Tranco").first()
+        if ac:
+            pc.analyzers.add(ac)
+            pc.full_clean()
+            pc.save()
+
+
+def reverse_migrate(apps, schema_editor):
+    playbook_config = apps.get_model("playbooks_manager", "PlaybookConfig")
+    analyzer_config = apps.get_model("analyzers_manager", "AnalyzerConfig")
+    pc = playbook_config.objects.filter(name="Popular_URL_Reputation_Services").first()
+    if pc:
+        ac = analyzer_config.objects.filter(name="Tranco").first()
+        if ac:
+            pc.analyzers.remove(ac)
+            pc.full_clean()
+            pc.save()
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("playbooks_manager", "0039_alter_playbookconfig_scan_check_time_and_more"),
+    ]
+
+    operations = [
+        migrations.RunPython(migrate, reverse_migrate),
+    ]
diff --git a/api_app/playbooks_manager/migrations/0041_add_permhash_to_free_analyzers_and_static_analyzers.py b/api_app/playbooks_manager/migrations/0041_add_permhash_to_free_analyzers_and_static_analyzers.py
new file mode 100644
index 0000000000..c6913d3777
--- /dev/null
+++ b/api_app/playbooks_manager/migrations/0041_add_permhash_to_free_analyzers_and_static_analyzers.py
@@ -0,0 +1,42 @@
+# This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl
+# See the file 'LICENSE' for copying permission.
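+#
+# Data migration: add the new Permhash file analyzer to both the
+# FREE_TO_USE_ANALYZERS and Sample_Static_Analysis playbooks.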
+ + +from django.db import migrations + + +def migrate(apps, schema_editor): + playbook_config = apps.get_model("playbooks_manager", "PlaybookConfig") + AnalyzerConfig = apps.get_model("analyzers_manager", "AnalyzerConfig") + pc1 = playbook_config.objects.get(name="FREE_TO_USE_ANALYZERS") + pc1.analyzers.add(AnalyzerConfig.objects.get(name="Permhash").id) + pc1.full_clean() + pc1.save() + pc2 = playbook_config.objects.get(name="Sample_Static_Analysis") + pc2.analyzers.add(AnalyzerConfig.objects.get(name="Permhash").id) + pc2.full_clean() + pc2.save() + + +def reverse_migrate(apps, schema_editor): + playbook_config = apps.get_model("playbooks_manager", "PlaybookConfig") + AnalyzerConfig = apps.get_model("analyzers_manager", "AnalyzerConfig") + pc1 = playbook_config.objects.get(name="FREE_TO_USE_ANALYZERS") + pc1.analyzers.remove(AnalyzerConfig.objects.get(name="Permhash").id) + pc1.full_clean() + pc1.save() + pc2 = playbook_config.objects.get(name="Sample_Static_Analysis") + pc2.analyzers.remove(AnalyzerConfig.objects.get(name="Permhash").id) + pc2.full_clean() + pc2.save() + + +class Migration(migrations.Migration): + dependencies = [ + ("playbooks_manager", "0040_alter_domain_reputation_playbook"), + ("analyzers_manager", "0085_analyzer_config_permhash"), + ] + + operations = [ + migrations.RunPython(migrate, reverse_migrate), + ] diff --git a/api_app/playbooks_manager/migrations/0042_add_blint_to_free_analyzers_and_static_analyzers.py b/api_app/playbooks_manager/migrations/0042_add_blint_to_free_analyzers_and_static_analyzers.py new file mode 100644 index 0000000000..450a9233bc --- /dev/null +++ b/api_app/playbooks_manager/migrations/0042_add_blint_to_free_analyzers_and_static_analyzers.py @@ -0,0 +1,49 @@ +# This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl +# See the file 'LICENSE' for copying permission. 
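+#
+# Data migration: add the new Blint static-analysis analyzer to both the
+# FREE_TO_USE_ANALYZERS and Sample_Static_Analysis playbooks.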
+
+
+from django.db import migrations
+
+
+def migrate(apps, schema_editor):
+    playbook_config = apps.get_model("playbooks_manager", "PlaybookConfig")
+    AnalyzerConfig = apps.get_model("analyzers_manager", "AnalyzerConfig")
+
+    pc = playbook_config.objects.get(name="FREE_TO_USE_ANALYZERS")
+    pc.analyzers.add(AnalyzerConfig.objects.get(name="Blint").id)
+    pc.full_clean()
+    pc.save()
+
+    pc = playbook_config.objects.get(name="Sample_Static_Analysis")
+    pc.analyzers.add(AnalyzerConfig.objects.get(name="Blint").id)
+    pc.full_clean()
+    pc.save()
+
+
+def reverse_migrate(apps, schema_editor):
+    playbook_config = apps.get_model("playbooks_manager", "PlaybookConfig")
+    AnalyzerConfig = apps.get_model("analyzers_manager", "AnalyzerConfig")
+
+    pc = playbook_config.objects.get(name="FREE_TO_USE_ANALYZERS")
+    pc.analyzers.remove(AnalyzerConfig.objects.get(name="Blint").id)
+    pc.full_clean()
+    pc.save()
+
+    pc = playbook_config.objects.get(name="Sample_Static_Analysis")
+    pc.analyzers.remove(AnalyzerConfig.objects.get(name="Blint").id)
+    pc.full_clean()
+    pc.save()
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        (
+            "playbooks_manager",
+            "0041_add_permhash_to_free_analyzers_and_static_analyzers",
+        ),
+        ("analyzers_manager", "0086_analyzer_config_blint"),
+    ]
+
+    operations = [
+        migrations.RunPython(migrate, reverse_migrate),
+    ]
diff --git a/api_app/playbooks_manager/models.py b/api_app/playbooks_manager/models.py
index 0788e5b514..f31b56502e 100644
--- a/api_app/playbooks_manager/models.py
+++ b/api_app/playbooks_manager/models.py
@@ -42,15 +42,28 @@ class PlaybookConfig(AbstractConfig, OwnershipAbstractModel):
         null=False,
         blank=False,
         default=ScanMode.CHECK_PREVIOUS_ANALYSIS.value,
+        help_text=(
+            "If it's not a starting playbook,"
+            " this must be set to `force_new_analysis`"
+        ),
     )
     scan_check_time = models.DurationField(
-        null=True, blank=True, default=datetime.timedelta(hours=24)
+        null=True,
+        blank=True,
+        default=datetime.timedelta(hours=24),
+        help_text=(
+            "Time range checked if the scan_mode is set to `check_previous_analysis`"
+        ),
     )

     tags = models.ManyToManyField(Tag, related_name="playbooks", blank=True)
     tlp = models.CharField(max_length=8, choices=TLP.choices)

+    starting = models.BooleanField(
+        default=True, help_text="If False, the playbook can only be executed by pivots"
+    )
+
     class Meta:
         ordering = ["name", "disabled"]
         indexes = OwnershipAbstractModel.Meta.indexes
@@ -88,9 +101,16 @@ def clean_scan(self):
                 " and not have check_time set"
             )

+    def clean_starting(self):
+        if not self.starting and self.scan_mode != ScanMode.FORCE_NEW_ANALYSIS.value:
+            raise ValidationError(
+                "Not starting playbooks must always force new analysis"
+            )
+
     def clean(self) -> None:
         super().clean()
         self.clean_scan()
+        self.clean_starting()

     def is_sample(self) -> bool:
         return AllTypes.FILE.value in self.type
diff --git a/api_app/playbooks_manager/signals.py b/api_app/playbooks_manager/signals.py
index d1b2300ddf..8cf87a04c4 100644
--- a/api_app/playbooks_manager/signals.py
+++ b/api_app/playbooks_manager/signals.py
@@ -1,11 +1,38 @@
+import logging
+import uuid
 from typing import Type

+from django.conf import settings
 from django.core.exceptions import ValidationError
 from django.db.models.signals import m2m_changed
 from django.dispatch import receiver

 from api_app.pivots_manager.models import PivotConfig
 from api_app.playbooks_manager.models import PlaybookConfig
+from api_app.signals import migrate_finished
+from intel_owl.celery import get_queue_name
+
+logger = 
logging.getLogger(__name__) + + +@receiver(migrate_finished) +def post_migrate_playbooks_manager( + sender, + *args, + check_unapplied: bool = False, + **kwargs, +): + logger.info(f"Post migrate {args} {kwargs}") + if check_unapplied: + return + from intel_owl.tasks import refresh_cache + + refresh_cache.apply_async( + queue=get_queue_name(settings.CONFIG_QUEUE), + MessageGroupId=str(uuid.uuid4()), + priority=3, + args=[PlaybookConfig.python_path], + ) @receiver(m2m_changed, sender=PlaybookConfig.analyzers.through) diff --git a/api_app/playbooks_manager/views.py b/api_app/playbooks_manager/views.py index 4285296ec5..686fb26a5f 100644 --- a/api_app/playbooks_manager/views.py +++ b/api_app/playbooks_manager/views.py @@ -54,7 +54,8 @@ def analyze_multiple_observables(self, request): data=request.data, many=True, context={"request": request} ) oas.is_valid(raise_exception=True) - jobs = oas.save(send_task=True) + parent_job = oas.validated_data[0].get("parent_job", None) + jobs = oas.save(send_task=True, parent=parent_job) return Response( JobResponseSerializer(jobs, many=True).data, status=status.HTTP_200_OK, diff --git a/api_app/queryset.py b/api_app/queryset.py index 67a0091cb0..d51d57131b 100644 --- a/api_app/queryset.py +++ b/api_app/queryset.py @@ -1,6 +1,5 @@ import datetime import json -import logging import uuid from typing import TYPE_CHECKING, Generator, Type @@ -13,9 +12,12 @@ from api_app.models import PythonConfig from api_app.serializers import AbstractBIInterface +import logging + from celery.canvas import Signature from django.db import models from django.db.models import ( + BooleanField, Case, Exists, F, @@ -37,6 +39,8 @@ from certego_saas.apps.organization.membership import Membership from certego_saas.apps.user.models import User +logger = logging.getLogger(__name__) + class SendToBiQuerySet(models.QuerySet): @classmethod @@ -53,10 +57,14 @@ def _create_index_template(): settings.ELASTICSEARCH_CLIENT.indices.put_template( name=settings.ELASTICSEARCH_BI_INDEX, body=body ) + logger.info( + f"created template for Elastic named {settings.ELASTICSEARCH_BI_INDEX}" + ) def send_to_elastic_as_bi(self, max_timeout: int = 60) -> bool: from elasticsearch.helpers import bulk + logger.info("BI start") self._create_index_template() BULK_MAX_SIZE = 1000 found_errors = False @@ -73,13 +81,13 @@ def send_to_elastic_as_bi(self, max_timeout: int = 60) -> bool: request_timeout=max_timeout, ) if errors: - logging.error( + logger.error( f"Errors on sending to elastic: {errors}." " We are not marking objects as sent." 
) found_errors |= errors else: - logging.info("BI sent") + logger.info("BI sent") self.model.objects.filter( pk__in=objects.values_list("pk", flat=True) ).update(sent_to_bi=True) @@ -262,7 +270,9 @@ def _alias_owner_value_for_user( return self.alias( owner_value=Subquery( PluginConfig.objects.filter( - parameter__pk=OuterRef("pk"), **{config.snake_case_name: config.pk} + parameter__pk=OuterRef("pk"), + **{config.snake_case_name: config.pk}, + for_organization=False, ) .visible_for_user_owned(user) .values("value")[:1], @@ -302,11 +312,13 @@ def _alias_default_value(self, config: "PythonConfig") -> "ParameterQuerySet": def _alias_runtime_config(self, runtime_config=None): if not runtime_config: runtime_config = {} - return self.alias( - runtime_value=Value( - runtime_config.get(F("name"), None), - output_field=JSONField(), - ) + # we are creating conditions for when runtime config should be used + whens = [ + When(name=para, then=Value(value, output_field=JSONField())) + for para, value in runtime_config.items() + ] + return self.annotate( + runtime_value=Case(*whens, default=None, output_field=JSONField()) ) def _alias_for_test(self): @@ -376,11 +388,12 @@ def annotate_value_for_user( is_from_org=Case( When( runtime_value__isnull=True, - org_value__isnull=True, - owner_value__isnull=False, + owner_value__isnull=True, + org_value__isnull=False, then=Value(True), ), default=Value(False), + output_field=BooleanField(), ), ) ) @@ -567,6 +580,7 @@ def get_signatures(self, job) -> Generator[Signature, None, None]: task_id=task_id, immutable=True, MessageGroupId=str(task_id), + priority=job.priority, ) diff --git a/api_app/serializers/__init__.py b/api_app/serializers/__init__.py index 44d8a6cef9..8c7419bb01 100644 --- a/api_app/serializers/__init__.py +++ b/api_app/serializers/__init__.py @@ -51,7 +51,6 @@ def get_environment(instance): def to_elastic_dict(data): return { "_source": data, - "_type": "_doc", "_index": settings.ELASTICSEARCH_BI_INDEX + "-" + now().strftime("%Y.%m"), "_op_type": "index", } diff --git a/api_app/serializers/job.py b/api_app/serializers/job.py index 5a39f3f25b..e82f50d87c 100644 --- a/api_app/serializers/job.py +++ b/api_app/serializers/job.py @@ -25,6 +25,7 @@ from api_app.helpers import calculate_md5, gen_random_colorhex from api_app.investigations_manager.models import Investigation from api_app.models import Comment, Job, Tag +from api_app.pivots_manager.models import PivotMap from api_app.playbooks_manager.models import PlaybookConfig from api_app.serializers import AbstractBIInterface from api_app.serializers.report import AbstractReportSerializerInterface @@ -97,6 +98,7 @@ class Meta: "scan_mode", "scan_check_time", "investigation", + "parent_job", ) md5 = rfs.HiddenField(default=None) @@ -116,6 +118,7 @@ class Meta: investigation = rfs.PrimaryKeyRelatedField( queryset=Investigation.objects.all(), many=False, required=False, default=None ) + parent_job = rfs.PrimaryKeyRelatedField(queryset=Job.objects.all(), required=False) connectors_requested = rfs.SlugRelatedField( slug_field="name", queryset=ConnectorConfig.objects.all(), @@ -226,8 +229,21 @@ def validate(self, attrs: dict) -> dict: playbook.tags.all() ) - attrs["analyzers_to_execute"] = self.set_analyzers_to_execute(**attrs) - attrs["connectors_to_execute"] = self.set_connectors_to_execute(**attrs) + analyzers_to_execute = attrs[ + "analyzers_to_execute" + ] = self.set_analyzers_to_execute(**attrs) + connectors_to_execute = attrs[ + "connectors_to_execute" + ] = self.set_connectors_to_execute(**attrs) + if 
not analyzers_to_execute and not connectors_to_execute:
+            warnings = "\n".join(self.filter_warnings)
+            raise ValidationError(
+                {
+                    "detail": "No Analyzers and Connectors "
+                    f"can be run after filtering:\n{warnings}"
+                }
+            )
+
         attrs["visualizers_to_execute"] = self.set_visualizers_to_execute(**attrs)
         attrs["warnings"] = list(self.filter_warnings)
         attrs["tags"] = attrs.pop("tags_labels", [])
@@ -256,11 +272,6 @@ def set_analyzers_to_execute(
         self, analyzers_requested: List[AnalyzerConfig], tlp: str, **kwargs
     ) -> List[AnalyzerConfig]:
         analyzers_executed = list(self.plugins_to_execute(tlp, analyzers_requested))
-        if not analyzers_executed:
-            warnings = "\n".join(self.filter_warnings)
-            raise ValidationError(
-                {"detail": f"No Analyzers can be run after filtering:\n{warnings}"}
-            )
         return analyzers_executed

     def plugins_to_execute(
@@ -332,6 +343,7 @@ def check_previous_jobs(self, validated_data: Dict) -> Job:
     def create(self, validated_data: Dict) -> Job:
         warnings = validated_data.pop("warnings")
         send_task = validated_data.pop("send_task", False)
+        parent_job = validated_data.pop("parent_job", None)
         if validated_data["scan_mode"] == ScanMode.CHECK_PREVIOUS_ANALYSIS.value:
             try:
                 return self.check_previous_jobs(validated_data)
@@ -342,6 +354,10 @@ def create(self, validated_data: Dict) -> Job:
             job.warnings = warnings
             job.save()
             logger.info(f"Job {job.pk} created")
+        if parent_job:
+            PivotMap.objects.create(
+                starting_job=parent_job, ending_job=job, pivot_config=None
+            )
         if send_task:
             from intel_owl.tasks import job_pipeline

@@ -350,6 +366,7 @@ def create(self, validated_data: Dict) -> Job:
                 args=[job.pk],
                 queue=get_queue_name(settings.DEFAULT_QUEUE),
                 MessageGroupId=str(uuid.uuid4()),
+                priority=job.priority,
             )
         return job

@@ -438,6 +455,7 @@ class Meta:
             "pivot_config",
             "playbook",
             "status",
+            "received_request_time",
         ]

     playbook = rfs.SlugRelatedField(
@@ -472,8 +490,6 @@ class Meta:
             "path",
             "numchild",
             "sent_to_bi",
-            "scan_mode",
-            "scan_check_time",
         )

     comments = CommentSerializer(many=True, read_only=True)
@@ -497,12 +513,18 @@ class Meta:
     )
     playbook_requested = rfs.SlugRelatedField(read_only=True, slug_field="name")
     playbook_to_execute = rfs.SlugRelatedField(read_only=True, slug_field="name")
+    investigation = rfs.SerializerMethodField(read_only=True, default=None)
     permissions = rfs.SerializerMethodField()

     def get_pivots_to_execute(self, obj: Job):  # skipcq: PYL-R0201
         # this cast is required or serializer doesn't work with websocket
         return list(obj.pivots_to_execute.all().values_list("name", flat=True))

+    def get_investigation(self, instance: Job):  # skipcq: PYL-R0201
+        if root_investigation := instance.get_root().investigation:
+            return root_investigation.pk
+        return instance.investigation
+
     def get_fields(self):
         # this method override is required for a cyclic import
         from api_app.analyzers_manager.serializers import AnalyzerReportSerializer
@@ -557,12 +579,25 @@ def save(self, parent: Job = None, **kwargs):
         if parent:
             # the parent has already an investigation
             # so we don't need to do anything because everything is already connected
-            if parent.investigation:
+            root = parent.get_root()
+            if root.investigation:
+                root.investigation.status = root.investigation.Status.RUNNING.value
+                root.investigation.save()
                 return jobs
             # if we have a parent, it means we are pivoting from one job to another
             else:
+                if parent.playbook_to_execute:
+                    investigation_name = (
+                        f"{parent.playbook_to_execute.name}:"
+                        f" {parent.analyzed_object_name}"
+                    )
+                else:
+                    investigation_name = (
+                        f"Pivot 
investigation: {parent.analyzed_object_name}" + ) + investigation = Investigation.objects.create( - name="Pivot investigation", + name=investigation_name, owner=self.context["request"].user, ) investigation.jobs.add(parent) @@ -579,7 +614,8 @@ def save(self, parent: Job = None, **kwargs): # we are in the multiple input case elif len(jobs) > 1: investigation = Investigation.objects.create( - name="Custom investigation", owner=self.context["request"].user + name=f"Custom investigation: {len(jobs)} jobs", + owner=self.context["request"].user, ) for job in jobs: job: Job @@ -589,7 +625,6 @@ def save(self, parent: Job = None, **kwargs): else: return jobs investigation: Investigation - investigation.name = investigation.name + f" #{investigation.id}" investigation.status = investigation.Status.RUNNING.value investigation.for_organization = True investigation.save() @@ -916,10 +951,7 @@ class JobResponseSerializer(rfs.ModelSerializer): source="playbook_to_execute", slug_field="name", ) - investigation = rfs.SlugRelatedField( - read_only=True, - slug_field="pk", - ) + investigation = rfs.SerializerMethodField(read_only=True, default=None) class Meta: model = Job @@ -934,6 +966,11 @@ class Meta: extra_kwargs = {"warnings": {"read_only": True, "required": False}} list_serializer_class = JobEnvelopeSerializer + def get_investigation(self, instance: Job): # skipcq: PYL-R0201 + if root_investigation := instance.get_root().investigation: + return root_investigation.pk + return instance.investigation + def to_representation(self, instance: Job): result = super().to_representation(instance) result["status"] = self.STATUS_ACCEPTED diff --git a/api_app/signals.py b/api_app/signals.py index 9750bc46d1..6b5a5b2062 100644 --- a/api_app/signals.py +++ b/api_app/signals.py @@ -7,7 +7,6 @@ from django.conf import settings from django.db import models from django.dispatch import receiver -from django_celery_beat.apps import BeatConfig from api_app.decorators import prevent_signal_recursion from api_app.helpers import calculate_md5 @@ -55,36 +54,33 @@ def post_delete_job(sender, instance: Job, **kwargs): instance.investigation.delete() -@receiver(models.signals.post_migrate) -def post_migrate_beat( +@receiver(migrate_finished) +def post_migrate_api_app( sender, - app_config, - verbosity, - interactive, - stdout=None, - using=None, - plan=None, - apps=None, + *args, + check_unapplied: bool = False, **kwargs, ): - if isinstance(sender, BeatConfig): - from django_celery_beat.models import PeriodicTask - - from intel_owl.tasks import update - - for module in PythonModule.objects.filter(health_check_schedule__isnull=False): - for config in module.configs.filter(health_check_task__isnull=True): - config.generate_health_check_periodic_task() - for module in PythonModule.objects.filter( - update_schedule__isnull=False, update_task__isnull=True - ): - module.generate_update_periodic_task() - - for task in PeriodicTask.objects.filter( - enabled=True, task=f"{update.__module__}.{update.__name__}" - ): - task.enabled &= settings.REPO_DOWNLOADER_ENABLED - task.save() + logger.info(f"Post migrate {args} {kwargs}") + if check_unapplied: + return + from django_celery_beat.models import PeriodicTask + + from intel_owl.tasks import update + + for module in PythonModule.objects.filter(health_check_schedule__isnull=False): + for config in module.configs.filter(health_check_task__isnull=True): + config.generate_health_check_periodic_task() + for module in PythonModule.objects.filter( + update_schedule__isnull=False, 
update_task__isnull=True + ): + module.generate_update_periodic_task() + + for task in PeriodicTask.objects.filter( + enabled=True, task=f"{update.__module__}.{update.__name__}" + ): + task.enabled &= settings.REPO_DOWNLOADER_ENABLED + task.save() @receiver(models.signals.post_save, sender=PluginConfig) diff --git a/api_app/views.py b/api_app/views.py index 5239630f69..dd961d952b 100644 --- a/api_app/views.py +++ b/api_app/views.py @@ -232,7 +232,8 @@ def analyze_multiple_observables(request): data=request.data, many=True, context={"request": request} ) oas.is_valid(raise_exception=True) - jobs = oas.save(send_task=True) + parent_job = oas.validated_data[0].get("parent_job", None) + jobs = oas.save(send_task=True, parent=parent_job) jrs = JobResponseSerializer(jobs, many=True).data logger.info(f"finished analyze_multiple_observables from user {request.user}") return Response( @@ -749,6 +750,7 @@ def perform_retry(report: AbstractReport): queue=report.config.queue, immutable=True, MessageGroupId=str(uuid.uuid4()), + priority=report.job.priority, ) runner() @@ -920,4 +922,8 @@ def pull(self, request, name=None): {"detail": "Unexpected exception raised. Check the code."} ) else: + if update_status is None: + raise ValidationError( + {"detail": "This Plugin has no Update implemented"} + ) return Response(data={"status": update_status}, status=status.HTTP_200_OK) diff --git a/api_app/visualizers_manager/signals.py b/api_app/visualizers_manager/signals.py index cd6128db52..ada644fd4d 100644 --- a/api_app/visualizers_manager/signals.py +++ b/api_app/visualizers_manager/signals.py @@ -1,13 +1,31 @@ -from django.db.models.signals import post_migrate +import logging +import uuid + +from django.conf import settings from django.dispatch import receiver -from api_app.visualizers_manager.apps import VisualizersManagerConfig +from api_app.signals import migrate_finished from api_app.visualizers_manager.models import VisualizerConfig +from intel_owl.celery import get_queue_name + +logger = logging.getLogger(__name__) -@receiver(post_migrate, sender=VisualizersManagerConfig) -def post_migrate_visualizer( - sender, app_config, verbosity, interactive, stdout, using, plan, apps, **kwargs +@receiver(migrate_finished) +def post_migrate_visualizers_manager( + sender, + *args, + check_unapplied: bool = False, + **kwargs, ): - if plan: - VisualizerConfig.delete_class_cache_keys() + logger.info(f"Post migrate {args} {kwargs}") + if check_unapplied: + return + from intel_owl.tasks import refresh_cache + + refresh_cache.apply_async( + queue=get_queue_name(settings.CONFIG_QUEUE), + MessageGroupId=str(uuid.uuid4()), + priority=3, + args=[VisualizerConfig.python_path], + ) diff --git a/api_app/visualizers_manager/visualizers/domain_reputation_services.py b/api_app/visualizers_manager/visualizers/domain_reputation_services.py index f87b644628..74521a28bc 100644 --- a/api_app/visualizers_manager/visualizers/domain_reputation_services.py +++ b/api_app/visualizers_manager/visualizers/domain_reputation_services.py @@ -15,6 +15,10 @@ class DomainReputationServices(Visualizer): + @classmethod + def update(cls) -> bool: + pass + @visualizable_error_handler_with_params("VirusTotal") def _vt3(self): try: @@ -23,6 +27,16 @@ def _vt3(self): ) except AnalyzerReport.DoesNotExist: logger.warning("VirusTotal_v3_Get_Observable report does not exist") + virustotal_report = self.Title( + self.Base( + value="VirusTotal", + link="", + icon=VisualizableIcon.VIRUSTotal, + ), + self.Base(value="Engine Hits: Unknown"), + disable=True, + ) + 
return virustotal_report else: hits = ( analyzer_report.report.get("data", {}) @@ -33,7 +47,7 @@ def _vt3(self): virustotal_report = self.Title( self.Base( value="VirusTotal", - link=analyzer_report.report["link"], + link=analyzer_report.report.get("link", ""), icon=VisualizableIcon.VIRUSTotal, ), self.Base(value=f"Engine Hits: {hits}"), @@ -84,13 +98,37 @@ def _threatfox(self): malware_printable = data[0].get("malware_printable", "") threatfox_report = self.Title( self.Base( - value="ThreatFox", link=analyzer_report.report.get("link", "") + value="ThreatFox", + link=analyzer_report.report.get("link", ""), + icon=VisualizableIcon.URLHAUS, ), self.Base(value="" if disabled else f"found {malware_printable}"), disable=disabled, ) return threatfox_report + @visualizable_error_handler_with_params("Tranco") + def _tranco(self): + try: + analyzer_report = self.analyzer_reports().get(config__name="Tranco") + except AnalyzerReport.DoesNotExist: + logger.warning("Tranco report does not exist") + else: + ranks = analyzer_report.report.get("ranks", []) + disabled = analyzer_report.status != ReportStatus.SUCCESS or not ranks + rank = "" + if ranks and isinstance(ranks, list): + rank = ranks[0].get("rank", "") + tranco_report = self.Title( + self.Base( + value="Tranco Rank", + link="https://tranco-list.eu/", + ), + self.Base(value="" if disabled else rank), + disable=disabled, + ) + return tranco_report + @visualizable_error_handler_with_params("Phishtank") def _phishtank(self): try: @@ -208,6 +246,8 @@ def run(self) -> List[Dict]: first_level_elements.append(self._threatfox()) + first_level_elements.append(self._tranco()) + second_level_elements.append(self._phishtank()) second_level_elements.append(self._phishing_army()) diff --git a/api_app/visualizers_manager/visualizers/yara.py b/api_app/visualizers_manager/visualizers/yara.py index b1513684ef..89246b8006 100644 --- a/api_app/visualizers_manager/visualizers/yara.py +++ b/api_app/visualizers_manager/visualizers/yara.py @@ -15,6 +15,10 @@ class Yara(Visualizer): + @classmethod + def update(cls) -> bool: + pass + @visualizable_error_handler_with_params("Analyzer") def _yara_analyzer(self): return self.Title( @@ -57,7 +61,6 @@ def run(self) -> List[Dict]: ] page1 = self.Page(name="Yara first page") h1 = self.HList(value=[self._yara_analyzer()]) - logger.debug(h1.to_dict()) page1.add_level( self.Level(position=1, size=self.LevelSize.S_3, horizontal_list=h1) ) @@ -67,17 +70,11 @@ def run(self) -> List[Dict]: self._yara_signatures(signatures), ] ) - logger.debug(h2.to_dict()) - page2 = self.Page(name="Yara second page") - page2.add_level( - self.Level(position=1, size=self.LevelSize.S_3, horizontal_list=h1) - ) - page2.add_level( + page1.add_level( self.Level(position=2, size=self.LevelSize.S_5, horizontal_list=h2) ) logger.debug(page1) - logger.debug(page2) - return [page1.to_dict(), page2.to_dict()] + return [page1.to_dict()] @classmethod def _monkeypatch(cls): diff --git a/authentication/admin.py b/authentication/admin.py index deb8bd84f8..ebd4d09cb5 100644 --- a/authentication/admin.py +++ b/authentication/admin.py @@ -120,7 +120,10 @@ class UserProfileAdmin(admin.ModelAdmin): "company_name", "company_role", "discover_from", + "task_priority", + "is_robot", ) + list_filter = ["task_priority", "is_robot"] @admin.display(boolean=True) def user_is_active(self, obj: UserProfile) -> bool: diff --git a/authentication/apps.py b/authentication/apps.py index b9549bb9a2..7917efeac0 100644 --- a/authentication/apps.py +++ b/authentication/apps.py @@ -6,3 +6,7 @@ 
class ApiAppAuthConfig(AppConfig):
     name = "authentication"
+
+    @staticmethod
+    def ready() -> None:
+        from . import signals  # noqa
diff --git a/authentication/migrations/0003_userprofile_is_robot_userprofile_task_priority_and_more.py b/authentication/migrations/0003_userprofile_is_robot_userprofile_task_priority_and_more.py
new file mode 100644
index 0000000000..a424fa4fb2
--- /dev/null
+++ b/authentication/migrations/0003_userprofile_is_robot_userprofile_task_priority_and_more.py
@@ -0,0 +1,41 @@
+# Generated by Django 4.2.11 on 2024-04-04 08:16
+
+import django.core.validators
+import django.db.models.deletion
+from django.conf import settings
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
+        ("authentication", "0002_migrate_from_durin"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="userprofile",
+            name="is_robot",
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name="userprofile",
+            name="task_priority",
+            field=models.IntegerField(
+                default=10,
+                validators=[
+                    django.core.validators.MaxValueValidator(10),
+                    django.core.validators.MinValueValidator(1),
+                ],
+            ),
+        ),
+        migrations.AlterField(
+            model_name="userprofile",
+            name="user",
+            field=models.OneToOneField(
+                on_delete=django.db.models.deletion.CASCADE,
+                related_name="profile",
+                to=settings.AUTH_USER_MODEL,
+            ),
+        ),
+    ]
diff --git a/authentication/migrations/0004_alter_userprofile_company_name_and_more.py b/authentication/migrations/0004_alter_userprofile_company_name_and_more.py
new file mode 100644
index 0000000000..8730f54a43
--- /dev/null
+++ b/authentication/migrations/0004_alter_userprofile_company_name_and_more.py
@@ -0,0 +1,34 @@
+# Generated by Django 4.2.11 on 2024-04-04 08:27
+
+import django.core.validators
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        (
+            "authentication",
+            "0003_userprofile_is_robot_userprofile_task_priority_and_more",
+        ),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name="userprofile",
+            name="company_name",
+            field=models.CharField(
+                max_length=32,
+                null=True,
+                validators=[django.core.validators.MinLengthValidator(3)],
+            ),
+        ),
+        migrations.AlterField(
+            model_name="userprofile",
+            name="company_role",
+            field=models.CharField(
+                max_length=32,
+                null=True,
+                validators=[django.core.validators.MinLengthValidator(3)],
+            ),
+        ),
+    ]
diff --git a/authentication/migrations/0005_create_profiles.py b/authentication/migrations/0005_create_profiles.py
new file mode 100644
index 0000000000..83b3585e49
--- /dev/null
+++ b/authentication/migrations/0005_create_profiles.py
@@ -0,0 +1,34 @@
+# Generated by Django 4.2.11 on 2024-04-04 07:46
+
+from django.conf import settings
+from django.db import migrations
+
+
+def migrate(apps, schema_editor):
+    User = apps.get_model(*settings.AUTH_USER_MODEL.split("."))
+    Profile = apps.get_model("authentication", "UserProfile")
+    for user in User.objects.all():
+        is_robot = user.username.endswith("Ingestor")
+        if not hasattr(user, "profile") or not user.profile:
+            profile = Profile(
+                user=user, task_priority=7 if is_robot else 10, is_robot=is_robot
+            )
+        else:
+            profile = user.profile
+            profile.task_priority = 7 if is_robot else 10
+            profile.is_robot = is_robot
+        profile.save()
+
+
+def reverse_migrate(apps, schema_editor):
+    Profile = apps.get_model("authentication", "UserProfile")
+    Profile.objects.all().delete()
+
+
+class 
Migration(migrations.Migration): + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ("authentication", "0004_alter_userprofile_company_name_and_more"), + ] + + operations = [migrations.RunPython(migrate, reverse_migrate)] diff --git a/authentication/migrations/0006_alter_userprofile_company_name_and_more.py b/authentication/migrations/0006_alter_userprofile_company_name_and_more.py new file mode 100644 index 0000000000..f4cdf6e811 --- /dev/null +++ b/authentication/migrations/0006_alter_userprofile_company_name_and_more.py @@ -0,0 +1,24 @@ +# Generated by Django 4.2.11 on 2024-05-07 13:54 + +import django.core.validators +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('authentication', '0005_create_profiles'), + ] + + operations = [ + migrations.AlterField( + model_name='userprofile', + name='company_name', + field=models.CharField(blank=True, default='', max_length=32, validators=[django.core.validators.MinLengthValidator(3)]), + ), + migrations.AlterField( + model_name='userprofile', + name='company_role', + field=models.CharField(blank=True, default='', max_length=32, validators=[django.core.validators.MinLengthValidator(3)]), + ), + ] diff --git a/authentication/models.py b/authentication/models.py index a73db5fe5c..0089978388 100644 --- a/authentication/models.py +++ b/authentication/models.py @@ -2,7 +2,11 @@ # See the file 'LICENSE' for copying permission. from django.conf import settings -from django.core.validators import MinLengthValidator +from django.core.validators import ( + MaxValueValidator, + MinLengthValidator, + MinValueValidator, +) from django.db import models __all__ = [ @@ -24,17 +28,21 @@ class DiscoverFromChoices(models.TextChoices): class UserProfile(models.Model): - # contants + # constants DiscoverFromChoices = DiscoverFromChoices # fields user = models.OneToOneField( settings.AUTH_USER_MODEL, on_delete=models.CASCADE, - related_name="user_profile", + related_name="profile", + ) + company_name = models.CharField( + max_length=32, validators=[MinLengthValidator(3)], default="", blank=True + ) + company_role = models.CharField( + max_length=32, validators=[MinLengthValidator(3)], default="", blank=True ) - company_name = models.CharField(max_length=32, validators=[MinLengthValidator(3)]) - company_role = models.CharField(max_length=32, validators=[MinLengthValidator(3)]) twitter_handle = models.CharField( max_length=16, default="", blank=True, validators=[MinLengthValidator(3)] ) @@ -43,6 +51,10 @@ class UserProfile(models.Model): choices=DiscoverFromChoices.choices, default=DiscoverFromChoices.OTHER, ) + task_priority = models.IntegerField( + default=10, validators=[MaxValueValidator(10), MinValueValidator(1)] + ) + is_robot = models.BooleanField(default=False) # meta class Meta: diff --git a/authentication/serializers.py b/authentication/serializers.py index 815d490f83..40d37a18eb 100644 --- a/authentication/serializers.py +++ b/authentication/serializers.py @@ -134,9 +134,11 @@ def create(self, validated_data): try: user = super().create(validated_data) - # save profile object only if user object was actually saved + # update profile object only if user object was actually saved if getattr(user, "pk", None): - self._profile_serializer.save(user=user) + self._profile_serializer.update( + user.profile, self._profile_serializer.data + ) user.refresh_from_db() except DatabaseError: transaction.rollback() diff --git a/authentication/signals.py b/authentication/signals.py new 
file mode 100644 index 0000000000..c637ee92e6 --- /dev/null +++ b/authentication/signals.py @@ -0,0 +1,11 @@ +from django.conf import settings +from django.db.models.signals import post_save +from django.dispatch import receiver + +from authentication.models import UserProfile + + +@receiver(post_save, sender=settings.AUTH_USER_MODEL) +def post_save_user(sender, instance, created, **kwargs): + if created: + UserProfile.objects.create(user=instance) diff --git a/authentication/views.py b/authentication/views.py index 797f22c100..1459bd179c 100644 --- a/authentication/views.py +++ b/authentication/views.py @@ -44,18 +44,44 @@ class PasswordResetRequestView( rest_email_auth.views.PasswordResetRequestView, RecaptchaV2Mixin ): + """ + Handles requests for password reset. + + Args: + rest_email_auth.views.PasswordResetRequestView: + The parent view class for password reset requests. + RecaptchaV2Mixin: A mixin for reCAPTCHA verification. + """ + authentication_classes: List = [] permission_classes: List = [] throttle_classes: List = [POSTUserRateThrottle] class PasswordResetView(rest_email_auth.views.PasswordResetView, RecaptchaV2Mixin): + """ + Handles password reset. + + Args: + rest_email_auth.views.PasswordResetView: + The parent view class for password reset. + RecaptchaV2Mixin: A mixin for reCAPTCHA verification. + """ + authentication_classes: List = [] permission_classes: List = [] throttle_classes: List = [POSTUserRateThrottle] class EmailVerificationView(rest_email_auth.views.EmailVerificationView): + """ + Handles email verification. + + Args: + rest_email_auth.views.EmailVerificationView: + The parent view class for email verification. + """ + authentication_classes: List = [] permission_classes: List = [] throttle_classes: List = [POSTUserRateThrottle] @@ -63,35 +89,84 @@ class EmailVerificationView(rest_email_auth.views.EmailVerificationView): class RegistrationView(rest_email_auth.views.RegistrationView, RecaptchaV2Mixin): + """ + Handles user registration. + + Args: + rest_email_auth.views.RegistrationView: + The parent view class for user registration. + RecaptchaV2Mixin: A mixin for reCAPTCHA verification. + """ + authentication_classes: List = [] permission_classes: List = [] throttle_classes: List = [POSTUserRateThrottle] serializer_class = RegistrationSerializer def get_serializer_class(self): # skipcq: PYL-R0201 + """ + Returns the serializer class for registration. + + Returns: + RegistrationSerializer: The serializer class for user registration. + """ return RegistrationSerializer class ResendVerificationView( rest_email_auth.views.ResendVerificationView, RecaptchaV2Mixin ): + """ + Handles re-sending email verification. + + Args: + rest_email_auth.views.ResendVerificationView: + The parent view class for resending email verification. + RecaptchaV2Mixin: A mixin for reCAPTCHA verification. + """ + authentication_classes: List = [] permission_classes: List = [] throttle_classes: List = [POSTUserRateThrottle] class LoginView(RecaptchaV2Mixin): + """ + Handles user login. + + Args: + RecaptchaV2Mixin: A mixin for reCAPTCHA verification. + """ + authentication_classes: List = [] permission_classes: List = [] throttle_classes: List = [POSTUserRateThrottle] @staticmethod def validate_and_return_user(request): + """ + Validates user credentials and returns the user object. + + Args: + request (Request): The request object containing user credentials. + + Returns: + Any: The authenticated user object. 
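+
+        Raises:
+            ValidationError: if the submitted credentials are not valid.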
+ """ serializer = LoginSerializer(data=request.data) serializer.is_valid(raise_exception=True) return serializer.validated_data["user"] def post(self, request, *args, **kwargs): + """ + Handles POST request for user login. + + Args: + request (Request): The request object containing user credentials. + + Returns: + Response: The response object. + """ try: self.get_serializer() # for RecaptchaV2Mixin except AssertionError: @@ -104,11 +179,24 @@ def post(self, request, *args, **kwargs): class ChangePasswordView(APIView): + """ + Handles changing user password. + """ + permission_classes = [IsAuthenticated] @staticmethod def post(request: Request) -> Response: # Get the old password and new password from the request data + """ + Handles POST request for changing user password. + + Args: + request (Request): The request object containing old and new passwords. + + Returns: + Response: The response object. + """ old_password = request.data.get("old_password") new_password = request.data.get("new_password") @@ -131,9 +219,22 @@ def post(request: Request) -> Response: class LogoutView(APIView): + """ + Handles user logout. + """ + permission_classes = [IsAuthenticated] def post(self, request, *args, **kwargs): # skipcq: PYL-R0201 + """ + Handles POST request for user logout. + + Args: + request (Request): The request object. + + Returns: + Response: The response object. + """ user = request.user logger.info(f"perform_logout received request from '{user.username}''.") logout(request) @@ -148,6 +249,12 @@ def post(self, request, *args, **kwargs): # skipcq: PYL-R0201 def google_login(request: Request): """ Redirect to Google OAuth login + + Args: + request (Request): The request object. + + Returns: + Response: The response object. """ redirect_uri = request.build_absolute_uri(reverse("oauth_google_callback")) try: @@ -162,8 +269,21 @@ def google_login(request: Request): class GoogleLoginCallbackView(LoginView): + """ + Handles Google OAuth login callback. + """ + @staticmethod def validate_and_return_user(request): + """ + Validates Google OAuth token and returns the user object. + + Args: + request (Request): The request object. + + Returns: + Any: The authenticated user object. + """ try: token = oauth.google.authorize_access_token(request) except ( @@ -198,6 +318,15 @@ def post(self, request, *args, **kwargs): @api_view(["get"]) @permission_classes([AllowAny]) def checkConfiguration(request): + """ + Checks the configuration settings. + + Args: + request (Request): The request object. + + Returns: + Response: The response object. + """ logger.info(f"Requested checking configuration from {request.user}.") page = request.query_params.get("page") register_uri = reverse("auth_register") @@ -243,6 +372,7 @@ class APIAccessTokenView(APIView): - ``GET`` -> get token-client pair info - ``POST`` -> create and get token-client pair info - ``DELETE`` -> delete existing API access token + Handles API access token operations. """ permission_classes = [IsAuthenticated] @@ -256,12 +386,30 @@ def get_object(self): return instance def get(self, request, *args, **kwargs): + """ + Handles GET request for retrieving API access token. + + Args: + request (Request): The request object. + + Returns: + Response: The response object. + """ instance = self.get_object() logger.info(f" user {request.user} request the API token") serializer = TokenSerializer(instance) return Response(serializer.data) def post(self, request): # skipcq: PYL-R0201 + """ + Handles POST request for creating API access token. 
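+        Creates a token for the requesting user via TokenSerializer and
+        returns it with HTTP 201.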
+ + Args: + request (Request): The request object. + + Returns: + Response: The response object. + """ username = request.user.username logger.info(f"user {username} send a request to create the API token") serializer = TokenSerializer(data={}, context={"user": request.user}) @@ -270,6 +418,15 @@ def post(self, request): # skipcq: PYL-R0201 return Response(serializer.data, status=status.HTTP_201_CREATED) def delete(self, request): + """ + Handles DELETE request for deleting API access token. + + Args: + request (Request): The request object. + + Returns: + Response: The response object. + """ logger.info(f"user {request.user} send a request to delete the API token") instance = self.get_object() instance.delete() diff --git a/configuration/elastic_search_mappings/intel_owl_bi.json b/configuration/elastic_search_mappings/intel_owl_bi.json index fca1ec09ce..0342ef4480 100644 --- a/configuration/elastic_search_mappings/intel_owl_bi.json +++ b/configuration/elastic_search_mappings/intel_owl_bi.json @@ -1,47 +1,46 @@ { "settings" : { - "number_of_shards" : 3 + "number_of_shards" : 1, + "number_of_replicas": 0 }, "mappings": { - "_doc": { - "dynamic": false, - "properties": { - "timestamp": { - "type": "date" - }, - "application": { - "type": "keyword" - }, - "username": { - "type": "keyword" - }, - "environment": { - "type": "keyword" - }, - "name": { - "type": "keyword" - }, - "process_time": { - "type": "integer" - }, - "status": { - "type": "keyword" - }, - "end_time": { - "type": "date" - }, - "parameters": { - "type": "object", - "dynamic": true - }, - "playbook": { - "type": "keyword" - }, - "class_instance": { - "type": "keyword" - } - - } + "dynamic": false, + "properties": { + "timestamp": { + "type": "date" + }, + "application": { + "type": "keyword" + }, + "username": { + "type": "keyword" + }, + "environment": { + "type": "keyword" + }, + "name": { + "type": "keyword" + }, + "process_time": { + "type": "integer" + }, + "status": { + "type": "keyword" + }, + "end_time": { + "type": "date" + }, + "parameters": { + "type": "object", + "dynamic": true + }, + "playbook": { + "type": "keyword" + }, + "class_instance": { + "type": "keyword" } + + } } } \ No newline at end of file diff --git a/docker/.env b/docker/.env index d05b23ac76..bc984be7da 100644 --- a/docker/.env +++ b/docker/.env @@ -1,6 +1,6 @@ ### DO NOT CHANGE THIS VALUE !! ### It should be updated only when you pull latest changes off from the 'master' branch of IntelOwl. # this variable must start with "REACT_APP_" to be used in the frontend too -REACT_APP_INTELOWL_VERSION="v6.0.1" +REACT_APP_INTELOWL_VERSION="v6.0.2" # if you want to use a nfs volume for shared files # NFS_ADDRESS= diff --git a/docker/Dockerfile b/docker/Dockerfile index 0a577ba814..ec352ea097 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -22,21 +22,21 @@ ENV LOG_PATH /var/log/intel_owl ARG REPO_DOWNLOADER_ENABLED=true ARG WATCHMAN=false ENV watch_logs_cmd "watch -n1 tail -n10 /var/log/intel_owl/django/api_app.log" -ARG PYELASTIC_VERSION=7.4.1 ARG PYCTI_VERSION=5.12.29 RUN mkdir -p ${LOG_PATH} \ ${LOG_PATH}/django \ ${LOG_PATH}/uwsgi \ ${LOG_PATH}/asgi \ - /opt/deploy/files_required /opt/deploy/files_required/yara /opt/deploy/configuration + /opt/deploy/files_required /opt/deploy/configuration /opt/deploy/files_required/blint /opt/deploy/files_required/yara # install required packages. 
some notes about: # python3-psycopg2 is required to use PostgreSQL with Django # apache2-utils is required to execute htpasswd +# tshark is required for Hfinger file analyzer RUN apt-get update \ && apt-get install -y --no-install-recommends apt-utils libsasl2-dev libssl-dev netcat-traditional \ - vim libldap2-dev libfuzzy-dev net-tools python3-psycopg2 git apache2-utils \ + vim libldap2-dev libfuzzy-dev net-tools python3-psycopg2 git apache2-utils tshark \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* \ && pip3 install --no-cache-dir --upgrade pip @@ -51,16 +51,13 @@ COPY requirements/certego-requirements.txt $PYTHONPATH/certego-requirements.txt WORKDIR $PYTHONPATH RUN pip3 install --no-cache-dir --compile -r project-requirements.txt \ - # install elasticsearch-dsl's appropriate version as specified by user - && pip3 install --no-cache-dir elasticsearch-dsl==${PYELASTIC_VERSION} \ && pip3 install --no-cache-dir pycti==${PYCTI_VERSION} \ && pip3 install --no-cache-dir --compile -r certego-requirements.txt - - COPY . $PYTHONPATH RUN touch ${LOG_PATH}/django/api_app.log ${LOG_PATH}/django/api_app_errors.log \ + && touch ${LOG_PATH}/django/intel_owl.log ${LOG_PATH}/django/intel_owl_errors.log \ && touch ${LOG_PATH}/django/celery.log ${LOG_PATH}/django/celery_errors.log \ && touch ${LOG_PATH}/django/django_auth_ldap.log ${LOG_PATH}/django/django_errors.log \ && touch ${LOG_PATH}/django/certego_saas.log ${LOG_PATH}/django/certego_saas_errors.log \ @@ -81,6 +78,6 @@ COPY --from=frontend-build /build /var/www/reactapp # quarkengine calls # HOME_DIR = f"{Path.home()}/.quark-engine/" # Path(HOME_DIR).mkdir(parents=True, exist_ok=True) -# so we have to set the home env variable to allow to reate its directory +# so we have to set the home env variable to allow to create its directory ENV HOME ${PYTHONPATH} diff --git a/docker/Dockerfile_nginx b/docker/Dockerfile_nginx index 2b5e31aab7..73ef0a2f40 100644 --- a/docker/Dockerfile_nginx +++ b/docker/Dockerfile_nginx @@ -1,4 +1,4 @@ -FROM library/nginx:1.25-alpine +FROM library/nginx:1.26.0-alpine ENV NGINX_LOG_DIR /var/log/nginx # this is to avoid having these logs redirected to stdout/stderr diff --git a/docker/ci.override.yml b/docker/ci.override.yml index 2288954ca5..c64f176e69 100644 --- a/docker/ci.override.yml +++ b/docker/ci.override.yml @@ -9,6 +9,7 @@ services: limits: cpus: '1' memory: 2000M + uwsgi: build: context: ..
diff --git a/docker/default.yml b/docker/default.yml index e6cfee614b..49d17d5ec2 100644 --- a/docker/default.yml +++ b/docker/default.yml @@ -24,11 +24,10 @@ services: - .env healthcheck: test: [ "CMD-SHELL", "nc -z localhost 8001 || exit 1" ] - interval: 10s + interval: 5s timeout: 2s - start_period: 10s - retries: 12 - + start_period: 3s + retries: 15 daphne: image: intelowlproject/intelowl:${REACT_APP_INTELOWL_VERSION} @@ -47,12 +46,13 @@ services: test: ["CMD-SHELL", "nc -z localhost 8011 || exit 1"] interval: 5s timeout: 2s - start_period: 5s - retries: 5 + start_period: 2s + retries: 6 depends_on: uwsgi: condition: service_healthy + nginx: image: intelowlproject/intelowl_nginx:${REACT_APP_INTELOWL_VERSION} container_name: intelowl_nginx @@ -69,6 +69,8 @@ services: depends_on: uwsgi: condition: service_healthy + daphne: + condition: service_healthy celery_beat: image: intelowlproject/intelowl:${REACT_APP_INTELOWL_VERSION} @@ -82,10 +84,10 @@ services: - ./docker/entrypoints/celery_beat.sh env_file: - env_file_app + <<: *no-healthcheck depends_on: uwsgi: condition: service_healthy - <<: *no-healthcheck celery_worker_default: diff --git a/docker/entrypoints/uwsgi.sh b/docker/entrypoints/uwsgi.sh index b90fc7d31a..4489667e6a 100755 --- a/docker/entrypoints/uwsgi.sh +++ b/docker/entrypoints/uwsgi.sh @@ -7,11 +7,11 @@ done # Apply database migrations echo "Waiting for db to be ready..." -sleep 3 # makemigrations is needed only for the durin package. # The customization of the parameters is not applied until the migration is done python manage.py makemigrations durin python manage.py makemigrations rest_email_auth +python manage.py createcachetable # fake-initial does not fake the migration if the table does not exist python manage.py migrate --fake-initial if ! python manage.py migrate --check @@ -19,7 +19,6 @@ if ! 
python manage.py migrate --check echo "Issue with migration exiting" exit 1 fi -python manage.py createcachetable # Collect static files python manage.py collectstatic --noinput echo "------------------------------" diff --git a/docker/env_file_app_ci b/docker/env_file_app_ci index dc9b7a732c..acb8e204b0 100644 --- a/docker/env_file_app_ci +++ b/docker/env_file_app_ci @@ -35,7 +35,6 @@ ELASTICSEARCH_DSL_NO_OF_REPLICAS=0 ELASTICSEARCH_BI_ENABLED=False ELASTICSEARCH_BI_HOST= -ELASTICSEARCH_SSL_CERTIFICATE_FILE_NAME=elastisearch.crt ELASTICSEARCH_BI_INDEX=intelowl-bi diff --git a/docker/env_file_app_template b/docker/env_file_app_template index 946e12156a..ec56bc5c23 100644 --- a/docker/env_file_app_template +++ b/docker/env_file_app_template @@ -61,7 +61,6 @@ ELASTICSEARCH_DSL_NO_OF_REPLICAS=0 ELASTICSEARCH_BI_ENABLED=False ELASTICSEARCH_BI_HOST= -ELASTICSEARCH_SSL_CERTIFICATE_FILE_NAME=elastisearch.crt ELASTICSEARCH_BI_INDEX=intelowl-bi # Test tokens diff --git a/docker/postgres.override.yml b/docker/postgres.override.yml index af50ca8365..f1dcec6bc5 100644 --- a/docker/postgres.override.yml +++ b/docker/postgres.override.yml @@ -11,10 +11,10 @@ services: - ./env_file_postgres healthcheck: test: [ "CMD-SHELL", "pg_isready -U $$POSTGRES_USER -d $$POSTGRES_DB" ] - interval: 10s - timeout: 5s - retries: 5 - start_period: 5s + interval: 5s + timeout: 2s + retries: 6 + start_period: 3s uwsgi: diff --git a/docker/rabbitmq.override.yml b/docker/rabbitmq.override.yml index 7f4d06429d..d59fe20500 100644 --- a/docker/rabbitmq.override.yml +++ b/docker/rabbitmq.override.yml @@ -13,4 +13,12 @@ services: environment: - BROKER_URL="amqp://guest:guest@rabbitmq:5672" depends_on: - - rabbitmq \ No newline at end of file + - rabbitmq + + celery_worker_default: + environment: + - BROKER_URL="amqp://guest:guest@rabbitmq:5672" + + celery_beat: + environment: + - BROKER_URL="amqp://guest:guest@rabbitmq:5672" diff --git a/docker/redis.override.yml b/docker/redis.override.yml index 33efbb74e5..8fd0947619 100644 --- a/docker/redis.override.yml +++ b/docker/redis.override.yml @@ -26,4 +26,17 @@ services: expose: - "6379" healthcheck: - test: test $$(redis-cli -h '127.0.0.1' ping) = 'PONG' \ No newline at end of file + test: test $$(redis-cli -h '127.0.0.1' ping) = 'PONG' + + celery_beat: + depends_on: + redis: + condition: service_healthy + environment: + - BROKER_URL=redis://redis:6379/1 + - WEBSOCKETS_URL=redis://redis:6379/0 + + celery_worker_default: + environment: + - BROKER_URL=redis://redis:6379/1 + - WEBSOCKETS_URL=redis://redis:6379/0 \ No newline at end of file diff --git a/docker/sqs.override.yml b/docker/sqs.override.yml index 4fce1efd89..876d2eb078 100644 --- a/docker/sqs.override.yml +++ b/docker/sqs.override.yml @@ -3,4 +3,14 @@ services: uwsgi: environment: - AWS_SQS=True - - BROKER_URL=sqs:// \ No newline at end of file + - BROKER_URL=sqs:// + + celery_beat: + environment: + - AWS_SQS=True + - BROKER_URL=sqs:// + + celery_worker_default: + environment: + - AWS_SQS=True + - BROKER_URL=sqs:// diff --git a/docker/test.override.yml b/docker/test.override.yml index 104a45af98..ae02816d03 100644 --- a/docker/test.override.yml +++ b/docker/test.override.yml @@ -7,8 +7,7 @@ services: dockerfile: docker/Dockerfile args: REPO_DOWNLOADER_ENABLED: ${REPO_DOWNLOADER_ENABLED} - WATCHMAN: "true" - PYELASTIC_VERSION: ${PYELASTIC_VERSION:-7.4.1} + WATCHMAN: true PYCTI_VERSION: ${PYCTI_VERSION:-5.10.0} image: intelowlproject/intelowl:test volumes: diff --git a/docker/traefik.override.yml b/docker/traefik.override.yml 
index d19ec211a2..6bbff6d4c7 100644 --- a/docker/traefik.override.yml +++ b/docker/traefik.override.yml @@ -25,7 +25,6 @@ services: nginx: depends_on: - - uwsgi - traefik labels: - "traefik.enable=true" diff --git a/docs/source/Advanced-Configuration.md b/docs/source/Advanced-Configuration.md index 5817559501..727671d28f 100644 --- a/docs/source/Advanced-Configuration.md +++ b/docs/source/Advanced-Configuration.md @@ -25,6 +25,7 @@ This page includes details about some advanced features that Intel Owl provides - [Manual usage](#manual-usage) ## ElasticSearch +Right now only ElasticSearch v8 is supported. ### DSL IntelOwl makes use of [django-elasticsearch-dsl](https://django-elasticsearch-dsl.readthedocs.io/en/latest/about.html) to index Job results into elasticsearch. The `save` and `delete` operations are auto-synced so you always have the latest data in ES. @@ -40,9 +41,7 @@ Intel Owl provides a Kibana's "Saved Object" configuration (with example dashboa 1. Setup [Elastic Search and Kibana](https://hub.docker.com/r/nshou/elasticsearch-kibana/) and say it is running in a docker service with name `elasticsearch` on port `9200` which is exposed to the shared docker network. (Alternatively, you can spin up a local Elastic Search instance, by appending `--elastic` to the `./start` command. Note that the local Elastic Search instance consumes a large amount of memory, and hence having >=16GB is recommended.) 2. In the `env_file_app`, we set `ELASTICSEARCH_DSL_ENABLED` to `True` and `ELASTICSEARCH_DSL_HOST` to `elasticsearch:9200`. -3. Configure the version of the ElasticSearch Library used [depending on the version](https://django-elasticsearch-dsl.readthedocs.io/en/latest/about.html#features) of our Elasticsearch server. This is required for compatibility. To do that, you can leverage the option `--pyelastic-version` of the `./start` script. The default value of that parameter indicates the version that would be installed by default. -4. Rebuild the docker images with `./start test build --pyelastic-version x.x.x` (required only if you changed the default value of `--pyelastic-version`) -5. Now start the docker containers and execute, +3. Now start the docker containers and execute ```bash docker exec -ti intelowl_uwsgi python manage.py search_index --rebuild @@ -52,7 +51,7 @@ This will build and populate all existing job objects into the `jobs` index. ### Business Intelligence -IntelOwl makes use of [elasticsearch-py](https://elasticsearch-py.readthedocs.io/en/7.x/index.html) to store data that can be used for Business Intelligence purpose. +IntelOwl makes use of [elasticsearch-py](https://elasticsearch-py.readthedocs.io/en/8.x/index.html) to store data that can be used for Business Intelligence purposes. Since plugin reports are deleted periodically, this feature allows saving a small amount of data indefinitely to keep track of how analyzers perform and user usage. At the moment, the following information is sent to elastic: - application name @@ -65,16 +64,13 @@ At the moment, the following information are sent to elastic: - parameters Documents are saved in the `ELASTICSEARCH_BI_INDEX-%YEAR-%MONTH`, allowing you to manage the retention accordingly. -To activate this feature, it is necessary to set `ELASTICSEARCH_BI_ENABLED` -to `True` in the `env_file_app` and +To activate this feature, it is necessary to set `ELASTICSEARCH_BI_ENABLED` to `True` in the `env_file_app` and `ELASTICSEARCH_BI_HOST` to `elasticsearch:9200` or your elasticsearch server.
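For example, assuming the `elasticsearch` docker service described above, the relevant `env_file_app` entries would look like the following (`ELASTICSEARCH_BI_INDEX` already defaults to `intelowl-bi` in the template):

```bash
ELASTICSEARCH_BI_ENABLED=True
ELASTICSEARCH_BI_HOST=elasticsearch:9200
# documents will be indexed as ELASTICSEARCH_BI_INDEX-%YEAR-%MONTH
ELASTICSEARCH_BI_INDEX=intelowl-bi
```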
-At last, you have to copy your ssl certificate in the `configuration` folder -and set `ELASTICSEARCH_SSL_CERTIFICATE_FILE_NAME` to your certificate file name. An [index template](https://github.com/intelowlproject/IntelOwl/configuration/elastic_search_mappings/intel_owl_bi.json) is created after the first bulk submission of reports. If you want to use kibana to visualize your data/make dashboards, you must create an index pattern: -Go to Kibana -> Management -> Index Patterns -> search for your index and use as time field `timestamp` +Go to Kibana -> Discover -> Stack Management -> Index Patterns -> search for your index and use as time field `timestamp` ## Authentication options @@ -192,6 +188,8 @@ Only FIFO queues are supported. If you want to use a remote message broker (like an `ElasticCache` or `AmazonMQ` instance), you must populate the `BROKER_URL` environment variable. +It is possible to use [task priority](https://docs.celeryq.dev/en/stable/userguide/routing.html#special-routing-options) inside IntelOwl: each User has a default priority of 10, and robot users (like the Ingestors) have a priority of 7. +You can customize these priorities inside Django Admin, in the `Authentication.User Profiles` section. #### Websockets diff --git a/docs/source/Advanced-Usage.md b/docs/source/Advanced-Usage.md index 4d66212c38..f0200bb9eb 100644 --- a/docs/source/Advanced-Usage.md +++ b/docs/source/Advanced-Usage.md @@ -234,11 +234,6 @@ Some analyzers could require a special configuration: - The `repositories` value is what will be used to actually run the analysis: if you have added private repositories, remember to add the url in `repositories` too! - You can add local rules inside the directory at `/opt/deploy/files_required/yara/YOUR_USERNAME/custom_rules/`. Please remember that these rules are not synced in a cluster deploy: for this reason it is advised to upload them on GitHub and use the `repositories` or `private_repositories` attributes. -- `DNS0_rrsets_name` and `DNS0_rrsets_data` ([DNS0 API](https://docs.dns0.eu/dns-api/rrsets)): - - Both these analyzers have a default parameter named `direction` that is used to dispatch the type of query to run. - - The value `right` for this parameter runs the query using `data` API parameter. Otherwise, if the parameter value is `left` it runs the query using the `name` API parameter. - - This parameter should not be changed from default value. - ## Notifications Since v4, IntelOwl integrated the notification system from the `certego_saas` package, allowing the admins to create notifications that every user will be able to see. diff --git a/docs/source/Contribute.md b/docs/source/Contribute.md index 61b101d6b1..59afe2c685 100644 --- a/docs/source/Contribute.md +++ b/docs/source/Contribute.md @@ -259,7 +259,7 @@ After having written the new python module, you have to remember to: 6. *Maximum tlp: maximum tlp to allow the run on the connector 7. *Run on failure: if the connector should be run even if the job fails -### Hot to add a new Ingestor +### How to add a new Ingestor 1. Put the module in the `ingestors` directory 2. Remember to use `_monkeypatch()` in its class to create automated tests for the new ingestor. This is a trick to have tests in the same class of its ingestor. 3. Create the configuration inside django admin in `Ingestors_manager/IngestorConfigs` (* = mandatory, ~ = mandatory on conditions) @@ -271,6 +271,24 @@ After having written the new python module, you have to remember to: 6. 
*Playbook to Execute: Playbook that **will** be executed on every IOC retrieved 7. *Schedule: Crontab object that describes the schedule of the ingestor. You are able to create a new one by clicking the `plus` symbol. +### How to add a new Pivot +1. Put the module in the `pivots` directory +2. Remember to use `_monkeypatch()` in its class to create automated tests for the new pivot. This is a trick to have tests in the same class of its pivot. +3. Create the configuration inside django admin in `Pivots_manager/PivotConfigs` (* = mandatory, ~ = mandatory on conditions) + 1. *Name: specific name of the configuration + 2. *Python module: . + 3. *Description: description of the configuration + 4. *Routing key: celery queue that will be used + 5. *Soft_time_limit: maximum time for the task execution + 6. *Playbook to Execute: Playbook that **will** be executed in the Job generated by the Pivot + +Most of the time you don't need to create a new Pivot Module. There are already some base modules that can be extended. +The most important ones are the following 2: +1. `AnyCompare`: use this module if you want to create a custom Pivot from a specific value extracted from the results of the analyzers/connectors. How? You should populate the parameter `field_to_compare` with the dotted path to the field you would like to extract the value from. +2. `SelfAnalyzable`: use this module if you want to create a custom Pivot that would analyze the same observable/file again. + + + ### How to add a new Visualizer #### Configuration @@ -315,32 +333,32 @@ To do so, some utility classes have been made: VisualizablePage A single page of the final report, made of different levels. Each page added is represented as a new tab in frontend. - Visualizable Page example + Visualizable Page example VisualizableLevel Each level corresponds to a line in the final frontend visualizations. Every level is made of a VisualizableHorizontalList. - Visualizable Level example + Visualizable Level example VisualizableHorizontalList An horizontal list of visualizable elements. In the example there is an horizontal list of vertical lists. - Visualizable Horizontal List Example + Visualizable Horizontal List Example VisualizableVerticalList A vertical list made of a name, a title, and the list of elements. - Visualizable Vertical List Example + Visualizable Vertical List Example VisualizableBool The representation of a boolean value. It can be enabled or disabled with colors. - Visualizable Bool example + Visualizable Bool example VisualizableTitle The representation of a tuple, composed of a title and a value. - Visualizable Title example + Visualizable Title example VisualizableBase diff --git a/docs/source/Installation.md b/docs/source/Installation.md index db2fe339eb..70dcccad7e 100644 --- a/docs/source/Installation.md +++ b/docs/source/Installation.md @@ -320,13 +320,15 @@ The database migration procedure is as follows: - You have IntelOwl version 5.x.x up and running - Bring down the application (you can use the start script or manually concatenate your docker compose configuration ) - Go inside the docker folder `cd docker` -- Bring only the postgres 12 container up `docker run -d --name intelowl_postgres_12 -v intelowl_postgres_data:/var/lib/postgresql/data/ --env-file env_file_postgres library/postgres:12-alpine ` -- Dump the entire database. 
You need the user and the database that you configured during startup for this `docker exec -t intelowl_postgres_12 pg_dump -U $POSTGRES_USER -d $POSTGRES_DB --no-owner> /tmp/dump_intelowl.sql ` -- Remove the backup container `docker rm intelowl_postgres_12` -- Remove the postgres volume `docker volume rm intelowl_postgres_data` -- Start the intermediary postgres 16 container `docker run -d --name intelowl_postgres_16 -v postgres_data:/var/lib/postgresql/data/ --env-file env_file_postgres library/postgres:16-alpine ` -- Add the data to the volume `cat /tmp/dump_postgres.sql| docker exec -i intelowl_postgres_16 psql -U $POSTGRES_USER -d $POSTGRES_PASSWORD` -- Remove the intermediary container `docker rm intelowl_postgres_16` +- Bring only the postgres 12 container up `docker run -d --name intelowl_postgres_12 -v intel_owl_postgres_data:/var/lib/postgresql/data/ --env-file env_file_postgres library/postgres:12-alpine` +- Dump the entire database. You need the user and the database that you configured during startup for this `docker exec -t intelowl_postgres_12 pg_dump -U <POSTGRES_USER> -d <POSTGRES_DB> --no-owner > /tmp/dump_intelowl.sql` +- Stop the container `docker container stop intelowl_postgres_12` +- Remove the backup container `docker container rm intelowl_postgres_12` +- Remove the postgres volume `docker volume rm intel_owl_postgres_data` <------------- remove old data; this is not strictly necessary because the new postgres uses a different volume name +- Start the intermediary postgres 16 container `docker run -d --name intelowl_postgres_16 -v intelowl_postgres_data:/var/lib/postgresql/data/ --env-file env_file_postgres library/postgres:16-alpine` +- Add the data to the volume `cat /tmp/dump_intelowl.sql | docker exec -i intelowl_postgres_16 psql -U <POSTGRES_USER> -d <POSTGRES_DB>` +- Stop the intermediary container `docker container stop intelowl_postgres_16` +- Remove the intermediary container `docker container rm intelowl_postgres_16` - Update IntelOwl to the latest version - Bring the application back up again (you can use the start script or manually concatenate your docker compose configuration) diff --git a/docs/source/Usage.md b/docs/source/Usage.md index 1457d87501..bcf5383091 100644 --- a/docs/source/Usage.md +++ b/docs/source/Usage.md @@ -82,8 +82,10 @@ The following is the list of the available analyzers you can run out-of-the-box. * `ELF_Info`: static ELF analysis with [pyelftools](https://github.com/eliben/pyelftools) and [telfhash](https://github.com/trendmicro/telfhash) * `File_Info`: static generic File analysis (hashes, magic and [exiftool](https://exiftool.org/)) * `Floss`: [Mandiant Floss](https://github.com/mandiant/flare-floss) Obfuscated String Solver in files +* `Hfinger`: create fingerprints of malware HTTP requests using [Hfinger](https://github.com/CERT-Polska/hfinger) * `PE_Info`: static PE analysis with [pefile](https://github.com/mlodic/pefile) * `PEframe_Scan`: Perform static analysis on Portable Executable malware and malicious MS Office documents with [PeFrame](https://github.com/guelfoweb/peframe) +* `Permhash`: create a hash of manifest permissions found in APK, Android manifest, Chrome extensions or Chrome extension manifest using [Permhash](https://github.com/google/permhash) * `PDF_Info`: static PDF analysis ([peepdf](https://github.com/jesparza/peepdf) + [pdfid](https://github.com/mlodic/pdfid)) * `Qiling_Linux`: [Qiling](https://github.com/qilingframework/qiling) qiling linux binary emulation. 
* `Qiling_Linux_Shellcode`: [Qiling](https://github.com/qilingframework/qiling) qiling linux shellcode emulation. @@ -148,6 +150,7 @@ The following is the list of the available analyzers you can run out-of-the-box. - `YARAify_File_Scan`: scan a file against public and non-public YARA and ClamAV signatures in [YARAify](https://yaraify.abuse.ch/) public service - `YARAify_File_Search`: scan an hash against [YARAify](https://yaraify.abuse.ch/) database - `Zippy_scan` : [Zippy](https://github.com/thinkst/zippy): Fast method to classify text as AI or human-generated; takes in `lzma`,`zlib`,`brotli` as input based engines; `ensemble` being default. +- `Blint`: [Blint](https://github.com/owasp-dep-scan/blint) is a Binary Linter that checks the security properties and capabilities of your executables. Supported binary formats: - Android (apk, aab) - ELF (GNU, musl) - PE (exe, dll) - Mach-O (x64, arm64) ##### Observable analyzers (ip, domain, url, hash) ###### Internal tools @@ -159,6 +162,7 @@ The following is the list of the available analyzers you can run out-of-the-box. ###### External services * `AbuseIPDB`: check if an ip was reported on [AbuseIPDB](https://www.abuseipdb.com/) +* `Abusix`: get abuse contacts of an IP address from [Abusix](https://abusix.com/contact-db/) * `BGP Ranking`: [BGP-Ranking](https://github.com/D4-project/BGP-Ranking) provides a way to collect such malicious activities, aggregate the information per ASN and provide a ranking model to rank the ASN from the most malicious to the less malicious ASN. * `Anomali_Threatstream_PassiveDNS`: Return information from passive dns of Anomali. On [Anomali Threatstream](https://www.anomali.com/products/threatstream) PassiveDNS Api. * `Auth0`: scan an IP against the Auth0 API @@ -176,9 +180,6 @@ The following is the list of the available analyzers you can run out-of-the-box. * `DNSDB`: scan an observable against the [Passive DNS Farsight Database](https://www.farsightsecurity.com/solutions/dnsdb/) (support both v1 and v2 versions) * `DNS0_EU`: Retrieve current domain resolution with DNS0.eu DoH (DNS over HTTPS) * `DNS0_EU_Malicious_Detector`: Check if a domain or an url is marked as malicious in DNS0.eu database ([Zero](https://www.dns0.eu/zero) service) -* `DNS0_names`: Run advanced searches on billions of current and historical domain names. ([DNS0 /names](https://docs.dns0.eu/dns-api/names)) -* `DNS0_rrsets_data`: Query billions of current and historical DNS resource records sets. Performs right-hand side matching. ([DNS0 /rrsets](https://docs.dns0.eu/dns-api/rrsets)) -* `DNS0_rrsets_name`: Query billions of current and historical DNS resource records sets. Performs left-hand side matching. ([DNS0 /rrsets](https://docs.dns0.eu/dns-api/rrsets)) * `DocGuard_Get`: check if an hash was analyzed on DocGuard. [DocGuard](https://www.docguard.io) * `Feodo_Tracker`: [Feodo Tracker](https://feodotracker.abuse.ch/) offers various blocklists, helping network owners to protect their users from Dridex and Emotet/Heodo. * `FileScan_Search`: Finds reports and uploaded files by various tokens, like hash, filename, verdict, IOCs etc via [FileScan.io API](https://www.filescan.io/api/docs). @@ -189,12 +190,14 @@ The following is the list of the available analyzers you can run out-of-the-box. 
* `GreedyBear`: scan an IP or a domain against the [GreedyBear](https://greedybear.honeynet.org/) API (requires API key) * `GreyNoise`: scan an IP against the [Greynoise](https://www.greynoise.io/) API (requires API key) * `GreyNoiseCommunity`: scan an IP against the [Community Greynoise API](https://www.greynoise.io/) (requires API key) +* `Greynoise_Labs`: scan an IP against the [Greynoise API](https://www.greynoise.io/) (requires an authentication token, which can be obtained from cookies on the Greynoise website after launching the playground from [here](https://api.labs.greynoise.io/)) * `HashLookupServer_Get_Observable`: check if a md5 or sha1 is available in the database of [known file hosted by CIRCL](https://github.com/adulau/hashlookup-server) * `HoneyDB_Get`: [HoneyDB](https://honeydb.io/) IP lookup service * `HoneyDB_Scan_Twitter`: scan an IP against HoneyDB.io's Twitter Threat Feed * `Hunter_How`: Scans IP and domain against [Hunter_How API](https://hunter.how/search-api). * `Hunter_Io`: Scans a domain name and returns a set of data about the organisation, the email address found and additional information about the people owning those email addresses. * `HybridAnalysis_Get_Observable`: search an observable in the [HybridAnalysis](https://www.hybrid-analysis.com/) sandbox reports +* `IP2WHOIS`: [API Docs](https://www.ip2location.io/ip2whois-documentation) IP2Location.io IP2WHOIS Domain WHOIS API helps users to obtain domain information and WHOIS records by using a domain name. * `IPQS_Fraud_And_Risk_Scoring`: Scan an Observable against [IPQualityscore](https://www.ipqualityscore.com/) * `InQuest_DFI`: Deep File Inspection by [InQuest Labs](https://labs.inquest.net/dfi) * `InQuest_IOCdb`: Indicators of Compromise Database by [InQuest Labs](https://labs.inquest.net/iocdb) @@ -297,6 +300,8 @@ The following is the list of the available connectors. You can also navigate the - `OpenCTI`: automatically creates an observable and a linked report on your OpenCTI instance, linking the successful analysis on IntelOwl. - `YETI`: YETI = Your Everyday Threat Intelligence. Find or create an observable on YETI, linking the successful analysis on IntelOwl. - `Slack`: Send the analysis link to a Slack channel (useful for external notifications) +- `EmailSender`: Send a generic email. +- `AbuseSubmitter`: Send an email to request the takedown of a malicious domain. ### Pivots @@ -307,10 +312,31 @@ Pivots are designed to create a job from another job. This plugin allows the use This is a "SOAR" feature that allows the users to connect multiple analyses together. -Right now the support for this kind of plugin in the GUI is very limited, while the backend is fully operative. We are working on the frontend. - #### List of pre-built Pivots -None +- `TakedownRequestToAbuseIp`: This Plugin leverages results from DNS resolver analyzers to extract a valid IP address to pivot to the Abusix analyzer. +- `AbuseIpToSubmission`: This Plugin leverages results from the Abusix analyzer to extract the abuse contacts of an IP address to pivot to the AbuseSubmitter connector. + +You can build your own custom Pivot with your own logic in just a few lines of code. See the [Contribute](https://intelowl.readthedocs.io/en/latest/Contribute.html#how-to-add-a-new-pivot) section for more info. 
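For illustration, here is a minimal sketch of what such a module could look like, building on the `AnyCompare` base module described in the Contribute section. The class name, module location and dotted field path below are hypothetical, and in practice `field_to_compare` is populated through the `PivotConfigs` entry in the Django admin:

```python
# pivots/resolved_ip.py - hypothetical module name and location
from api_app.pivots_manager.pivots.any_compare import AnyCompare  # assumed import path


class ResolvedIpPivot(AnyCompare):
    """Pivot from a DNS resolution report to a new Job on the extracted IP.

    `field_to_compare` is a configured parameter holding the dotted path of
    the value to extract, e.g. "report.resolutions.0.ip" (illustrative);
    the Playbook to run on the extracted value is set in the PivotConfig.
    Remember to use `_monkeypatch()` in the class to attach automated tests.
    """
```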
+ +#### Creating Pivots from the GUI + +From the GUI, the users can pivot in two ways: +- If a Job executed a [Visualizer](#visualizers), it is possible to select an extracted field and analyze its value by clicking the "Pivot" button (see the following image). In this way, the user is able to "jump" from one indicator to another. +![img.png](../static/pivot_job_report.png) + +- Starting from an already existing [Investigation](#investigations-framework), it is possible to select a Job block and click the "Pivot" button to analyze the same observable again, usually choosing another [Playbook](#playbooks) (see the following image) +![img.png](../static/pivot_investigation_report.png) + +In both cases, the user is redirected to the Scan Page, which is pre-filled with the selected observable. Then the user can select the [Playbook](#playbooks) to execute in the new job. +![img.png](../static/pivot_scan_page.png) + +After the new Job is started, a new [Investigation](#investigations-framework) will be created (if it does not already exist) and both the jobs will be added to the same Investigation. + +In the following image you can find an example of an [Investigation](#investigations-framework) composed of 3 pivots generated manually: +* leveraging the first way to create a Pivot, the 2 Jobs that analyzed IP addresses have been generated from the first `test\.com` Job +* leveraging the second way to create a Pivot, the second `test\.com` analysis was created with a different Playbook. + +![img.png](../static/pivot_investigation.png) ### Visualizers @@ -370,6 +396,9 @@ The following is the list of the available pre-built playbooks. You can also nav - `Popular_URL_Reputation_Services`: Collection of the most popular and free reputation analyzers for URLs and Domains - `Popular_IP_Reputation_Services`: Collection of the most popular and free reputation analyzers for IP addresses - `Dns`: A playbook containing all dns providers +- `Takedown_Request`: Start an investigation to request the takedown of a malicious domain. An email will be sent to the abuse contacts found for the domain +- `Abuse_IP`: Playbook containing the Abusix analyzer. It is executed after the Takedown_Request playbook +- `Send_Abuse_Email`: Playbook containing the AbuseSubmitter connector to send an email requesting the takedown of a malicious domain. It is executed after the Abuse_IP playbook #### Playbooks creation and customization @@ -552,6 +581,17 @@ These is how every available TLP value behaves once selected for an analysis exe 4. `RED`: disable analyzers that could impact privacy, limit view permissions to my group and do not use any external service +### Running a plugin +A plugin can be run when all of the following requirements have been satisfied: +1. All the required parameters of the plugin have been configured +2. The plugin is not disabled +3. The plugin is not disabled for the user's organization +4. If the plugin has a health check schedule, the last check has to be successful +5. The TLP selected to run the plugin cannot be higher than the maximum TLP configured for that plugin +6. The observable classification or the file mimetype has to be supported by the plugin + + + ## Investigations Framework *Investigations* are a new framework introduced in IntelOwl v6 with the goal of allowing the users to connect the analyses they do with each other. 
@@ -568,7 +608,7 @@ Things to know about the framework: *Investigations* are created in 2 ways: * automatically: * if you scan multiple observables at the same time, a new investigation will be created by default and all the observables they will be automatically connected to the same investigation. - * if you run a Job with a Playbook which contains a Pivot that triggers another Job, a new investigation will be created and both the Jobs will be added to the same investigation. + * if you run a Job with a Playbook which contains a [Pivot](#pivots) that triggers another Job, a new investigation will be created and both the Jobs will be added to the same investigation. See how you can create a new [Pivot manually from the GUI](#creating-pivots-from-the-gui). * manually: by clicking on the button in the "History" section you can create an Investigation from scratch without any job attached (see following image) ![img.png](../static/create_investigation.png) diff --git a/docs/source/schema.yml b/docs/source/schema.yml index e6adf3764a..62bc131470 100644 --- a/docs/source/schema.yml +++ b/docs/source/schema.yml @@ -1,7 +1,7 @@ openapi: 3.0.3 info: title: IntelOwl API specification - version: 6.0.1 + version: 6.0.2 paths: /api/analyze_file: post: diff --git a/docs/static/pivot_investigation.png b/docs/static/pivot_investigation.png new file mode 100644 index 0000000000..9ab41a3d70 Binary files /dev/null and b/docs/static/pivot_investigation.png differ diff --git a/docs/static/pivot_investigation_report.png b/docs/static/pivot_investigation_report.png new file mode 100644 index 0000000000..9753a5b508 Binary files /dev/null and b/docs/static/pivot_investigation_report.png differ diff --git a/docs/static/pivot_job_report.png b/docs/static/pivot_job_report.png new file mode 100644 index 0000000000..e7409a1c69 Binary files /dev/null and b/docs/static/pivot_job_report.png differ diff --git a/docs/static/pivot_scan_page.png b/docs/static/pivot_scan_page.png new file mode 100644 index 0000000000..dd516d91b0 Binary files /dev/null and b/docs/static/pivot_scan_page.png differ diff --git a/frontend/package-lock.json b/frontend/package-lock.json index def4af3dec..20a8b6d681 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -1,19 +1,19 @@ { "name": "intelowl", - "version": "5.0.0", + "version": "6.0.0", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "intelowl", - "version": "5.0.0", + "version": "6.0.0", "dependencies": { "@certego/certego-ui": "^0.1.13", "@dagrejs/dagre": "^1.0.4", "axios": "^1.6.0", "axios-hooks": "^3.1.5", "bootstrap": "^5.3.2", - "classnames": "^2.3.1", + "classnames": "^2.5.1", "flag-icons": "^7.1.0", "formik": "^2.4.5", "js-cookie": "^3.0.5", @@ -24,7 +24,7 @@ "react-error-boundary": "^4.0.11", "react-google-recaptcha": "^2.1.0", "react-icons": "^4.12.0", - "react-joyride": "^2.7.2", + "react-joyride": "^2.8.1", "react-json-tree": "^0.18.0", "react-markdown": "^8.0.7", "react-router-dom": "^6.22.0", @@ -34,7 +34,7 @@ "react-use": "^17.5.0", "reactflow": "^11.10.4", "reactstrap": "^9.2.1", - "recharts": "^2.8.0", + "recharts": "^2.12.6", "zustand": "^4.5.2" }, "devDependencies": { @@ -50,12 +50,12 @@ "eslint-config-prettier": "^9.1.0", "eslint-plugin-import": "^2.29.1", "eslint-plugin-jsx-a11y": "^6.8.0", - "eslint-plugin-react": "^7.33.2", + "eslint-plugin-react": "^7.34.1", "eslint-plugin-react-hooks": "^4.5.0", "jest": "^29.7.0", "jest-environment-jsdom": "^29.7.0", "prettier": "^3.2.5", - "sass": "^1.61.0", + "sass": "^1.77.0", 
"stylelint": "^14.9.1", "stylelint-config-prettier": "^9.0.3", "stylelint-config-standard-scss": "^4.0.0" @@ -2575,34 +2575,6 @@ "resolved": "https://registry.npmjs.org/@gilbarbara/deep-equal/-/deep-equal-0.3.1.tgz", "integrity": "sha512-I7xWjLs2YSVMc5gGx1Z3ZG1lgFpITPndpi8Ku55GeEIKpACCPQNS/OTqQbxgTCfq0Ncvcc+CrFov96itVh6Qvw==" }, - "node_modules/@gilbarbara/helpers": { - "version": "0.9.1", - "resolved": "https://registry.npmjs.org/@gilbarbara/helpers/-/helpers-0.9.1.tgz", - "integrity": "sha512-B6q4qruzaurfbpmdGK85SSgnI36pFuJlewTul9hWHUv7u8VGxDwjj8anxSfuPyDZ3ovXF1H6ifCVFHQqRV2+Gg==", - "dependencies": { - "@gilbarbara/types": "^0.2.2", - "is-lite": "^1.2.0" - } - }, - "node_modules/@gilbarbara/types": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/@gilbarbara/types/-/types-0.2.2.tgz", - "integrity": "sha512-QuQDBRRcm1Q8AbSac2W1YElurOhprj3Iko/o+P1fJxUWS4rOGKMVli98OXS7uo4z+cKAif6a+L9bcZFSyauQpQ==", - "dependencies": { - "type-fest": "^4.1.0" - } - }, - "node_modules/@gilbarbara/types/node_modules/type-fest": { - "version": "4.9.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.9.0.tgz", - "integrity": "sha512-KS/6lh/ynPGiHD/LnAobrEFq3Ad4pBzOlJ1wAnJx9N4EYoqFhMfLIBjUT2UEx4wg5ZE+cC1ob6DCSpppVo+rtg==", - "engines": { - "node": ">=16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/@humanwhocodes/config-array": { "version": "0.11.10", "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.10.tgz", @@ -5768,12 +5740,15 @@ } }, "node_modules/array-buffer-byte-length": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.0.tgz", - "integrity": "sha512-LPuwb2P+NrQw3XhxGc36+XSvuBPopovXYTR9Ew++Du9Yb/bx5AzBfrIsBoj0EZUifjQU+sHL21sseZ3jerWO/A==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.1.tgz", + "integrity": "sha512-ahC5W1xgou+KTXix4sAO8Ki12Q+jf4i0+tmk3sC+zgcynshkHxzpXdImBehiUYKKKDwvfFiJl1tZt6ewscS1Mg==", "dependencies": { - "call-bind": "^1.0.2", - "is-array-buffer": "^3.0.1" + "call-bind": "^1.0.5", + "is-array-buffer": "^3.0.4" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -5810,6 +5785,25 @@ "node": ">=8" } }, + "node_modules/array.prototype.findlast": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/array.prototype.findlast/-/array.prototype.findlast-1.2.5.tgz", + "integrity": "sha512-CVvd6FHg1Z3POpBLxO6E6zr+rSKEQ9L6rZHAaY7lLfhKsWYUBBOuMs0e9o24oopj6H+geRCX0YJ+TJLBK2eHyQ==", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "es-shim-unscopables": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/array.prototype.findlastindex": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.3.tgz", @@ -5880,28 +5874,41 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/array.prototype.tosorted": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/array.prototype.tosorted/-/array.prototype.tosorted-1.1.1.tgz", - "integrity": "sha512-pZYPXPRl2PqWcsUs6LOMn+1f1532nEoPTYowBtqLwAW+W8vSVhkIGnmOX1t/UQjD6YGI0vcD2B1U7ZFGQH9jnQ==", + "node_modules/array.prototype.toreversed": { + 
"version": "1.1.2", + "resolved": "https://registry.npmjs.org/array.prototype.toreversed/-/array.prototype.toreversed-1.1.2.tgz", + "integrity": "sha512-wwDCoT4Ck4Cz7sLtgUmzR5UV3YF5mFHUlbChCzZBQZ+0m2cl/DH3tKgvphv1nKgFsJ48oCSg6p91q2Vm0I/ZMA==", "dependencies": { "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", - "es-shim-unscopables": "^1.0.0", - "get-intrinsic": "^1.1.3" + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", + "es-shim-unscopables": "^1.0.0" + } + }, + "node_modules/array.prototype.tosorted": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/array.prototype.tosorted/-/array.prototype.tosorted-1.1.3.tgz", + "integrity": "sha512-/DdH4TiTmOKzyQbp/eadcCVexiCb36xJg7HshYOYJnNZFDj33GEv0P7GxsynpShhq4OLYJzbGcBDkLsDt7MnNg==", + "dependencies": { + "call-bind": "^1.0.5", + "define-properties": "^1.2.1", + "es-abstract": "^1.22.3", + "es-errors": "^1.1.0", + "es-shim-unscopables": "^1.0.2" } }, "node_modules/arraybuffer.prototype.slice": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.1.tgz", - "integrity": "sha512-09x0ZWFEjj4WD8PDbykUwo3t9arLn8NIzmmYEJFpYekOAQjpkGSyrQhNoRTcwwcFRu+ycWF78QZ63oWTqSjBcw==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.3.tgz", + "integrity": "sha512-bMxMKAjg13EBSVscxTaYA4mRc5t1UAXa2kXiGTNfZ079HIWXEkKmkgFrh/nJqamaLSrXO5H4WFFkPEaLJWbs3A==", "dependencies": { - "array-buffer-byte-length": "^1.0.0", - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "get-intrinsic": "^1.2.1", - "is-array-buffer": "^3.0.2", + "array-buffer-byte-length": "^1.0.1", + "call-bind": "^1.0.5", + "define-properties": "^1.2.1", + "es-abstract": "^1.22.3", + "es-errors": "^1.2.1", + "get-intrinsic": "^1.2.3", + "is-array-buffer": "^3.0.4", "is-shared-array-buffer": "^1.0.2" }, "engines": { @@ -5944,14 +5951,6 @@ "resolved": "https://registry.npmjs.org/async/-/async-3.2.4.tgz", "integrity": "sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ==" }, - "node_modules/asynciterator.prototype": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/asynciterator.prototype/-/asynciterator.prototype-1.0.0.tgz", - "integrity": "sha512-wwHYEIS0Q80f5mosx3L/dfG5t5rjEa9Ft51GTaNt862EnpyGHpgz2RkZvLPp1oF5TnAiTohkEKVEu8pQPJI7Vg==", - "dependencies": { - "has-symbols": "^1.0.3" - } - }, "node_modules/asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", @@ -5998,9 +5997,12 @@ } }, "node_modules/available-typed-arrays": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz", - "integrity": "sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", + "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==", + "dependencies": { + "possible-typed-array-names": "^1.0.0" + }, "engines": { "node": ">= 0.4" }, @@ -6627,12 +6629,18 @@ } }, "node_modules/call-bind": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", - "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "version": "1.0.7", + 
"resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", + "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", "dependencies": { - "function-bind": "^1.1.1", - "get-intrinsic": "^1.0.2" + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -6839,9 +6847,9 @@ "integrity": "sha512-sbpkOw6z413p+HDGcBENe498WM9woqWHiJxCq7nvmxe9WmrUmqfAcxpIwAiMtM5Q3AhYkzXcNQHqsWq0mND51g==" }, "node_modules/classnames": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/classnames/-/classnames-2.3.2.tgz", - "integrity": "sha512-CSbhY4cFEJRe6/GQzIk5qXZ4Jeg5pcsP7b5peFSDpffpe1cqjASH/n9UTjBwOp6XpMSTwQ8Za2K5V02ueA7Tmw==" + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/classnames/-/classnames-2.5.1.tgz", + "integrity": "sha512-saHYOzhIQs6wy2sVxTM6bUDsQO4F50V9RQ22qBpEdCW+I+/Wmke2HOl6lS6dTpdxVhb88/I6+Hs+438c3lfUow==" }, "node_modules/clean-css": { "version": "5.3.2", @@ -6868,6 +6876,14 @@ "node": ">=12" } }, + "node_modules/clsx": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz", + "integrity": "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==", + "engines": { + "node": ">=6" + } + }, "node_modules/co": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", @@ -7555,11 +7571,6 @@ "node": ">=8.0.0" } }, - "node_modules/css-unit-converter": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/css-unit-converter/-/css-unit-converter-1.1.2.tgz", - "integrity": "sha512-IiJwMC8rdZE0+xiEZHeru6YoONC4rfPMqGm2W85jMIbkFvv5nFTwJVFHam2eFrN6txmoUYFAFXiv8ICVeTO0MA==" - }, "node_modules/css-what": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/css-what/-/css-what-6.1.0.tgz", @@ -7901,6 +7912,54 @@ "node": ">=12" } }, + "node_modules/data-view-buffer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.1.tgz", + "integrity": "sha512-0lht7OugA5x3iJLOWFhWK/5ehONdprk0ISXqVFn/NFrDu+cuc8iADFrGQz5BnRK7LLU3JmkbXSxaqX+/mXYtUA==", + "dependencies": { + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/data-view-byte-length": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.1.tgz", + "integrity": "sha512-4J7wRJD3ABAzr8wP+OcIcqq2dlUKp4DVflx++hs5h5ZKydWMI6/D/fAot+yh6g2tHh8fLFTvNOaVN357NvSrOQ==", + "dependencies": { + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/data-view-byte-offset": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.0.tgz", + "integrity": "sha512-t/Ygsytq+R995EJ5PZlD4Cu56sWa8InXySaViRzw9apusqsOO2bQP+SbYzAhR0pFKoB+43lYy8rWban9JSuXnA==", + "dependencies": { + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/date-fns": { "version": "2.30.0", 
"resolved": "https://registry.npmjs.org/date-fns/-/date-fns-2.30.0.tgz", @@ -8052,16 +8111,19 @@ } }, "node_modules/define-data-property": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.1.tgz", - "integrity": "sha512-E7uGkTzkk1d0ByLeSc6ZsFS79Axg+m1P/VsgYsxHgiuc3tFSj+MjMIwe90FC4lOAZzNBdY7kkO2P2wKdsQ1vgQ==", + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", + "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", "dependencies": { - "get-intrinsic": "^1.2.1", - "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.0" + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" }, "engines": { "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/define-lazy-prop": { @@ -8447,49 +8509,56 @@ } }, "node_modules/es-abstract": { - "version": "1.22.1", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.22.1.tgz", - "integrity": "sha512-ioRRcXMO6OFyRpyzV3kE1IIBd4WG5/kltnzdxSCqoP8CMGs/Li+M1uF5o7lOkZVFjDs+NLesthnF66Pg/0q0Lw==", - "dependencies": { - "array-buffer-byte-length": "^1.0.0", - "arraybuffer.prototype.slice": "^1.0.1", - "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.2", - "es-set-tostringtag": "^2.0.1", + "version": "1.23.3", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.23.3.tgz", + "integrity": "sha512-e+HfNH61Bj1X9/jLc5v1owaLYuHdeHHSQlkhCBiTK8rBvKaULl/beGMxwrMXjpYrv4pz22BlY570vVePA2ho4A==", + "dependencies": { + "array-buffer-byte-length": "^1.0.1", + "arraybuffer.prototype.slice": "^1.0.3", + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.7", + "data-view-buffer": "^1.0.1", + "data-view-byte-length": "^1.0.1", + "data-view-byte-offset": "^1.0.0", + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "es-set-tostringtag": "^2.0.3", "es-to-primitive": "^1.2.1", - "function.prototype.name": "^1.1.5", - "get-intrinsic": "^1.2.1", - "get-symbol-description": "^1.0.0", + "function.prototype.name": "^1.1.6", + "get-intrinsic": "^1.2.4", + "get-symbol-description": "^1.0.2", "globalthis": "^1.0.3", "gopd": "^1.0.1", - "has": "^1.0.3", - "has-property-descriptors": "^1.0.0", - "has-proto": "^1.0.1", + "has-property-descriptors": "^1.0.2", + "has-proto": "^1.0.3", "has-symbols": "^1.0.3", - "internal-slot": "^1.0.5", - "is-array-buffer": "^3.0.2", + "hasown": "^2.0.2", + "internal-slot": "^1.0.7", + "is-array-buffer": "^3.0.4", "is-callable": "^1.2.7", - "is-negative-zero": "^2.0.2", + "is-data-view": "^1.0.1", + "is-negative-zero": "^2.0.3", "is-regex": "^1.1.4", - "is-shared-array-buffer": "^1.0.2", + "is-shared-array-buffer": "^1.0.3", "is-string": "^1.0.7", - "is-typed-array": "^1.1.10", + "is-typed-array": "^1.1.13", "is-weakref": "^1.0.2", - "object-inspect": "^1.12.3", + "object-inspect": "^1.13.1", "object-keys": "^1.1.1", - "object.assign": "^4.1.4", - "regexp.prototype.flags": "^1.5.0", - "safe-array-concat": "^1.0.0", - "safe-regex-test": "^1.0.0", - "string.prototype.trim": "^1.2.7", - "string.prototype.trimend": "^1.0.6", - "string.prototype.trimstart": "^1.0.6", - "typed-array-buffer": "^1.0.0", - "typed-array-byte-length": "^1.0.0", - "typed-array-byte-offset": "^1.0.0", - "typed-array-length": "^1.0.4", + "object.assign": "^4.1.5", + "regexp.prototype.flags": "^1.5.2", + "safe-array-concat": "^1.1.2", + "safe-regex-test": 
"^1.0.3", + "string.prototype.trim": "^1.2.9", + "string.prototype.trimend": "^1.0.8", + "string.prototype.trimstart": "^1.0.8", + "typed-array-buffer": "^1.0.2", + "typed-array-byte-length": "^1.0.1", + "typed-array-byte-offset": "^1.0.2", + "typed-array-length": "^1.0.6", "unbox-primitive": "^1.0.2", - "which-typed-array": "^1.1.10" + "which-typed-array": "^1.1.15" }, "engines": { "node": ">= 0.4" @@ -8503,6 +8572,25 @@ "resolved": "https://registry.npmjs.org/es-array-method-boxes-properly/-/es-array-method-boxes-properly-1.0.0.tgz", "integrity": "sha512-wd6JXUmyHmt8T5a2xreUwKcGPq6f1f+WwIJkijUqiGcJz1qqnZgP6XIK+QyIWU5lT7imeNxUll48bziG+TSYcA==" }, + "node_modules/es-define-property": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", + "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", + "dependencies": { + "get-intrinsic": "^1.2.4" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "engines": { + "node": ">= 0.4" + } + }, "node_modules/es-get-iterator": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/es-get-iterator/-/es-get-iterator-1.1.3.tgz", @@ -8524,24 +8612,27 @@ } }, "node_modules/es-iterator-helpers": { - "version": "1.0.15", - "resolved": "https://registry.npmjs.org/es-iterator-helpers/-/es-iterator-helpers-1.0.15.tgz", - "integrity": "sha512-GhoY8uYqd6iwUl2kgjTm4CZAf6oo5mHK7BPqx3rKgx893YSsy0LGHV6gfqqQvZt/8xM8xeOnfXBCfqclMKkJ5g==", + "version": "1.0.19", + "resolved": "https://registry.npmjs.org/es-iterator-helpers/-/es-iterator-helpers-1.0.19.tgz", + "integrity": "sha512-zoMwbCcH5hwUkKJkT8kDIBZSz9I6mVG//+lDCinLCGov4+r7NIy0ld8o03M0cJxl2spVf6ESYVS6/gpIfq1FFw==", "dependencies": { - "asynciterator.prototype": "^1.0.0", - "call-bind": "^1.0.2", + "call-bind": "^1.0.7", "define-properties": "^1.2.1", - "es-abstract": "^1.22.1", - "es-set-tostringtag": "^2.0.1", - "function-bind": "^1.1.1", - "get-intrinsic": "^1.2.1", + "es-abstract": "^1.23.3", + "es-errors": "^1.3.0", + "es-set-tostringtag": "^2.0.3", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", "globalthis": "^1.0.3", - "has-property-descriptors": "^1.0.0", - "has-proto": "^1.0.1", + "has-property-descriptors": "^1.0.2", + "has-proto": "^1.0.3", "has-symbols": "^1.0.3", - "internal-slot": "^1.0.5", + "internal-slot": "^1.0.7", "iterator.prototype": "^1.1.2", - "safe-array-concat": "^1.0.1" + "safe-array-concat": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" } }, "node_modules/es-module-lexer": { @@ -8549,25 +8640,36 @@ "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.3.0.tgz", "integrity": "sha512-vZK7T0N2CBmBOixhmjdqx2gWVbFZ4DXZ/NyRMZVlJXPa7CyFS+/a4QQsDGDQy9ZfEzxFuNEsMLeQJnKP2p5/JA==" }, + "node_modules/es-object-atoms": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.0.0.tgz", + "integrity": "sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/es-set-tostringtag": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.1.tgz", - "integrity": 
"sha512-g3OMbtlwY3QewlqAiMLI47KywjWZoEytKr8pf6iTC8uJq5bIAH52Z9pnQ8pVL6whrCto53JZDuUIsifGeLorTg==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.3.tgz", + "integrity": "sha512-3T8uNMC3OQTHkFUsFq8r/BwAXLHvU/9O9mE0fBc/MY5iq/8H7ncvO947LmYA6ldWw9Uh8Yhf25zu6n7nML5QWQ==", "dependencies": { - "get-intrinsic": "^1.1.3", - "has": "^1.0.3", - "has-tostringtag": "^1.0.0" + "get-intrinsic": "^1.2.4", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.1" }, "engines": { "node": ">= 0.4" } }, "node_modules/es-shim-unscopables": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz", - "integrity": "sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.2.tgz", + "integrity": "sha512-J3yBRXCzDu4ULnQwxyToo/OjdMx6akgVC7K6few0a7F/0wLtmKKN7I73AH5T2836UuXRqN7Qg+IIUw/+YJksRw==", "dependencies": { - "has": "^1.0.3" + "hasown": "^2.0.0" } }, "node_modules/es-to-primitive": { @@ -8928,26 +9030,28 @@ } }, "node_modules/eslint-plugin-react": { - "version": "7.33.2", - "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.33.2.tgz", - "integrity": "sha512-73QQMKALArI8/7xGLNI/3LylrEYrlKZSb5C9+q3OtOewTnMQi5cT+aE9E41sLCmli3I9PGGmD1yiZydyo4FEPw==", + "version": "7.34.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.34.1.tgz", + "integrity": "sha512-N97CxlouPT1AHt8Jn0mhhN2RrADlUAsk1/atcT2KyA/l9Q/E6ll7OIGwNumFmWfZ9skV3XXccYS19h80rHtgkw==", "dependencies": { - "array-includes": "^3.1.6", - "array.prototype.flatmap": "^1.3.1", - "array.prototype.tosorted": "^1.1.1", + "array-includes": "^3.1.7", + "array.prototype.findlast": "^1.2.4", + "array.prototype.flatmap": "^1.3.2", + "array.prototype.toreversed": "^1.1.2", + "array.prototype.tosorted": "^1.1.3", "doctrine": "^2.1.0", - "es-iterator-helpers": "^1.0.12", + "es-iterator-helpers": "^1.0.17", "estraverse": "^5.3.0", "jsx-ast-utils": "^2.4.1 || ^3.0.0", "minimatch": "^3.1.2", - "object.entries": "^1.1.6", - "object.fromentries": "^2.0.6", - "object.hasown": "^1.1.2", - "object.values": "^1.1.6", + "object.entries": "^1.1.7", + "object.fromentries": "^2.0.7", + "object.hasown": "^1.1.3", + "object.values": "^1.1.7", "prop-types": "^15.8.1", - "resolve": "^2.0.0-next.4", + "resolve": "^2.0.0-next.5", "semver": "^6.3.1", - "string.prototype.matchall": "^4.0.8" + "string.prototype.matchall": "^4.0.10" }, "engines": { "node": ">=4" @@ -8979,11 +9083,11 @@ } }, "node_modules/eslint-plugin-react/node_modules/resolve": { - "version": "2.0.0-next.4", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-2.0.0-next.4.tgz", - "integrity": "sha512-iMDbmAWtfU+MHpxt/I5iWI7cY6YVEZUQ3MBgPQ++XD1PELuJHIl82xBmObyP2KyQmkNB2dsqF7seoQQiAn5yDQ==", + "version": "2.0.0-next.5", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-2.0.0-next.5.tgz", + "integrity": "sha512-U7WjGVG9sH8tvjW5SmGbQuui75FiyjAX72HX15DwBBwF9dNiQZRQAg9nnPhYy+TUnE0+VcrttuvNI8oSxZcocA==", "dependencies": { - "is-core-module": "^2.9.0", + "is-core-module": "^2.13.0", "path-parse": "^1.0.7", "supports-preserve-symlinks-flag": "^1.0.0" }, @@ -10168,14 +10272,14 @@ } }, "node_modules/function.prototype.name": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.5.tgz", - "integrity": 
"sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==", + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.6.tgz", + "integrity": "sha512-Z5kx79swU5P27WEayXM1tBi5Ze/lbIyiNgU3qyXUOf9b2rgXYyF9Dy9Cx+IQv/Lc8WCG6L82zwUPpSS9hGehIg==", "dependencies": { "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "es-abstract": "^1.19.0", - "functions-have-names": "^1.2.2" + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", + "functions-have-names": "^1.2.3" }, "engines": { "node": ">= 0.4" @@ -10209,14 +10313,18 @@ } }, "node_modules/get-intrinsic": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.1.tgz", - "integrity": "sha512-2DcsyfABl+gVHEfCOaTrWgyt+tb6MSEGmKq+kI5HwLbIYgjgmMcV8KQ41uaKz1xxUcn9tJtgFbQUEVcEbd0FYw==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", + "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", "dependencies": { - "function-bind": "^1.1.1", - "has": "^1.0.3", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", "has-proto": "^1.0.1", - "has-symbols": "^1.0.3" + "has-symbols": "^1.0.3", + "hasown": "^2.0.0" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -10247,12 +10355,13 @@ } }, "node_modules/get-symbol-description": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz", - "integrity": "sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.2.tgz", + "integrity": "sha512-g0QYk1dZBxGwk+Ngc+ltRH2IBp2f7zBkBMBJZCDerh6EhlhSR6+9irMCuT/09zD6qkarHUSn529sK/yL4S27mg==", "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.1.1" + "call-bind": "^1.0.5", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4" }, "engines": { "node": ">= 0.4" @@ -10432,17 +10541,6 @@ "resolved": "https://registry.npmjs.org/harmony-reflect/-/harmony-reflect-1.6.2.tgz", "integrity": "sha512-HIp/n38R9kQjDEziXyDTuW3vvoxxyxjxFzXLrBr18uB47GnSt+G9D29fqrpM5ZkspMcPICud3XsBJQ4Y2URg8g==" }, - "node_modules/has": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", - "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", - "dependencies": { - "function-bind": "^1.1.1" - }, - "engines": { - "node": ">= 0.4.0" - } - }, "node_modules/has-bigints": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz", @@ -10460,20 +10558,20 @@ } }, "node_modules/has-property-descriptors": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz", - "integrity": "sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", "dependencies": { - "get-intrinsic": "^1.1.1" + "es-define-property": "^1.0.0" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, 
"node_modules/has-proto": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.1.tgz", - "integrity": "sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz", + "integrity": "sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==", "engines": { "node": ">= 0.4" }, @@ -10493,11 +10591,11 @@ } }, "node_modules/has-tostringtag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", - "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", "dependencies": { - "has-symbols": "^1.0.2" + "has-symbols": "^1.0.3" }, "engines": { "node": ">= 0.4" @@ -10507,9 +10605,9 @@ } }, "node_modules/hasown": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.0.tgz", - "integrity": "sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", "dependencies": { "function-bind": "^1.1.2" }, @@ -11008,12 +11106,12 @@ } }, "node_modules/internal-slot": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.5.tgz", - "integrity": "sha512-Y+R5hJrzs52QCG2laLn4udYVnxsfny9CpOhNhUvk/SSSVyF6T27FzRbF0sroPidSu3X8oEAkOn2K804mjpt6UQ==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.7.tgz", + "integrity": "sha512-NGnrKwXzSms2qUUih/ILZ5JBqNTSa1+ZmP6flaIp6KmSElgE9qdndzS3cqjrDovwFdmwsGsLdeFgB6suw+1e9g==", "dependencies": { - "get-intrinsic": "^1.2.0", - "has": "^1.0.3", + "es-errors": "^1.3.0", + "hasown": "^2.0.0", "side-channel": "^1.0.4" }, "engines": { @@ -11050,13 +11148,15 @@ } }, "node_modules/is-array-buffer": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.2.tgz", - "integrity": "sha512-y+FyyR/w8vfIRq4eQcM1EYgSTnmHXPqaF+IgzgraytCFq5Xh8lllDVmAZolPJiZttZLeFSINPYMaEJ7/vWUa1w==", + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.4.tgz", + "integrity": "sha512-wcjaerHw0ydZwfhiKbXJWLDY8A7yV7KhjQOpb83hGgGfId/aQa4TOvwyzn2PuswW2gPCYEL/nEAiSVpdOj1lXw==", "dependencies": { "call-bind": "^1.0.2", - "get-intrinsic": "^1.2.0", - "is-typed-array": "^1.1.10" + "get-intrinsic": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -11145,6 +11245,20 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/is-data-view": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.1.tgz", + "integrity": "sha512-AHkaJrsUVW6wq6JS8y3JnM/GJF/9cf+k20+iDzlSaJrinEo5+7vRiteOSwBhHRiAyQATN1AmY4hwzxJKPmYf+w==", + "dependencies": { + "is-typed-array": "^1.1.13" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/is-date-object": { "version": "1.0.5", 
"resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", @@ -11252,9 +11366,9 @@ "integrity": "sha512-51ypPSPCoTEIN9dy5Oy+h4pShgJmPCygKfyRCISBI+JoWT/2oJvK8QPxmwv7b/p239jXrm9M1mlQbyKJ5A152g==" }, "node_modules/is-negative-zero": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz", - "integrity": "sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.3.tgz", + "integrity": "sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw==", "engines": { "node": ">= 0.4" }, @@ -11363,11 +11477,14 @@ } }, "node_modules/is-shared-array-buffer": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz", - "integrity": "sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.3.tgz", + "integrity": "sha512-nA2hv5XIhLR3uVzDDfCIknerhx8XUKnstuOERPNNIinXG7v9u+ohXF67vxm4TPTEPU6lm61ZkwP3c9PCB97rhg==", "dependencies": { - "call-bind": "^1.0.2" + "call-bind": "^1.0.7" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -11413,11 +11530,11 @@ } }, "node_modules/is-typed-array": { - "version": "1.1.12", - "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.12.tgz", - "integrity": "sha512-Z14TF2JNG8Lss5/HMqt0//T9JeHXttXy5pH/DBU4vi98ozO2btxzq9MwYDZYnKwU8nRsz/+GVFVRDq3DkVuSPg==", + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.13.tgz", + "integrity": "sha512-uZ25/bUAlUY5fR4OKT4rZQEBrzQWYV9ZJYGGsUmEJ6thodVJ1HX64ePQ6Z0qPWP+m+Uq6e9UugrE38jeYsDSMw==", "dependencies": { - "which-typed-array": "^1.1.11" + "which-typed-array": "^1.1.14" }, "engines": { "node": ">= 0.4" @@ -15685,9 +15802,9 @@ } }, "node_modules/object-inspect": { - "version": "1.12.3", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.3.tgz", - "integrity": "sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g==", + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.1.tgz", + "integrity": "sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==", "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -15717,12 +15834,12 @@ } }, "node_modules/object.assign": { - "version": "4.1.4", - "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.4.tgz", - "integrity": "sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ==", + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.5.tgz", + "integrity": "sha512-byy+U7gp+FVwmyzKPYhW2h5l3crpmGsxl7X2s8y43IgxvG4g3QZ6CffDtsNQy1WsmZpQbO+ybo0AlW7TY6DcBQ==", "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", + "call-bind": "^1.0.5", + "define-properties": "^1.2.1", "has-symbols": "^1.0.3", "object-keys": "^1.1.1" }, @@ -15792,12 +15909,16 @@ } }, "node_modules/object.hasown": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/object.hasown/-/object.hasown-1.1.2.tgz", - "integrity": 
"sha512-B5UIT3J1W+WuWIU55h0mjlwaqxiE5vYENJXIXZ4VFe05pNYrkKuK0U/6aFcb0pKywYJh7IhfoqUfKVmrJJHZHw==", + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/object.hasown/-/object.hasown-1.1.4.tgz", + "integrity": "sha512-FZ9LZt9/RHzGySlBARE3VF+gE26TxR38SdmqOqliuTnl9wrKulaQs+4dee1V+Io8VfxqzAfHu6YuRgUy8OHoTg==", "dependencies": { - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -16189,6 +16310,14 @@ "url": "https://opencollective.com/popperjs" } }, + "node_modules/possible-typed-array-names": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.0.0.tgz", + "integrity": "sha512-d7Uw+eZoloe0EHDIYoe+bQ5WXnGMOpmiZFTuMWCwpjzzkL2nTjcKiAk4hh8TjnGye2TwWOk3UXucZ+3rbmBa8Q==", + "engines": { + "node": ">= 0.4" + } + }, "node_modules/postcss": { "version": "8.4.27", "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.27.tgz", @@ -18066,22 +18195,21 @@ "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==" }, "node_modules/react-joyride": { - "version": "2.7.2", - "resolved": "https://registry.npmjs.org/react-joyride/-/react-joyride-2.7.2.tgz", - "integrity": "sha512-AVzEweJxjQMc6hXUbJlH6St987GCmw0pkCSoz+X3XBMQmrk57FCMOrh1LvyMvW5GaT95C4D5oZpoaVjaOsgptg==", + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/react-joyride/-/react-joyride-2.8.1.tgz", + "integrity": "sha512-fVwCmoOvJsiFKKHn8mvPUYc4JUUkgAsQMvarpZDtFPTc4duj240b12+AB8+3NXlTYGZVnKNSTgFFzoSh9RxjmQ==", "dependencies": { "@gilbarbara/deep-equal": "^0.3.1", - "@gilbarbara/helpers": "^0.9.0", "deep-diff": "^1.0.2", "deepmerge": "^4.3.1", - "is-lite": "^1.2.0", + "is-lite": "^1.2.1", "react-floater": "^0.7.9", "react-innertext": "^1.1.5", "react-is": "^16.13.1", "scroll": "^3.0.1", "scrollparent": "^2.1.0", "tree-changes": "^0.11.2", - "type-fest": "^4.8.3" + "type-fest": "^4.15.0" }, "peerDependencies": { "react": "15 - 18", @@ -18097,9 +18225,9 @@ } }, "node_modules/react-joyride/node_modules/type-fest": { - "version": "4.9.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.9.0.tgz", - "integrity": "sha512-KS/6lh/ynPGiHD/LnAobrEFq3Ad4pBzOlJ1wAnJx9N4EYoqFhMfLIBjUT2UEx4wg5ZE+cC1ob6DCSpppVo+rtg==", + "version": "4.18.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.18.2.tgz", + "integrity": "sha512-+suCYpfJLAe4OXS6+PPXjW3urOS4IoP9waSiLuXfLgqZODKw/aWwASvzqE886wA0kQgGy0mIWyhd87VpqIy6Xg==", "engines": { "node": ">=16" }, @@ -18229,18 +18357,6 @@ "node": ">=0.10.0" } }, - "node_modules/react-resize-detector": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/react-resize-detector/-/react-resize-detector-8.1.0.tgz", - "integrity": "sha512-S7szxlaIuiy5UqLhLL1KY3aoyGHbZzsTpYal9eYMwCyKqoqoVLCmIgAgNyIM1FhnP2KyBygASJxdhejrzjMb+w==", - "dependencies": { - "lodash": "^4.17.21" - }, - "peerDependencies": { - "react": "^16.0.0 || ^17.0.0 || ^18.0.0", - "react-dom": "^16.0.0 || ^17.0.0 || ^18.0.0" - } - }, "node_modules/react-router": { "version": "6.22.0", "resolved": "https://registry.npmjs.org/react-router/-/react-router-6.22.0.tgz", @@ -19965,40 +20081,17 @@ } }, "node_modules/react-smooth": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/react-smooth/-/react-smooth-2.0.3.tgz", - "integrity": 
"sha512-yl4y3XiMorss7ayF5QnBiSprig0+qFHui8uh7Hgg46QX5O+aRMRKlfGGNGLHno35JkQSvSYY8eCWkBfHfrSHfg==", - "dependencies": { - "fast-equals": "^5.0.0", - "react-transition-group": "2.9.0" - }, - "peerDependencies": { - "prop-types": "^15.6.0", - "react": "^15.0.0 || ^16.0.0 || ^17.0.0 || ^18.0.0", - "react-dom": "^15.0.0 || ^16.0.0 || ^17.0.0 || ^18.0.0" - } - }, - "node_modules/react-smooth/node_modules/dom-helpers": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/dom-helpers/-/dom-helpers-3.4.0.tgz", - "integrity": "sha512-LnuPJ+dwqKDIyotW1VzmOZ5TONUN7CwkCR5hrgawTUbkBGYdeoNLZo6nNfGkCrjtE1nXXaj7iMMpDa8/d9WoIA==", - "dependencies": { - "@babel/runtime": "^7.1.2" - } - }, - "node_modules/react-smooth/node_modules/react-transition-group": { - "version": "2.9.0", - "resolved": "https://registry.npmjs.org/react-transition-group/-/react-transition-group-2.9.0.tgz", - "integrity": "sha512-+HzNTCHpeQyl4MJ/bdE0u6XRMe9+XG/+aL4mCxVN4DnPBQ0/5bfHWPDuOZUzYdMj94daZaZdCCc1Dzt9R/xSSg==", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/react-smooth/-/react-smooth-4.0.1.tgz", + "integrity": "sha512-OE4hm7XqR0jNOq3Qmk9mFLyd6p2+j6bvbPJ7qlB7+oo0eNcL2l7WQzG6MBnT3EXY6xzkLMUBec3AfewJdA0J8w==", "dependencies": { - "dom-helpers": "^3.4.0", - "loose-envify": "^1.4.0", - "prop-types": "^15.6.2", - "react-lifecycles-compat": "^3.0.4" + "fast-equals": "^5.0.1", + "prop-types": "^15.8.1", + "react-transition-group": "^4.4.5" }, "peerDependencies": { - "react": ">=15.0.0", - "react-dom": ">=15.0.0" + "react": "^16.8.0 || ^17.0.0 || ^18.0.0", + "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0" } }, "node_modules/react-table": { @@ -20246,25 +20339,23 @@ } }, "node_modules/recharts": { - "version": "2.8.0", - "resolved": "https://registry.npmjs.org/recharts/-/recharts-2.8.0.tgz", - "integrity": "sha512-nciXqQDh3aW8abhwUlA4EBOBusRHLNiKHfpRZiG/yjups1x+auHb2zWPuEcTn/IMiN47vVMMuF8Sr+vcQJtsmw==", + "version": "2.12.6", + "resolved": "https://registry.npmjs.org/recharts/-/recharts-2.12.6.tgz", + "integrity": "sha512-D+7j9WI+D0NHauah3fKHuNNcRK8bOypPW7os1DERinogGBGaHI7i6tQKJ0aUF3JXyBZ63dyfKIW2WTOPJDxJ8w==", "dependencies": { - "classnames": "^2.2.5", + "clsx": "^2.0.0", "eventemitter3": "^4.0.1", - "lodash": "^4.17.19", + "lodash": "^4.17.21", "react-is": "^16.10.2", - "react-resize-detector": "^8.0.4", - "react-smooth": "^2.0.2", + "react-smooth": "^4.0.0", "recharts-scale": "^0.4.4", - "reduce-css-calc": "^2.1.8", + "tiny-invariant": "^1.3.1", "victory-vendor": "^36.6.8" }, "engines": { - "node": ">=12" + "node": ">=14" }, "peerDependencies": { - "prop-types": "^15.6.0", "react": "^16.0.0 || ^17.0.0 || ^18.0.0", "react-dom": "^16.0.0 || ^17.0.0 || ^18.0.0" } @@ -20301,20 +20392,6 @@ "node": ">=8" } }, - "node_modules/reduce-css-calc": { - "version": "2.1.8", - "resolved": "https://registry.npmjs.org/reduce-css-calc/-/reduce-css-calc-2.1.8.tgz", - "integrity": "sha512-8liAVezDmUcH+tdzoEGrhfbGcP7nOV4NkGE3a74+qqvE7nt9i4sKLGBuZNOnpI4WiGksiNPklZxva80061QiPg==", - "dependencies": { - "css-unit-converter": "^1.1.1", - "postcss-value-parser": "^3.3.0" - } - }, - "node_modules/reduce-css-calc/node_modules/postcss-value-parser": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", - "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==" - }, "node_modules/reflect.getprototypeof": { "version": "1.0.4", "resolved": 
"https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.4.tgz", @@ -20369,13 +20446,14 @@ "integrity": "sha512-jbD/FT0+9MBU2XAZluI7w2OBs1RBi6p9M83nkoZayQXXU9e8Robt69FcZc7wU4eJD/YFTjn1JdCk3rbMJajz8Q==" }, "node_modules/regexp.prototype.flags": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.0.tgz", - "integrity": "sha512-0SutC3pNudRKgquxGoRGIz946MZVHqbNfPjBdxeOhBrdgDKlRoXmYLQN9xRbrR09ZXWeGAdPuif7egofn6v5LA==", + "version": "1.5.2", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.2.tgz", + "integrity": "sha512-NcDiDkTLuPR+++OCKB0nWafEmhg/Da8aUPLPMQbK+bxKKCm1/S5he+AqYa4PlMCVBalb4/yxIRub6qkEx5yJbw==", "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "functions-have-names": "^1.2.3" + "call-bind": "^1.0.6", + "define-properties": "^1.2.1", + "es-errors": "^1.3.0", + "set-function-name": "^2.0.1" }, "engines": { "node": ">= 0.4" @@ -20733,12 +20811,12 @@ } }, "node_modules/safe-array-concat": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.0.1.tgz", - "integrity": "sha512-6XbUAseYE2KtOuGueyeobCySj9L4+66Tn6KQMOPQJrAJEowYKW/YR/MGJZl7FdydUdaFu4LYyDZjxf4/Nmo23Q==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.2.tgz", + "integrity": "sha512-vj6RsCsWBCf19jIeHEfkRMw8DPiBb+DMXklQ/1SGDHOMlHdPUkZXFQ2YdplS23zESTijAcurb1aSgJA3AgMu1Q==", "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.2.1", + "call-bind": "^1.0.7", + "get-intrinsic": "^1.2.4", "has-symbols": "^1.0.3", "isarray": "^2.0.5" }, @@ -20769,14 +20847,17 @@ ] }, "node_modules/safe-regex-test": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.0.tgz", - "integrity": "sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.3.tgz", + "integrity": "sha512-CdASjNJPvRa7roO6Ra/gLYBTzYzzPyyBXxIMdGW3USQLyjWEls2RgW5UBTXaQVp+OrpeCK3bLem8smtmheoRuw==", "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.1.3", + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", "is-regex": "^1.1.4" }, + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -20792,9 +20873,9 @@ "integrity": "sha512-ZRwKbh/eQ6w9vmTjkuG0Ioi3HBwPFce0O+v//ve+aOq1oeCy7jMV2qzzAlpsNuqpqCBjjriM1lbtZbF/Q8jVyA==" }, "node_modules/sass": { - "version": "1.64.1", - "resolved": "https://registry.npmjs.org/sass/-/sass-1.64.1.tgz", - "integrity": "sha512-16rRACSOFEE8VN7SCgBu1MpYCyN7urj9At898tyzdXFhC+a+yOX5dXwAR7L8/IdPJ1NB8OYoXmD55DM30B2kEQ==", + "version": "1.77.0", + "resolved": "https://registry.npmjs.org/sass/-/sass-1.77.0.tgz", + "integrity": "sha512-eGj4HNfXqBWtSnvItNkn7B6icqH14i3CiCGbzMKs3BAPTq62pp9NBYsBgyN4cA+qssqo9r26lW4JSvlaUUWbgw==", "devOptional": true, "dependencies": { "chokidar": ">=3.0.0 <4.0.0", @@ -21066,14 +21147,31 @@ "node": ">= 0.8.0" } }, + "node_modules/set-function-length": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", + "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", + "dependencies": { + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": 
"^1.2.4", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/set-function-name": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.1.tgz", - "integrity": "sha512-tMNCiqYVkXIZgc2Hnoy2IvC/f8ezc5koaRFkCjrpWzGpCd3qbZXPzVy9MAZzK1ch/X0jvSkojys3oqJN0qCmdA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.2.tgz", + "integrity": "sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==", "dependencies": { - "define-data-property": "^1.0.1", + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", "functions-have-names": "^1.2.3", - "has-property-descriptors": "^1.0.0" + "has-property-descriptors": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -21125,13 +21223,17 @@ } }, "node_modules/side-channel": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", - "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz", + "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==", "dependencies": { - "call-bind": "^1.0.0", - "get-intrinsic": "^1.0.2", - "object-inspect": "^1.9.0" + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4", + "object-inspect": "^1.13.1" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -21487,31 +21589,39 @@ "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" }, "node_modules/string.prototype.matchall": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/string.prototype.matchall/-/string.prototype.matchall-4.0.8.tgz", - "integrity": "sha512-6zOCOcJ+RJAQshcTvXPHoxoQGONa3e/Lqx90wUA+wEzX78sg5Bo+1tQo4N0pohS0erG9qtCqJDjNCQBjeWVxyg==", + "version": "4.0.11", + "resolved": "https://registry.npmjs.org/string.prototype.matchall/-/string.prototype.matchall-4.0.11.tgz", + "integrity": "sha512-NUdh0aDavY2og7IbBPenWqR9exH+E26Sv8e0/eTe1tltDGZL+GtBkDAnnyBtmekfK6/Dq3MkcGtzXFEd1LQrtg==", "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", - "get-intrinsic": "^1.1.3", + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "get-intrinsic": "^1.2.4", + "gopd": "^1.0.1", "has-symbols": "^1.0.3", - "internal-slot": "^1.0.3", - "regexp.prototype.flags": "^1.4.3", - "side-channel": "^1.0.4" + "internal-slot": "^1.0.7", + "regexp.prototype.flags": "^1.5.2", + "set-function-name": "^2.0.2", + "side-channel": "^1.0.6" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/string.prototype.trim": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.7.tgz", - "integrity": "sha512-p6TmeT1T3411M8Cgg9wBTMRtY2q9+PNy9EV1i2lIXUN/btt763oIfxwN3RR8VU6wHX8j/1CFy0L+YuThm6bgOg==", + "version": "1.2.9", + "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.9.tgz", + "integrity": "sha512-klHuCNxiMZ8MlsOihJhJEBJAiMVqU3Z2nEXWfWnIqjN0gEFS9J9+IxKozWWtQGcgoa1WUZzLjKPTr4ZHNFTFxw==", "dependencies": { 
- "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.0", + "es-object-atoms": "^1.0.0" }, "engines": { "node": ">= 0.4" @@ -21521,26 +21631,29 @@ } }, "node_modules/string.prototype.trimend": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.6.tgz", - "integrity": "sha512-JySq+4mrPf9EsDBEDYMOb/lM7XQLulwg5R/m1r0PXEFqrV0qHvl58sdTilSXtKOflCsK2E8jxf+GKC0T07RWwQ==", + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.8.tgz", + "integrity": "sha512-p73uL5VCHCO2BZZ6krwwQE3kCzM7NKmis8S//xEC6fQonchbum4eP6kR4DLEjQFO3Wnj3Fuo8NM0kOSjVdHjZQ==", "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/string.prototype.trimstart": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.6.tgz", - "integrity": "sha512-omqjMDaY92pbn5HOX7f9IccLA+U1tA9GvtU4JrodiXFfYB7jPzzHpRzpglLAjtUV6bB557zwClJezTqnAiYnQA==", + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.8.tgz", + "integrity": "sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg==", "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -22316,6 +22429,11 @@ "resolved": "https://registry.npmjs.org/thunky/-/thunky-1.1.0.tgz", "integrity": "sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA==" }, + "node_modules/tiny-invariant": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.3.3.tgz", + "integrity": "sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==" + }, "node_modules/tiny-warning": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/tiny-warning/-/tiny-warning-1.0.3.tgz", @@ -22540,27 +22658,28 @@ } }, "node_modules/typed-array-buffer": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.0.tgz", - "integrity": "sha512-Y8KTSIglk9OZEr8zywiIHG/kmQ7KWyjseXs1CbSo8vC42w7hg2HgYTxSWwP0+is7bWDc1H+Fo026CpHFwm8tkw==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.2.tgz", + "integrity": "sha512-gEymJYKZtKXzzBzM4jqa9w6Q1Jjm7x2d+sh19AdsD4wqnMPDYyvwpsIc2Q/835kHuo3BEQ7CjelGhfTsoBb2MQ==", "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.2.1", - "is-typed-array": "^1.1.10" + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "is-typed-array": "^1.1.13" }, "engines": { "node": ">= 0.4" } }, "node_modules/typed-array-byte-length": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.0.tgz", - "integrity": "sha512-Or/+kvLxNpeQ9DtSydonMxCx+9ZXOswtwJn17SNLvhptaXYDJvkFFP5zbfU/uLmvnBJlI4yrnXRxpdWH/M5tNA==", + "version": "1.0.1", + "resolved": 
"https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.1.tgz", + "integrity": "sha512-3iMJ9q0ao7WE9tWcaYKIptkNBuOIcZCCT0d4MRvuuH88fEoEH62IuQe0OtraD3ebQEoTRk8XCBoknUNc1Y67pw==", "dependencies": { - "call-bind": "^1.0.2", + "call-bind": "^1.0.7", "for-each": "^0.3.3", - "has-proto": "^1.0.1", - "is-typed-array": "^1.1.10" + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13" }, "engines": { "node": ">= 0.4" @@ -22570,15 +22689,16 @@ } }, "node_modules/typed-array-byte-offset": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.0.tgz", - "integrity": "sha512-RD97prjEt9EL8YgAgpOkf3O4IF9lhJFr9g0htQkm0rchFp/Vx7LW5Q8fSXXub7BXAODyUQohRMyOc3faCPd0hg==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.2.tgz", + "integrity": "sha512-Ous0vodHa56FviZucS2E63zkgtgrACj7omjwd/8lTEMEPFFyjfixMZ1ZXenpgCFBBt4EC1J2XsyVS2gkG0eTFA==", "dependencies": { - "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.2", + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.7", "for-each": "^0.3.3", - "has-proto": "^1.0.1", - "is-typed-array": "^1.1.10" + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13" }, "engines": { "node": ">= 0.4" @@ -22588,13 +22708,19 @@ } }, "node_modules/typed-array-length": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.4.tgz", - "integrity": "sha512-KjZypGq+I/H7HI5HlOoGHkWUUGq+Q0TPhQurLbyrVrvnKTBgzLhIJ7j6J/XTQOi0d1RjyZ0wdas8bKs2p0x3Ng==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.6.tgz", + "integrity": "sha512-/OxDN6OtAk5KBpGb28T+HZc2M+ADtvRxXrKKbUwtsLgdoxgX13hyy7ek6bFRl5+aBs2yZzB0c4CnQfAtVypW/g==", "dependencies": { - "call-bind": "^1.0.2", + "call-bind": "^1.0.7", "for-each": "^0.3.3", - "is-typed-array": "^1.1.9" + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13", + "possible-typed-array-names": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -23609,15 +23735,15 @@ } }, "node_modules/which-typed-array": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.11.tgz", - "integrity": "sha512-qe9UWWpkeG5yzZ0tNYxDmd7vo58HDBc39mZ0xWWpolAGADdFOzkfamWLDxkOWcvHQKVmdTyQdLD4NOfjLWTKew==", + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.15.tgz", + "integrity": "sha512-oV0jmFtUky6CXfkqehVvBP/LSWJ2sy4vWMioiENyJLePrBO/yKyV9OyJySfAKosh+RYkIl5zJCNZ8/4JncrpdA==", "dependencies": { - "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.2", + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.7", "for-each": "^0.3.3", "gopd": "^1.0.1", - "has-tostringtag": "^1.0.0" + "has-tostringtag": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -25748,30 +25874,6 @@ "resolved": "https://registry.npmjs.org/@gilbarbara/deep-equal/-/deep-equal-0.3.1.tgz", "integrity": "sha512-I7xWjLs2YSVMc5gGx1Z3ZG1lgFpITPndpi8Ku55GeEIKpACCPQNS/OTqQbxgTCfq0Ncvcc+CrFov96itVh6Qvw==" }, - "@gilbarbara/helpers": { - "version": "0.9.1", - "resolved": "https://registry.npmjs.org/@gilbarbara/helpers/-/helpers-0.9.1.tgz", - "integrity": "sha512-B6q4qruzaurfbpmdGK85SSgnI36pFuJlewTul9hWHUv7u8VGxDwjj8anxSfuPyDZ3ovXF1H6ifCVFHQqRV2+Gg==", - "requires": { - "@gilbarbara/types": "^0.2.2", - "is-lite": "^1.2.0" - } 
- }, - "@gilbarbara/types": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/@gilbarbara/types/-/types-0.2.2.tgz", - "integrity": "sha512-QuQDBRRcm1Q8AbSac2W1YElurOhprj3Iko/o+P1fJxUWS4rOGKMVli98OXS7uo4z+cKAif6a+L9bcZFSyauQpQ==", - "requires": { - "type-fest": "^4.1.0" - }, - "dependencies": { - "type-fest": { - "version": "4.9.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.9.0.tgz", - "integrity": "sha512-KS/6lh/ynPGiHD/LnAobrEFq3Ad4pBzOlJ1wAnJx9N4EYoqFhMfLIBjUT2UEx4wg5ZE+cC1ob6DCSpppVo+rtg==" - } - } - }, "@humanwhocodes/config-array": { "version": "0.11.10", "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.10.tgz", @@ -28218,12 +28320,12 @@ } }, "array-buffer-byte-length": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.0.tgz", - "integrity": "sha512-LPuwb2P+NrQw3XhxGc36+XSvuBPopovXYTR9Ew++Du9Yb/bx5AzBfrIsBoj0EZUifjQU+sHL21sseZ3jerWO/A==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.1.tgz", + "integrity": "sha512-ahC5W1xgou+KTXix4sAO8Ki12Q+jf4i0+tmk3sC+zgcynshkHxzpXdImBehiUYKKKDwvfFiJl1tZt6ewscS1Mg==", "requires": { - "call-bind": "^1.0.2", - "is-array-buffer": "^3.0.1" + "call-bind": "^1.0.5", + "is-array-buffer": "^3.0.4" } }, "array-flatten": { @@ -28248,6 +28350,19 @@ "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==" }, + "array.prototype.findlast": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/array.prototype.findlast/-/array.prototype.findlast-1.2.5.tgz", + "integrity": "sha512-CVvd6FHg1Z3POpBLxO6E6zr+rSKEQ9L6rZHAaY7lLfhKsWYUBBOuMs0e9o24oopj6H+geRCX0YJ+TJLBK2eHyQ==", + "requires": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "es-shim-unscopables": "^1.0.2" + } + }, "array.prototype.findlastindex": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.3.tgz", @@ -28294,28 +28409,41 @@ "is-string": "^1.0.7" } }, - "array.prototype.tosorted": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/array.prototype.tosorted/-/array.prototype.tosorted-1.1.1.tgz", - "integrity": "sha512-pZYPXPRl2PqWcsUs6LOMn+1f1532nEoPTYowBtqLwAW+W8vSVhkIGnmOX1t/UQjD6YGI0vcD2B1U7ZFGQH9jnQ==", + "array.prototype.toreversed": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/array.prototype.toreversed/-/array.prototype.toreversed-1.1.2.tgz", + "integrity": "sha512-wwDCoT4Ck4Cz7sLtgUmzR5UV3YF5mFHUlbChCzZBQZ+0m2cl/DH3tKgvphv1nKgFsJ48oCSg6p91q2Vm0I/ZMA==", "requires": { "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", - "es-shim-unscopables": "^1.0.0", - "get-intrinsic": "^1.1.3" + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", + "es-shim-unscopables": "^1.0.0" + } + }, + "array.prototype.tosorted": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/array.prototype.tosorted/-/array.prototype.tosorted-1.1.3.tgz", + "integrity": "sha512-/DdH4TiTmOKzyQbp/eadcCVexiCb36xJg7HshYOYJnNZFDj33GEv0P7GxsynpShhq4OLYJzbGcBDkLsDt7MnNg==", + "requires": { + "call-bind": "^1.0.5", + "define-properties": "^1.2.1", + "es-abstract": "^1.22.3", + "es-errors": "^1.1.0", + 
"es-shim-unscopables": "^1.0.2" } }, "arraybuffer.prototype.slice": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.1.tgz", - "integrity": "sha512-09x0ZWFEjj4WD8PDbykUwo3t9arLn8NIzmmYEJFpYekOAQjpkGSyrQhNoRTcwwcFRu+ycWF78QZ63oWTqSjBcw==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.3.tgz", + "integrity": "sha512-bMxMKAjg13EBSVscxTaYA4mRc5t1UAXa2kXiGTNfZ079HIWXEkKmkgFrh/nJqamaLSrXO5H4WFFkPEaLJWbs3A==", "requires": { - "array-buffer-byte-length": "^1.0.0", - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "get-intrinsic": "^1.2.1", - "is-array-buffer": "^3.0.2", + "array-buffer-byte-length": "^1.0.1", + "call-bind": "^1.0.5", + "define-properties": "^1.2.1", + "es-abstract": "^1.22.3", + "es-errors": "^1.2.1", + "get-intrinsic": "^1.2.3", + "is-array-buffer": "^3.0.4", "is-shared-array-buffer": "^1.0.2" } }, @@ -28346,14 +28474,6 @@ "resolved": "https://registry.npmjs.org/async/-/async-3.2.4.tgz", "integrity": "sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ==" }, - "asynciterator.prototype": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/asynciterator.prototype/-/asynciterator.prototype-1.0.0.tgz", - "integrity": "sha512-wwHYEIS0Q80f5mosx3L/dfG5t5rjEa9Ft51GTaNt862EnpyGHpgz2RkZvLPp1oF5TnAiTohkEKVEu8pQPJI7Vg==", - "requires": { - "has-symbols": "^1.0.3" - } - }, "asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", @@ -28378,9 +28498,12 @@ } }, "available-typed-arrays": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz", - "integrity": "sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw==" + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", + "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==", + "requires": { + "possible-typed-array-names": "^1.0.0" + } }, "axe-core": { "version": "4.7.0", @@ -28845,12 +28968,15 @@ "integrity": "sha512-pMhOfFDPiv9t5jjIXkHosWmkSyQbvsgEVNkz0ERHbuLh2T/7j4Mqqpz523Fe8MVY89KC6Sh/QfS2sM+SjgFDcw==" }, "call-bind": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", - "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", + "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", "requires": { - "function-bind": "^1.1.1", - "get-intrinsic": "^1.0.2" + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.1" } }, "callsites": { @@ -28985,9 +29111,9 @@ "integrity": "sha512-sbpkOw6z413p+HDGcBENe498WM9woqWHiJxCq7nvmxe9WmrUmqfAcxpIwAiMtM5Q3AhYkzXcNQHqsWq0mND51g==" }, "classnames": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/classnames/-/classnames-2.3.2.tgz", - "integrity": "sha512-CSbhY4cFEJRe6/GQzIk5qXZ4Jeg5pcsP7b5peFSDpffpe1cqjASH/n9UTjBwOp6XpMSTwQ8Za2K5V02ueA7Tmw==" + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/classnames/-/classnames-2.5.1.tgz", + 
"integrity": "sha512-saHYOzhIQs6wy2sVxTM6bUDsQO4F50V9RQ22qBpEdCW+I+/Wmke2HOl6lS6dTpdxVhb88/I6+Hs+438c3lfUow==" }, "clean-css": { "version": "5.3.2", @@ -29008,6 +29134,11 @@ "wrap-ansi": "^7.0.0" } }, + "clsx": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz", + "integrity": "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==" + }, "co": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", @@ -29503,11 +29634,6 @@ "source-map": "^0.6.1" } }, - "css-unit-converter": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/css-unit-converter/-/css-unit-converter-1.1.2.tgz", - "integrity": "sha512-IiJwMC8rdZE0+xiEZHeru6YoONC4rfPMqGm2W85jMIbkFvv5nFTwJVFHam2eFrN6txmoUYFAFXiv8ICVeTO0MA==" - }, "css-what": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/css-what/-/css-what-6.1.0.tgz", @@ -29751,6 +29877,36 @@ "whatwg-url": "^11.0.0" } }, + "data-view-buffer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.1.tgz", + "integrity": "sha512-0lht7OugA5x3iJLOWFhWK/5ehONdprk0ISXqVFn/NFrDu+cuc8iADFrGQz5BnRK7LLU3JmkbXSxaqX+/mXYtUA==", + "requires": { + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" + } + }, + "data-view-byte-length": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.1.tgz", + "integrity": "sha512-4J7wRJD3ABAzr8wP+OcIcqq2dlUKp4DVflx++hs5h5ZKydWMI6/D/fAot+yh6g2tHh8fLFTvNOaVN357NvSrOQ==", + "requires": { + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" + } + }, + "data-view-byte-offset": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.0.tgz", + "integrity": "sha512-t/Ygsytq+R995EJ5PZlD4Cu56sWa8InXySaViRzw9apusqsOO2bQP+SbYzAhR0pFKoB+43lYy8rWban9JSuXnA==", + "requires": { + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" + } + }, "date-fns": { "version": "2.30.0", "resolved": "https://registry.npmjs.org/date-fns/-/date-fns-2.30.0.tgz", @@ -29864,13 +30020,13 @@ } }, "define-data-property": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.1.tgz", - "integrity": "sha512-E7uGkTzkk1d0ByLeSc6ZsFS79Axg+m1P/VsgYsxHgiuc3tFSj+MjMIwe90FC4lOAZzNBdY7kkO2P2wKdsQ1vgQ==", + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", + "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", "requires": { - "get-intrinsic": "^1.2.1", - "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.0" + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" } }, "define-lazy-prop": { @@ -30159,49 +30315,56 @@ } }, "es-abstract": { - "version": "1.22.1", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.22.1.tgz", - "integrity": "sha512-ioRRcXMO6OFyRpyzV3kE1IIBd4WG5/kltnzdxSCqoP8CMGs/Li+M1uF5o7lOkZVFjDs+NLesthnF66Pg/0q0Lw==", - "requires": { - "array-buffer-byte-length": "^1.0.0", - "arraybuffer.prototype.slice": "^1.0.1", - "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.2", - "es-set-tostringtag": "^2.0.1", + "version": "1.23.3", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.23.3.tgz", + "integrity": 
"sha512-e+HfNH61Bj1X9/jLc5v1owaLYuHdeHHSQlkhCBiTK8rBvKaULl/beGMxwrMXjpYrv4pz22BlY570vVePA2ho4A==", + "requires": { + "array-buffer-byte-length": "^1.0.1", + "arraybuffer.prototype.slice": "^1.0.3", + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.7", + "data-view-buffer": "^1.0.1", + "data-view-byte-length": "^1.0.1", + "data-view-byte-offset": "^1.0.0", + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "es-set-tostringtag": "^2.0.3", "es-to-primitive": "^1.2.1", - "function.prototype.name": "^1.1.5", - "get-intrinsic": "^1.2.1", - "get-symbol-description": "^1.0.0", + "function.prototype.name": "^1.1.6", + "get-intrinsic": "^1.2.4", + "get-symbol-description": "^1.0.2", "globalthis": "^1.0.3", "gopd": "^1.0.1", - "has": "^1.0.3", - "has-property-descriptors": "^1.0.0", - "has-proto": "^1.0.1", + "has-property-descriptors": "^1.0.2", + "has-proto": "^1.0.3", "has-symbols": "^1.0.3", - "internal-slot": "^1.0.5", - "is-array-buffer": "^3.0.2", + "hasown": "^2.0.2", + "internal-slot": "^1.0.7", + "is-array-buffer": "^3.0.4", "is-callable": "^1.2.7", - "is-negative-zero": "^2.0.2", + "is-data-view": "^1.0.1", + "is-negative-zero": "^2.0.3", "is-regex": "^1.1.4", - "is-shared-array-buffer": "^1.0.2", + "is-shared-array-buffer": "^1.0.3", "is-string": "^1.0.7", - "is-typed-array": "^1.1.10", + "is-typed-array": "^1.1.13", "is-weakref": "^1.0.2", - "object-inspect": "^1.12.3", + "object-inspect": "^1.13.1", "object-keys": "^1.1.1", - "object.assign": "^4.1.4", - "regexp.prototype.flags": "^1.5.0", - "safe-array-concat": "^1.0.0", - "safe-regex-test": "^1.0.0", - "string.prototype.trim": "^1.2.7", - "string.prototype.trimend": "^1.0.6", - "string.prototype.trimstart": "^1.0.6", - "typed-array-buffer": "^1.0.0", - "typed-array-byte-length": "^1.0.0", - "typed-array-byte-offset": "^1.0.0", - "typed-array-length": "^1.0.4", + "object.assign": "^4.1.5", + "regexp.prototype.flags": "^1.5.2", + "safe-array-concat": "^1.1.2", + "safe-regex-test": "^1.0.3", + "string.prototype.trim": "^1.2.9", + "string.prototype.trimend": "^1.0.8", + "string.prototype.trimstart": "^1.0.8", + "typed-array-buffer": "^1.0.2", + "typed-array-byte-length": "^1.0.1", + "typed-array-byte-offset": "^1.0.2", + "typed-array-length": "^1.0.6", "unbox-primitive": "^1.0.2", - "which-typed-array": "^1.1.10" + "which-typed-array": "^1.1.15" } }, "es-array-method-boxes-properly": { @@ -30209,6 +30372,19 @@ "resolved": "https://registry.npmjs.org/es-array-method-boxes-properly/-/es-array-method-boxes-properly-1.0.0.tgz", "integrity": "sha512-wd6JXUmyHmt8T5a2xreUwKcGPq6f1f+WwIJkijUqiGcJz1qqnZgP6XIK+QyIWU5lT7imeNxUll48bziG+TSYcA==" }, + "es-define-property": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", + "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", + "requires": { + "get-intrinsic": "^1.2.4" + } + }, + "es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==" + }, "es-get-iterator": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/es-get-iterator/-/es-get-iterator-1.1.3.tgz", @@ -30227,24 +30403,24 @@ } }, "es-iterator-helpers": { - "version": "1.0.15", - "resolved": "https://registry.npmjs.org/es-iterator-helpers/-/es-iterator-helpers-1.0.15.tgz", - "integrity": 
"sha512-GhoY8uYqd6iwUl2kgjTm4CZAf6oo5mHK7BPqx3rKgx893YSsy0LGHV6gfqqQvZt/8xM8xeOnfXBCfqclMKkJ5g==", + "version": "1.0.19", + "resolved": "https://registry.npmjs.org/es-iterator-helpers/-/es-iterator-helpers-1.0.19.tgz", + "integrity": "sha512-zoMwbCcH5hwUkKJkT8kDIBZSz9I6mVG//+lDCinLCGov4+r7NIy0ld8o03M0cJxl2spVf6ESYVS6/gpIfq1FFw==", "requires": { - "asynciterator.prototype": "^1.0.0", - "call-bind": "^1.0.2", + "call-bind": "^1.0.7", "define-properties": "^1.2.1", - "es-abstract": "^1.22.1", - "es-set-tostringtag": "^2.0.1", - "function-bind": "^1.1.1", - "get-intrinsic": "^1.2.1", + "es-abstract": "^1.23.3", + "es-errors": "^1.3.0", + "es-set-tostringtag": "^2.0.3", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", "globalthis": "^1.0.3", - "has-property-descriptors": "^1.0.0", - "has-proto": "^1.0.1", + "has-property-descriptors": "^1.0.2", + "has-proto": "^1.0.3", "has-symbols": "^1.0.3", - "internal-slot": "^1.0.5", + "internal-slot": "^1.0.7", "iterator.prototype": "^1.1.2", - "safe-array-concat": "^1.0.1" + "safe-array-concat": "^1.1.2" } }, "es-module-lexer": { @@ -30252,22 +30428,30 @@ "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.3.0.tgz", "integrity": "sha512-vZK7T0N2CBmBOixhmjdqx2gWVbFZ4DXZ/NyRMZVlJXPa7CyFS+/a4QQsDGDQy9ZfEzxFuNEsMLeQJnKP2p5/JA==" }, + "es-object-atoms": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.0.0.tgz", + "integrity": "sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==", + "requires": { + "es-errors": "^1.3.0" + } + }, "es-set-tostringtag": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.1.tgz", - "integrity": "sha512-g3OMbtlwY3QewlqAiMLI47KywjWZoEytKr8pf6iTC8uJq5bIAH52Z9pnQ8pVL6whrCto53JZDuUIsifGeLorTg==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.3.tgz", + "integrity": "sha512-3T8uNMC3OQTHkFUsFq8r/BwAXLHvU/9O9mE0fBc/MY5iq/8H7ncvO947LmYA6ldWw9Uh8Yhf25zu6n7nML5QWQ==", "requires": { - "get-intrinsic": "^1.1.3", - "has": "^1.0.3", - "has-tostringtag": "^1.0.0" + "get-intrinsic": "^1.2.4", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.1" } }, "es-shim-unscopables": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz", - "integrity": "sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.2.tgz", + "integrity": "sha512-J3yBRXCzDu4ULnQwxyToo/OjdMx6akgVC7K6few0a7F/0wLtmKKN7I73AH5T2836UuXRqN7Qg+IIUw/+YJksRw==", "requires": { - "has": "^1.0.3" + "hasown": "^2.0.0" } }, "es-to-primitive": { @@ -30638,26 +30822,28 @@ } }, "eslint-plugin-react": { - "version": "7.33.2", - "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.33.2.tgz", - "integrity": "sha512-73QQMKALArI8/7xGLNI/3LylrEYrlKZSb5C9+q3OtOewTnMQi5cT+aE9E41sLCmli3I9PGGmD1yiZydyo4FEPw==", + "version": "7.34.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.34.1.tgz", + "integrity": "sha512-N97CxlouPT1AHt8Jn0mhhN2RrADlUAsk1/atcT2KyA/l9Q/E6ll7OIGwNumFmWfZ9skV3XXccYS19h80rHtgkw==", "requires": { - "array-includes": "^3.1.6", - "array.prototype.flatmap": "^1.3.1", - "array.prototype.tosorted": "^1.1.1", + "array-includes": "^3.1.7", + "array.prototype.findlast": 
"^1.2.4", + "array.prototype.flatmap": "^1.3.2", + "array.prototype.toreversed": "^1.1.2", + "array.prototype.tosorted": "^1.1.3", "doctrine": "^2.1.0", - "es-iterator-helpers": "^1.0.12", + "es-iterator-helpers": "^1.0.17", "estraverse": "^5.3.0", "jsx-ast-utils": "^2.4.1 || ^3.0.0", "minimatch": "^3.1.2", - "object.entries": "^1.1.6", - "object.fromentries": "^2.0.6", - "object.hasown": "^1.1.2", - "object.values": "^1.1.6", + "object.entries": "^1.1.7", + "object.fromentries": "^2.0.7", + "object.hasown": "^1.1.3", + "object.values": "^1.1.7", "prop-types": "^15.8.1", - "resolve": "^2.0.0-next.4", + "resolve": "^2.0.0-next.5", "semver": "^6.3.1", - "string.prototype.matchall": "^4.0.8" + "string.prototype.matchall": "^4.0.10" }, "dependencies": { "doctrine": { @@ -30669,11 +30855,11 @@ } }, "resolve": { - "version": "2.0.0-next.4", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-2.0.0-next.4.tgz", - "integrity": "sha512-iMDbmAWtfU+MHpxt/I5iWI7cY6YVEZUQ3MBgPQ++XD1PELuJHIl82xBmObyP2KyQmkNB2dsqF7seoQQiAn5yDQ==", + "version": "2.0.0-next.5", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-2.0.0-next.5.tgz", + "integrity": "sha512-U7WjGVG9sH8tvjW5SmGbQuui75FiyjAX72HX15DwBBwF9dNiQZRQAg9nnPhYy+TUnE0+VcrttuvNI8oSxZcocA==", "requires": { - "is-core-module": "^2.9.0", + "is-core-module": "^2.13.0", "path-parse": "^1.0.7", "supports-preserve-symlinks-flag": "^1.0.0" } @@ -31421,14 +31607,14 @@ "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==" }, "function.prototype.name": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.5.tgz", - "integrity": "sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==", + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.6.tgz", + "integrity": "sha512-Z5kx79swU5P27WEayXM1tBi5Ze/lbIyiNgU3qyXUOf9b2rgXYyF9Dy9Cx+IQv/Lc8WCG6L82zwUPpSS9hGehIg==", "requires": { "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "es-abstract": "^1.19.0", - "functions-have-names": "^1.2.2" + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", + "functions-have-names": "^1.2.3" } }, "functions-have-names": { @@ -31447,14 +31633,15 @@ "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==" }, "get-intrinsic": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.1.tgz", - "integrity": "sha512-2DcsyfABl+gVHEfCOaTrWgyt+tb6MSEGmKq+kI5HwLbIYgjgmMcV8KQ41uaKz1xxUcn9tJtgFbQUEVcEbd0FYw==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", + "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", "requires": { - "function-bind": "^1.1.1", - "has": "^1.0.3", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", "has-proto": "^1.0.1", - "has-symbols": "^1.0.3" + "has-symbols": "^1.0.3", + "hasown": "^2.0.0" } }, "get-own-enumerable-property-symbols": { @@ -31473,12 +31660,13 @@ "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==" }, "get-symbol-description": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz", - "integrity": 
"sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.2.tgz", + "integrity": "sha512-g0QYk1dZBxGwk+Ngc+ltRH2IBp2f7zBkBMBJZCDerh6EhlhSR6+9irMCuT/09zD6qkarHUSn529sK/yL4S27mg==", "requires": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.1.1" + "call-bind": "^1.0.5", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4" } }, "glob": { @@ -31609,14 +31797,6 @@ "resolved": "https://registry.npmjs.org/harmony-reflect/-/harmony-reflect-1.6.2.tgz", "integrity": "sha512-HIp/n38R9kQjDEziXyDTuW3vvoxxyxjxFzXLrBr18uB47GnSt+G9D29fqrpM5ZkspMcPICud3XsBJQ4Y2URg8g==" }, - "has": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", - "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", - "requires": { - "function-bind": "^1.1.1" - } - }, "has-bigints": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz", @@ -31628,17 +31808,17 @@ "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==" }, "has-property-descriptors": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz", - "integrity": "sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", "requires": { - "get-intrinsic": "^1.1.1" + "es-define-property": "^1.0.0" } }, "has-proto": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.1.tgz", - "integrity": "sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg==" + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz", + "integrity": "sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==" }, "has-symbols": { "version": "1.0.3", @@ -31646,17 +31826,17 @@ "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==" }, "has-tostringtag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", - "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", "requires": { - "has-symbols": "^1.0.2" + "has-symbols": "^1.0.3" } }, "hasown": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.0.tgz", - "integrity": "sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", "requires": { "function-bind": "^1.1.2" } @@ -32024,12 +32204,12 @@ } }, "internal-slot": { - "version": "1.0.5", - "resolved": 
"https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.5.tgz", - "integrity": "sha512-Y+R5hJrzs52QCG2laLn4udYVnxsfny9CpOhNhUvk/SSSVyF6T27FzRbF0sroPidSu3X8oEAkOn2K804mjpt6UQ==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.7.tgz", + "integrity": "sha512-NGnrKwXzSms2qUUih/ILZ5JBqNTSa1+ZmP6flaIp6KmSElgE9qdndzS3cqjrDovwFdmwsGsLdeFgB6suw+1e9g==", "requires": { - "get-intrinsic": "^1.2.0", - "has": "^1.0.3", + "es-errors": "^1.3.0", + "hasown": "^2.0.0", "side-channel": "^1.0.4" } }, @@ -32054,13 +32234,12 @@ } }, "is-array-buffer": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.2.tgz", - "integrity": "sha512-y+FyyR/w8vfIRq4eQcM1EYgSTnmHXPqaF+IgzgraytCFq5Xh8lllDVmAZolPJiZttZLeFSINPYMaEJ7/vWUa1w==", + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.4.tgz", + "integrity": "sha512-wcjaerHw0ydZwfhiKbXJWLDY8A7yV7KhjQOpb83hGgGfId/aQa4TOvwyzn2PuswW2gPCYEL/nEAiSVpdOj1lXw==", "requires": { "call-bind": "^1.0.2", - "get-intrinsic": "^1.2.0", - "is-typed-array": "^1.1.10" + "get-intrinsic": "^1.2.1" } }, "is-arrayish": { @@ -32119,6 +32298,14 @@ "hasown": "^2.0.0" } }, + "is-data-view": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.1.tgz", + "integrity": "sha512-AHkaJrsUVW6wq6JS8y3JnM/GJF/9cf+k20+iDzlSaJrinEo5+7vRiteOSwBhHRiAyQATN1AmY4hwzxJKPmYf+w==", + "requires": { + "is-typed-array": "^1.1.13" + } + }, "is-date-object": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", @@ -32187,9 +32374,9 @@ "integrity": "sha512-51ypPSPCoTEIN9dy5Oy+h4pShgJmPCygKfyRCISBI+JoWT/2oJvK8QPxmwv7b/p239jXrm9M1mlQbyKJ5A152g==" }, "is-negative-zero": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz", - "integrity": "sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==" + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.3.tgz", + "integrity": "sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw==" }, "is-number": { "version": "7.0.0", @@ -32256,11 +32443,11 @@ "integrity": "sha512-+2cnTEZeY5z/iXGbLhPrOAaK/Mau5k5eXq9j14CpRTftq0pAJu2MwVRSZhyZWBzx3o6X795Lz6Bpb6R0GKf37g==" }, "is-shared-array-buffer": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz", - "integrity": "sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.3.tgz", + "integrity": "sha512-nA2hv5XIhLR3uVzDDfCIknerhx8XUKnstuOERPNNIinXG7v9u+ohXF67vxm4TPTEPU6lm61ZkwP3c9PCB97rhg==", "requires": { - "call-bind": "^1.0.2" + "call-bind": "^1.0.7" } }, "is-stream": { @@ -32285,11 +32472,11 @@ } }, "is-typed-array": { - "version": "1.1.12", - "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.12.tgz", - "integrity": "sha512-Z14TF2JNG8Lss5/HMqt0//T9JeHXttXy5pH/DBU4vi98ozO2btxzq9MwYDZYnKwU8nRsz/+GVFVRDq3DkVuSPg==", + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.13.tgz", + "integrity": "sha512-uZ25/bUAlUY5fR4OKT4rZQEBrzQWYV9ZJYGGsUmEJ6thodVJ1HX64ePQ6Z0qPWP+m+Uq6e9UugrE38jeYsDSMw==", "requires": { - 
"which-typed-array": "^1.1.11" + "which-typed-array": "^1.1.14" } }, "is-typedarray": { @@ -35412,9 +35599,9 @@ "integrity": "sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==" }, "object-inspect": { - "version": "1.12.3", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.3.tgz", - "integrity": "sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g==" + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.1.tgz", + "integrity": "sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==" }, "object-is": { "version": "1.1.5", @@ -35432,12 +35619,12 @@ "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==" }, "object.assign": { - "version": "4.1.4", - "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.4.tgz", - "integrity": "sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ==", + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.5.tgz", + "integrity": "sha512-byy+U7gp+FVwmyzKPYhW2h5l3crpmGsxl7X2s8y43IgxvG4g3QZ6CffDtsNQy1WsmZpQbO+ybo0AlW7TY6DcBQ==", "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", + "call-bind": "^1.0.5", + "define-properties": "^1.2.1", "has-symbols": "^1.0.3", "object-keys": "^1.1.1" } @@ -35486,12 +35673,13 @@ } }, "object.hasown": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/object.hasown/-/object.hasown-1.1.2.tgz", - "integrity": "sha512-B5UIT3J1W+WuWIU55h0mjlwaqxiE5vYENJXIXZ4VFe05pNYrkKuK0U/6aFcb0pKywYJh7IhfoqUfKVmrJJHZHw==", + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/object.hasown/-/object.hasown-1.1.4.tgz", + "integrity": "sha512-FZ9LZt9/RHzGySlBARE3VF+gE26TxR38SdmqOqliuTnl9wrKulaQs+4dee1V+Io8VfxqzAfHu6YuRgUy8OHoTg==", "requires": { - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-object-atoms": "^1.0.0" } }, "object.values": { @@ -35768,6 +35956,11 @@ "resolved": "https://registry.npmjs.org/popper.js/-/popper.js-1.16.1.tgz", "integrity": "sha512-Wb4p1J4zyFTbM+u6WuO4XstYx4Ky9Cewe4DWrel7B0w6VVICvPwdOpotjzcf6eD8TsckVnIMNONQyPIUFOUbCQ==" }, + "possible-typed-array-names": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.0.0.tgz", + "integrity": "sha512-d7Uw+eZoloe0EHDIYoe+bQ5WXnGMOpmiZFTuMWCwpjzzkL2nTjcKiAk4hh8TjnGye2TwWOk3UXucZ+3rbmBa8Q==" + }, "postcss": { "version": "8.4.27", "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.27.tgz", @@ -36935,22 +37128,21 @@ "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==" }, "react-joyride": { - "version": "2.7.2", - "resolved": "https://registry.npmjs.org/react-joyride/-/react-joyride-2.7.2.tgz", - "integrity": "sha512-AVzEweJxjQMc6hXUbJlH6St987GCmw0pkCSoz+X3XBMQmrk57FCMOrh1LvyMvW5GaT95C4D5oZpoaVjaOsgptg==", + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/react-joyride/-/react-joyride-2.8.1.tgz", + "integrity": "sha512-fVwCmoOvJsiFKKHn8mvPUYc4JUUkgAsQMvarpZDtFPTc4duj240b12+AB8+3NXlTYGZVnKNSTgFFzoSh9RxjmQ==", "requires": { "@gilbarbara/deep-equal": "^0.3.1", - "@gilbarbara/helpers": "^0.9.0", "deep-diff": "^1.0.2", "deepmerge": "^4.3.1", - "is-lite": "^1.2.0", + 
"is-lite": "^1.2.1", "react-floater": "^0.7.9", "react-innertext": "^1.1.5", "react-is": "^16.13.1", "scroll": "^3.0.1", "scrollparent": "^2.1.0", "tree-changes": "^0.11.2", - "type-fest": "^4.8.3" + "type-fest": "^4.15.0" }, "dependencies": { "deepmerge": { @@ -36959,9 +37151,9 @@ "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==" }, "type-fest": { - "version": "4.9.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.9.0.tgz", - "integrity": "sha512-KS/6lh/ynPGiHD/LnAobrEFq3Ad4pBzOlJ1wAnJx9N4EYoqFhMfLIBjUT2UEx4wg5ZE+cC1ob6DCSpppVo+rtg==" + "version": "4.18.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.18.2.tgz", + "integrity": "sha512-+suCYpfJLAe4OXS6+PPXjW3urOS4IoP9waSiLuXfLgqZODKw/aWwASvzqE886wA0kQgGy0mIWyhd87VpqIy6Xg==" } } }, @@ -37065,14 +37257,6 @@ "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.11.0.tgz", "integrity": "sha512-F27qZr8uUqwhWZboondsPx8tnC3Ct3SxZA3V5WyEvujRyyNv0VYPhoBg1gZ8/MV5tubQp76Trw8lTv9hzRBa+A==" }, - "react-resize-detector": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/react-resize-detector/-/react-resize-detector-8.1.0.tgz", - "integrity": "sha512-S7szxlaIuiy5UqLhLL1KY3aoyGHbZzsTpYal9eYMwCyKqoqoVLCmIgAgNyIM1FhnP2KyBygASJxdhejrzjMb+w==", - "requires": { - "lodash": "^4.17.21" - } - }, "react-router": { "version": "6.22.0", "resolved": "https://registry.npmjs.org/react-router/-/react-router-6.22.0.tgz", @@ -38378,33 +38562,13 @@ } }, "react-smooth": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/react-smooth/-/react-smooth-2.0.3.tgz", - "integrity": "sha512-yl4y3XiMorss7ayF5QnBiSprig0+qFHui8uh7Hgg46QX5O+aRMRKlfGGNGLHno35JkQSvSYY8eCWkBfHfrSHfg==", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/react-smooth/-/react-smooth-4.0.1.tgz", + "integrity": "sha512-OE4hm7XqR0jNOq3Qmk9mFLyd6p2+j6bvbPJ7qlB7+oo0eNcL2l7WQzG6MBnT3EXY6xzkLMUBec3AfewJdA0J8w==", "requires": { - "fast-equals": "^5.0.0", - "react-transition-group": "2.9.0" - }, - "dependencies": { - "dom-helpers": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/dom-helpers/-/dom-helpers-3.4.0.tgz", - "integrity": "sha512-LnuPJ+dwqKDIyotW1VzmOZ5TONUN7CwkCR5hrgawTUbkBGYdeoNLZo6nNfGkCrjtE1nXXaj7iMMpDa8/d9WoIA==", - "requires": { - "@babel/runtime": "^7.1.2" - } - }, - "react-transition-group": { - "version": "2.9.0", - "resolved": "https://registry.npmjs.org/react-transition-group/-/react-transition-group-2.9.0.tgz", - "integrity": "sha512-+HzNTCHpeQyl4MJ/bdE0u6XRMe9+XG/+aL4mCxVN4DnPBQ0/5bfHWPDuOZUzYdMj94daZaZdCCc1Dzt9R/xSSg==", - "requires": { - "dom-helpers": "^3.4.0", - "loose-envify": "^1.4.0", - "prop-types": "^15.6.2", - "react-lifecycles-compat": "^3.0.4" - } - } + "fast-equals": "^5.0.1", + "prop-types": "^15.8.1", + "react-transition-group": "^4.4.5" } }, "react-table": { @@ -38595,18 +38759,17 @@ } }, "recharts": { - "version": "2.8.0", - "resolved": "https://registry.npmjs.org/recharts/-/recharts-2.8.0.tgz", - "integrity": "sha512-nciXqQDh3aW8abhwUlA4EBOBusRHLNiKHfpRZiG/yjups1x+auHb2zWPuEcTn/IMiN47vVMMuF8Sr+vcQJtsmw==", + "version": "2.12.6", + "resolved": "https://registry.npmjs.org/recharts/-/recharts-2.12.6.tgz", + "integrity": "sha512-D+7j9WI+D0NHauah3fKHuNNcRK8bOypPW7os1DERinogGBGaHI7i6tQKJ0aUF3JXyBZ63dyfKIW2WTOPJDxJ8w==", "requires": { - "classnames": "^2.2.5", + "clsx": "^2.0.0", "eventemitter3": "^4.0.1", - "lodash": "^4.17.19", + "lodash": "^4.17.21", "react-is": "^16.10.2", - "react-resize-detector": 
"^8.0.4", - "react-smooth": "^2.0.2", + "react-smooth": "^4.0.0", "recharts-scale": "^0.4.4", - "reduce-css-calc": "^2.1.8", + "tiny-invariant": "^1.3.1", "victory-vendor": "^36.6.8" } }, @@ -38636,22 +38799,6 @@ "strip-indent": "^3.0.0" } }, - "reduce-css-calc": { - "version": "2.1.8", - "resolved": "https://registry.npmjs.org/reduce-css-calc/-/reduce-css-calc-2.1.8.tgz", - "integrity": "sha512-8liAVezDmUcH+tdzoEGrhfbGcP7nOV4NkGE3a74+qqvE7nt9i4sKLGBuZNOnpI4WiGksiNPklZxva80061QiPg==", - "requires": { - "css-unit-converter": "^1.1.1", - "postcss-value-parser": "^3.3.0" - }, - "dependencies": { - "postcss-value-parser": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", - "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==" - } - } - }, "reflect.getprototypeof": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.4.tgz", @@ -38697,13 +38844,14 @@ "integrity": "sha512-jbD/FT0+9MBU2XAZluI7w2OBs1RBi6p9M83nkoZayQXXU9e8Robt69FcZc7wU4eJD/YFTjn1JdCk3rbMJajz8Q==" }, "regexp.prototype.flags": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.0.tgz", - "integrity": "sha512-0SutC3pNudRKgquxGoRGIz946MZVHqbNfPjBdxeOhBrdgDKlRoXmYLQN9xRbrR09ZXWeGAdPuif7egofn6v5LA==", + "version": "1.5.2", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.2.tgz", + "integrity": "sha512-NcDiDkTLuPR+++OCKB0nWafEmhg/Da8aUPLPMQbK+bxKKCm1/S5he+AqYa4PlMCVBalb4/yxIRub6qkEx5yJbw==", "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "functions-have-names": "^1.2.3" + "call-bind": "^1.0.6", + "define-properties": "^1.2.1", + "es-errors": "^1.3.0", + "set-function-name": "^2.0.1" } }, "regexpu-core": { @@ -38949,12 +39097,12 @@ } }, "safe-array-concat": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.0.1.tgz", - "integrity": "sha512-6XbUAseYE2KtOuGueyeobCySj9L4+66Tn6KQMOPQJrAJEowYKW/YR/MGJZl7FdydUdaFu4LYyDZjxf4/Nmo23Q==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.2.tgz", + "integrity": "sha512-vj6RsCsWBCf19jIeHEfkRMw8DPiBb+DMXklQ/1SGDHOMlHdPUkZXFQ2YdplS23zESTijAcurb1aSgJA3AgMu1Q==", "requires": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.2.1", + "call-bind": "^1.0.7", + "get-intrinsic": "^1.2.4", "has-symbols": "^1.0.3", "isarray": "^2.0.5" } @@ -38965,12 +39113,12 @@ "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" }, "safe-regex-test": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.0.tgz", - "integrity": "sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.3.tgz", + "integrity": "sha512-CdASjNJPvRa7roO6Ra/gLYBTzYzzPyyBXxIMdGW3USQLyjWEls2RgW5UBTXaQVp+OrpeCK3bLem8smtmheoRuw==", "requires": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.1.3", + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", "is-regex": "^1.1.4" } }, @@ -38985,9 +39133,9 @@ "integrity": "sha512-ZRwKbh/eQ6w9vmTjkuG0Ioi3HBwPFce0O+v//ve+aOq1oeCy7jMV2qzzAlpsNuqpqCBjjriM1lbtZbF/Q8jVyA==" }, "sass": { - "version": "1.64.1", - "resolved": 
"https://registry.npmjs.org/sass/-/sass-1.64.1.tgz", - "integrity": "sha512-16rRACSOFEE8VN7SCgBu1MpYCyN7urj9At898tyzdXFhC+a+yOX5dXwAR7L8/IdPJ1NB8OYoXmD55DM30B2kEQ==", + "version": "1.77.0", + "resolved": "https://registry.npmjs.org/sass/-/sass-1.77.0.tgz", + "integrity": "sha512-eGj4HNfXqBWtSnvItNkn7B6icqH14i3CiCGbzMKs3BAPTq62pp9NBYsBgyN4cA+qssqo9r26lW4JSvlaUUWbgw==", "devOptional": true, "requires": { "chokidar": ">=3.0.0 <4.0.0", @@ -39191,14 +39339,28 @@ "send": "0.18.0" } }, + "set-function-length": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", + "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", + "requires": { + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.2" + } + }, "set-function-name": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.1.tgz", - "integrity": "sha512-tMNCiqYVkXIZgc2Hnoy2IvC/f8ezc5koaRFkCjrpWzGpCd3qbZXPzVy9MAZzK1ch/X0jvSkojys3oqJN0qCmdA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.2.tgz", + "integrity": "sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==", "requires": { - "define-data-property": "^1.0.1", + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", "functions-have-names": "^1.2.3", - "has-property-descriptors": "^1.0.0" + "has-property-descriptors": "^1.0.2" } }, "set-harmonic-interval": { @@ -39235,13 +39397,14 @@ "integrity": "sha512-6j1W9l1iAs/4xYBI1SYOVZyFcCis9b4KCLQ8fgAGG07QvzaRLVVRQvAy85yNmmZSjYjg4MWh4gNvlPujU/5LpA==" }, "side-channel": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", - "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz", + "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==", "requires": { - "call-bind": "^1.0.0", - "get-intrinsic": "^1.0.2", - "object-inspect": "^1.9.0" + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4", + "object-inspect": "^1.13.1" } }, "signal-exit": { @@ -39540,48 +39703,53 @@ } }, "string.prototype.matchall": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/string.prototype.matchall/-/string.prototype.matchall-4.0.8.tgz", - "integrity": "sha512-6zOCOcJ+RJAQshcTvXPHoxoQGONa3e/Lqx90wUA+wEzX78sg5Bo+1tQo4N0pohS0erG9qtCqJDjNCQBjeWVxyg==", + "version": "4.0.11", + "resolved": "https://registry.npmjs.org/string.prototype.matchall/-/string.prototype.matchall-4.0.11.tgz", + "integrity": "sha512-NUdh0aDavY2og7IbBPenWqR9exH+E26Sv8e0/eTe1tltDGZL+GtBkDAnnyBtmekfK6/Dq3MkcGtzXFEd1LQrtg==", "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", - "get-intrinsic": "^1.1.3", + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "get-intrinsic": "^1.2.4", + "gopd": "^1.0.1", "has-symbols": "^1.0.3", - "internal-slot": "^1.0.3", - "regexp.prototype.flags": "^1.4.3", - "side-channel": "^1.0.4" + "internal-slot": "^1.0.7", + "regexp.prototype.flags": 
"^1.5.2", + "set-function-name": "^2.0.2", + "side-channel": "^1.0.6" } }, "string.prototype.trim": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.7.tgz", - "integrity": "sha512-p6TmeT1T3411M8Cgg9wBTMRtY2q9+PNy9EV1i2lIXUN/btt763oIfxwN3RR8VU6wHX8j/1CFy0L+YuThm6bgOg==", + "version": "1.2.9", + "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.9.tgz", + "integrity": "sha512-klHuCNxiMZ8MlsOihJhJEBJAiMVqU3Z2nEXWfWnIqjN0gEFS9J9+IxKozWWtQGcgoa1WUZzLjKPTr4ZHNFTFxw==", "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.0", + "es-object-atoms": "^1.0.0" } }, "string.prototype.trimend": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.6.tgz", - "integrity": "sha512-JySq+4mrPf9EsDBEDYMOb/lM7XQLulwg5R/m1r0PXEFqrV0qHvl58sdTilSXtKOflCsK2E8jxf+GKC0T07RWwQ==", + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.8.tgz", + "integrity": "sha512-p73uL5VCHCO2BZZ6krwwQE3kCzM7NKmis8S//xEC6fQonchbum4eP6kR4DLEjQFO3Wnj3Fuo8NM0kOSjVdHjZQ==", "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" } }, "string.prototype.trimstart": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.6.tgz", - "integrity": "sha512-omqjMDaY92pbn5HOX7f9IccLA+U1tA9GvtU4JrodiXFfYB7jPzzHpRzpglLAjtUV6bB557zwClJezTqnAiYnQA==", + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.8.tgz", + "integrity": "sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg==", "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" } }, "stringify-object": { @@ -40161,6 +40329,11 @@ "resolved": "https://registry.npmjs.org/thunky/-/thunky-1.1.0.tgz", "integrity": "sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA==" }, + "tiny-invariant": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.3.3.tgz", + "integrity": "sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==" + }, "tiny-warning": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/tiny-warning/-/tiny-warning-1.0.3.tgz", @@ -40335,46 +40508,51 @@ } }, "typed-array-buffer": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.0.tgz", - "integrity": "sha512-Y8KTSIglk9OZEr8zywiIHG/kmQ7KWyjseXs1CbSo8vC42w7hg2HgYTxSWwP0+is7bWDc1H+Fo026CpHFwm8tkw==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.2.tgz", + "integrity": "sha512-gEymJYKZtKXzzBzM4jqa9w6Q1Jjm7x2d+sh19AdsD4wqnMPDYyvwpsIc2Q/835kHuo3BEQ7CjelGhfTsoBb2MQ==", "requires": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.2.1", - "is-typed-array": "^1.1.10" + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "is-typed-array": "^1.1.13" } }, "typed-array-byte-length": { - "version": 
"1.0.0", - "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.0.tgz", - "integrity": "sha512-Or/+kvLxNpeQ9DtSydonMxCx+9ZXOswtwJn17SNLvhptaXYDJvkFFP5zbfU/uLmvnBJlI4yrnXRxpdWH/M5tNA==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.1.tgz", + "integrity": "sha512-3iMJ9q0ao7WE9tWcaYKIptkNBuOIcZCCT0d4MRvuuH88fEoEH62IuQe0OtraD3ebQEoTRk8XCBoknUNc1Y67pw==", "requires": { - "call-bind": "^1.0.2", + "call-bind": "^1.0.7", "for-each": "^0.3.3", - "has-proto": "^1.0.1", - "is-typed-array": "^1.1.10" + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13" } }, "typed-array-byte-offset": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.0.tgz", - "integrity": "sha512-RD97prjEt9EL8YgAgpOkf3O4IF9lhJFr9g0htQkm0rchFp/Vx7LW5Q8fSXXub7BXAODyUQohRMyOc3faCPd0hg==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.2.tgz", + "integrity": "sha512-Ous0vodHa56FviZucS2E63zkgtgrACj7omjwd/8lTEMEPFFyjfixMZ1ZXenpgCFBBt4EC1J2XsyVS2gkG0eTFA==", "requires": { - "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.2", + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.7", "for-each": "^0.3.3", - "has-proto": "^1.0.1", - "is-typed-array": "^1.1.10" + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13" } }, "typed-array-length": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.4.tgz", - "integrity": "sha512-KjZypGq+I/H7HI5HlOoGHkWUUGq+Q0TPhQurLbyrVrvnKTBgzLhIJ7j6J/XTQOi0d1RjyZ0wdas8bKs2p0x3Ng==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.6.tgz", + "integrity": "sha512-/OxDN6OtAk5KBpGb28T+HZc2M+ADtvRxXrKKbUwtsLgdoxgX13hyy7ek6bFRl5+aBs2yZzB0c4CnQfAtVypW/g==", "requires": { - "call-bind": "^1.0.2", + "call-bind": "^1.0.7", "for-each": "^0.3.3", - "is-typed-array": "^1.1.9" + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13", + "possible-typed-array-names": "^1.0.0" } }, "typedarray-to-buffer": { @@ -41079,15 +41257,15 @@ } }, "which-typed-array": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.11.tgz", - "integrity": "sha512-qe9UWWpkeG5yzZ0tNYxDmd7vo58HDBc39mZ0xWWpolAGADdFOzkfamWLDxkOWcvHQKVmdTyQdLD4NOfjLWTKew==", + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.15.tgz", + "integrity": "sha512-oV0jmFtUky6CXfkqehVvBP/LSWJ2sy4vWMioiENyJLePrBO/yKyV9OyJySfAKosh+RYkIl5zJCNZ8/4JncrpdA==", "requires": { - "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.2", + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.7", "for-each": "^0.3.3", "gopd": "^1.0.1", - "has-tostringtag": "^1.0.0" + "has-tostringtag": "^1.0.2" } }, "workbox-background-sync": { diff --git a/frontend/package.json b/frontend/package.json index 21b0399ed8..f48965fbdd 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -1,6 +1,6 @@ { "name": "intelowl", - "version": "5.0.0", + "version": "6.0.0", "private": true, "proxy": "http://localhost:80/", "dependencies": { @@ -9,7 +9,7 @@ "axios": "^1.6.0", "axios-hooks": "^3.1.5", "bootstrap": "^5.3.2", - "classnames": "^2.3.1", + "classnames": "^2.5.1", "flag-icons": "^7.1.0", "formik": "^2.4.5", "js-cookie": "^3.0.5", @@ -20,7 +20,7 @@ 
"react-error-boundary": "^4.0.11", "react-google-recaptcha": "^2.1.0", "react-icons": "^4.12.0", - "react-joyride": "^2.7.2", + "react-joyride": "^2.8.1", "react-json-tree": "^0.18.0", "react-markdown": "^8.0.7", "react-router-dom": "^6.22.0", @@ -30,7 +30,7 @@ "react-use": "^17.5.0", "reactflow": "^11.10.4", "reactstrap": "^9.2.1", - "recharts": "^2.8.0", + "recharts": "^2.12.6", "zustand": "^4.5.2" }, "scripts": { @@ -72,12 +72,12 @@ "eslint-config-prettier": "^9.1.0", "eslint-plugin-import": "^2.29.1", "eslint-plugin-jsx-a11y": "^6.8.0", - "eslint-plugin-react": "^7.33.2", + "eslint-plugin-react": "^7.34.1", "eslint-plugin-react-hooks": "^4.5.0", "jest": "^29.7.0", "jest-environment-jsdom": "^29.7.0", "prettier": "^3.2.5", - "sass": "^1.61.0", + "sass": "^1.77.0", "stylelint": "^14.9.1", "stylelint-config-prettier": "^9.0.3", "stylelint-config-standard-scss": "^4.0.0" diff --git a/frontend/src/components/GuideWrapper.jsx b/frontend/src/components/GuideWrapper.jsx index 891842e424..32ccd1ce27 100644 --- a/frontend/src/components/GuideWrapper.jsx +++ b/frontend/src/components/GuideWrapper.jsx @@ -103,13 +103,29 @@ export default function GuideWrapper() { disableBeacon: true, }, { - target: "#jobsHistory", + target: "#Jobs", content: (
-            Job History
+            Jobs History
-            Here you could see the list of all previous jobs and expand over the
-            details through clicking that particular job from the table
+            Jobs are simple analyses of an observable or a file. Here you can
+            see the list of all previous jobs and expand the details of each
+            one by clicking it in the table.
+ ), + disableBeacon: true, + }, + { + target: "#Investigations", + content: ( +
+            Investigations History
+            Investigations are a framework for connecting jobs with each other.
+            Here you can see the list of all previous investigations and expand
+            the details of each one by clicking it in the table.
), @@ -208,7 +224,7 @@ export default function GuideWrapper() { if (type === "step:after") { if (action === "close") { setGuideState({ run: true, stepIndex: 7 }); - navigate("/jobs"); + navigate("/history/jobs"); } else { setGuideState({ run: false, stepIndex: 0 }); navigate("/"); @@ -219,7 +235,6 @@ export default function GuideWrapper() { if (type === "step:after") { if (action === "close") { setGuideState({ run: true, stepIndex: 8 }); - navigate("/dashboard"); } else { setGuideState({ run: false, stepIndex: 0 }); navigate("/"); @@ -230,6 +245,7 @@ export default function GuideWrapper() { if (type === "step:after") { if (action === "close") { setGuideState({ run: true, stepIndex: 9 }); + navigate("/dashboard"); } else { setGuideState({ run: false, stepIndex: 0 }); navigate("/"); @@ -240,6 +256,16 @@ export default function GuideWrapper() { if (type === "step:after") { if (action === "close") { setGuideState({ run: true, stepIndex: 10 }); + } else { + setGuideState({ run: false, stepIndex: 0 }); + navigate("/"); + } + } + break; + case 10: + if (type === "step:after") { + if (action === "close") { + setGuideState({ run: true, stepIndex: 11 }); navigate("/"); } else { setGuideState({ run: false, stepIndex: 0 }); diff --git a/frontend/src/components/History.jsx b/frontend/src/components/History.jsx index d717850b0c..a39897617c 100644 --- a/frontend/src/components/History.jsx +++ b/frontend/src/components/History.jsx @@ -6,6 +6,7 @@ import { Button, Col } from "reactstrap"; import { RouterTabs, FallBackLoading } from "@certego/certego-ui"; import { useNavigate, useLocation } from "react-router-dom"; +import { useGuideContext } from "../contexts/GuideContext"; import { createInvestigation } from "./investigations/result/investigationApi"; const JobsTable = React.lazy(() => import("./jobs/table/JobsTable")); @@ -54,6 +55,17 @@ export default function History() { const location = useLocation(); const isJobsTablePage = location?.pathname.includes("jobs"); + const { guideState, setGuideState } = useGuideContext(); + + React.useEffect(() => { + if (guideState.tourActive) { + setTimeout(() => { + setGuideState({ run: true, stepIndex: 7 }); + }, 200); + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []); + const onClick = async () => { if (isJobsTablePage) { navigate("/scan"); @@ -69,7 +81,13 @@ export default function History() { const createButton = ( - diff --git a/frontend/src/components/common/icon/icons.jsx b/frontend/src/components/common/icon/icons.jsx index 16240425cd..7c784de13e 100644 --- a/frontend/src/components/common/icon/icons.jsx +++ b/frontend/src/components/common/icon/icons.jsx @@ -7,12 +7,13 @@ import { MdComment, MdFileDownload, } from "react-icons/md"; +import { FaRegStopCircle } from "react-icons/fa"; // These function are needed in IconButton because it expects Icon as a function export function DeleteIcon() { return ( - + Delete @@ -53,3 +54,12 @@ export function downloadReportIcon() { export function SpinnerIcon() { return ; } + +export function killJobIcon() { + return ( + + + Kill job + + ); +} diff --git a/frontend/src/components/dashboard/Dashboard.jsx b/frontend/src/components/dashboard/Dashboard.jsx index 32737e5d0a..0ef0970da2 100644 --- a/frontend/src/components/dashboard/Dashboard.jsx +++ b/frontend/src/components/dashboard/Dashboard.jsx @@ -47,7 +47,7 @@ export default function Dashboard() { useEffect(() => { if (guideState.tourActive) { setTimeout(() => { - setGuideState({ run: true, stepIndex: 8 }); + setGuideState({ run: true, stepIndex: 9 
}); }, 100); } // eslint-disable-next-line react-hooks/exhaustive-deps diff --git a/frontend/src/components/investigations/flow/CustomJobNode.jsx b/frontend/src/components/investigations/flow/CustomJobNode.jsx index 8b8fa8fb01..eeba0defa4 100644 --- a/frontend/src/components/investigations/flow/CustomJobNode.jsx +++ b/frontend/src/components/investigations/flow/CustomJobNode.jsx @@ -2,8 +2,12 @@ import React from "react"; import PropTypes from "prop-types"; import { NodeToolbar, Handle, Position } from "reactflow"; import "reactflow/dist/style.css"; -import { Button } from "reactstrap"; +import { Button, UncontrolledTooltip } from "reactstrap"; import { AiOutlineLink } from "react-icons/ai"; +import { LuGitBranchPlus } from "react-icons/lu"; +import { MdContentCopy } from "react-icons/md"; + +import { CopyToClipboardButton, DateHoverable } from "@certego/certego-ui"; import { RemoveJob } from "./investigationActions"; @@ -23,8 +27,17 @@ function CustomJobNode({ data }) { id={`toolbar-job-${data.id}`} >
+ + Copy + + + Go to job #{data.id} result page + + + + Analyze the same observable again + {data.isFirstLevel && }
Playbook: {data?.playbook}
+            Created:
diff --git a/frontend/src/components/investigations/flow/utils.js b/frontend/src/components/investigations/flow/utils.js index b4247290f0..13c537b9ce 100644 --- a/frontend/src/components/investigations/flow/utils.js +++ b/frontend/src/components/investigations/flow/utils.js @@ -23,6 +23,7 @@ function addJobNode( refetchTree, refetchInvestigation, isFirstLevel: isFirstLevel || false, + created: job.received_request_time, }, type: "jobNode", }); diff --git a/frontend/src/components/investigations/result/InvestigationInfoCard.jsx b/frontend/src/components/investigations/result/InvestigationInfoCard.jsx index 22541db0dd..5e9e1d10fe 100644 --- a/frontend/src/components/investigations/result/InvestigationInfoCard.jsx +++ b/frontend/src/components/investigations/result/InvestigationInfoCard.jsx @@ -25,7 +25,7 @@ import { StatusTag } from "../../common/StatusTag"; import { TLPTag } from "../../common/TLPTag"; import { updateInvestigation } from "./investigationApi"; -export function InvestigationInfoCard({ investigation }) { +export function InvestigationInfoCard({ investigation, refetchTree }) { // local state const [isOpen, setIsOpen] = React.useState(false); @@ -39,6 +39,7 @@ export function InvestigationInfoCard({ investigation }) { const success = await updateInvestigation(investigation.id, { name: investigationName, }); + if (success) refetchTree(); if (!success) return; } setIsEditing(false); @@ -152,4 +153,5 @@ export function InvestigationInfoCard({ investigation }) { InvestigationInfoCard.propTypes = { investigation: PropTypes.object.isRequired, + refetchTree: PropTypes.func.isRequired, }; diff --git a/frontend/src/components/investigations/result/InvestigationOverview.jsx b/frontend/src/components/investigations/result/InvestigationOverview.jsx index 5a57f82705..a3754d54db 100644 --- a/frontend/src/components/investigations/result/InvestigationOverview.jsx +++ b/frontend/src/components/investigations/result/InvestigationOverview.jsx @@ -3,7 +3,7 @@ import PropTypes from "prop-types"; import useAxios from "axios-hooks"; import { Col, Row, Container, Input } from "reactstrap"; import { MdEdit } from "react-icons/md"; -import { BsFillCheckSquareFill } from "react-icons/bs"; +import { BsFillCheckSquareFill, BsMarkdown } from "react-icons/bs"; import { useLocation } from "react-router-dom"; import { IconButton, Loader } from "@certego/certego-ui"; @@ -14,6 +14,7 @@ import { InvestigationActionsBar } from "./InvestigationActionBar"; import { updateInvestigation } from "./investigationApi"; import { InvestigationFlow } from "../flow/InvestigationFlow"; import { INVESTIGATION_BASE_URI } from "../../../constants/apiURLs"; +import { markdownToHtml } from "../../common/markdownToHtml"; export function InvestigationOverview({ isRunningInvestigation, @@ -78,63 +79,80 @@ export function InvestigationOverview({ {/* investigation metadata card */} - + -
+
Description - {isEditing ? ( - <> - - { - setInvestigationDescription(event.target.value); - }} - placeholder="Enter a description" - value={investigationDescription} - style={{ minHeight: "200px", overflowY: "auto" }} - className="bg-dark" - /> - - ) : ( - <> - setIsEditing(true)} - title="Edit description" - titlePlacement="top" - /> -
- {investigationDescription || "No description"} -
- + setIsEditing(true)} + title="Edit description" + titlePlacement="top" + /> + + {isEditing && ( + )}
+ + {isEditing ? ( + { + setInvestigationDescription(event.target.value); + }} + placeholder="Enter a description" + value={investigationDescription} + style={{ minHeight: "200px", overflowY: "auto" }} + className="bg-dark" + /> + ) : ( +
+ {investigationDescription + ? markdownToHtml(investigationDescription) + : "No description"} +
+ )} +
- + {job.investigation && ( <> @@ -59,9 +65,9 @@ export function JobInfoCard({ job }) { -

+

{job.is_sample ? ( - + Toggle Job Metadata @@ -101,14 +107,14 @@ export function JobInfoCard({ job }) { - + {[ - ["Status", ], + ["Status", ], ["TLP", ], ["User", job.user?.username], ["MD5", job.md5], @@ -134,7 +140,7 @@ export function JobInfoCard({ job }) { ), ], ].map(([key, value]) => ( - + {key}
{value}
@@ -155,33 +161,85 @@ export function JobInfoCard({ job }) { ], [ "Tags", - job.tags.map((tag) => ( - - )), + job.tags.length ? ( + job.tags.map((tag) => ( + + )) + ) : ( + None + ), ], [ "Warning(s)", -
    - {job.warnings.map((error) => ( -
  • {error}
  • - ))} -
, + <> +
+ {job.warnings.length} warnings + + + Toggle Job Warnings + +
+ +
    + {job.warnings.map((error) => ( +
  • {error}
  • + ))} +
+
+ , ], [ "Error(s)", -
    - {job.errors.map((error) => ( -
  • {error}
  • - ))} -
, + <> +
+ {job.errors.length} errors + + + Toggle Job Errors + +
+ +
    + {job.errors.map((error) => ( +
  • {error}
  • + ))} +
+
+ , ], ].map(([key, value]) => ( - + {key}
{value}
))}
+ {Object.values(JobFinalStatuses).includes(job.status) && ( +
+ +
+ )}

diff --git a/frontend/src/components/jobs/result/JobIsRunningAlert.jsx b/frontend/src/components/jobs/result/JobIsRunningAlert.jsx index 5221160214..b756d56212 100644 --- a/frontend/src/components/jobs/result/JobIsRunningAlert.jsx +++ b/frontend/src/components/jobs/result/JobIsRunningAlert.jsx @@ -1,115 +1,46 @@ +/* eslint-disable id-length */ import React from "react"; import PropTypes from "prop-types"; -import { Fade } from "reactstrap"; -import { MdPauseCircleOutline } from "react-icons/md"; +import { ReactFlowProvider } from "reactflow"; +import "reactflow/dist/style.css"; +import { IconButton } from "@certego/certego-ui"; -import { IconAlert, IconButton } from "@certego/certego-ui"; +import { JobFinalStatuses } from "../../../constants/jobConst"; +import { areYouSureConfirmDialog } from "../../common/areYouSureConfirmDialog"; import { killJob } from "./jobApi"; -import { JobStatuses } from "../../../constants/jobConst"; - -import { - reportedPluginNumber, - reportedVisualizerNumber, -} from "./utils/reportedPlugins"; +import { killJobIcon } from "../../common/icon/icons"; +import { JobIsRunningFlow } from "./flow/JobIsRunningFlow"; export function JobIsRunningAlert({ job }) { - // number of analyzers/connectors/visualizers reported (status: killed/succes/failed) - const analizersReported = reportedPluginNumber(job.analyzer_reports); - const connectorsReported = reportedPluginNumber(job.connector_reports); - const pivotsReported = reportedPluginNumber(job.pivot_reports); - const visualizersReported = reportedVisualizerNumber( - job.visualizer_reports, - job.visualizers_to_execute, - ); - - /* Check if analyzers/connectors/visualizers are completed - The analyzers are completed from the "analyzers_completed" status (index=3) to the last status - The connectors are completed from the "connectors_completed" status (index=5) to the last status - The visualizers are completed from the "visualizers_completed" status (index=7) to the last status - */ - const analyzersCompleted = Object.values(JobStatuses) - .slice(3) - .includes(job.status); - const connectorsCompleted = Object.values(JobStatuses) - .slice(5) - .includes(job.status); - const pivotsCompleted = Object.values(JobStatuses) - .slice(7) - .includes(job.status); - const visualizersCompleted = Object.values(JobStatuses) - .slice(9) - .includes(job.status); - - const alertElements = [ - { - step: 1, - type: "ANALYZERS", - completed: - analizersReported === job.analyzers_to_execute.length && - analyzersCompleted, - report: `${analizersReported}/${job.analyzers_to_execute.length}`, - }, - { - step: 2, - type: "CONNECTORS", - completed: - connectorsReported === job.connectors_to_execute.length && - connectorsCompleted, - report: `${connectorsReported}/${job.connectors_to_execute.length}`, - }, - { - step: 3, - type: "PIVOTS", - completed: - pivotsReported === job.pivots_to_execute.length && pivotsCompleted, - report: `${pivotsReported}/${job.pivots_to_execute.length}`, - }, - { - step: 4, - type: "VISUALIZERS", - completed: - visualizersReported === job.visualizers_to_execute.length && - visualizersCompleted, - report: `${visualizersReported}/${job.visualizers_to_execute.length}`, - }, - ]; + const onKillJobBtnClick = async () => { + const sure = await areYouSureConfirmDialog(`Kill Job #${job.id}`); + if (!sure) return null; + await killJob(job.id); + return null; + }; return ( - - -
- This job is currently running - . -
- {alertElements.map((element) => ( -
- STEP {element.step}: {element.type} RUNNING - - -  reported {element.report} - -
- ))} - {job.permissions?.kill && ( - killJob(job.id)} - className="mt-2" - /> - )} -
-
+ <> + + + +
+ {job.permissions?.kill && + !Object.values(JobFinalStatuses).includes(job.status) && ( + + )} +
+ ); } diff --git a/frontend/src/components/jobs/result/JobOverview.jsx b/frontend/src/components/jobs/result/JobOverview.jsx index dbe3808209..a1628196d3 100644 --- a/frontend/src/components/jobs/result/JobOverview.jsx +++ b/frontend/src/components/jobs/result/JobOverview.jsx @@ -19,12 +19,7 @@ import { Loader } from "@certego/certego-ui"; import { JSONTree } from "react-json-tree"; import { useNavigate, useLocation } from "react-router-dom"; -import { - AnalyzersReportTable, - ConnectorsReportTable, - PivotsReportTable, - VisualizersReportTable, -} from "./pluginReportTables"; +import { PluginsReportTable } from "./pluginReportTables"; import { reportedPluginNumber, reportedVisualizerNumber, @@ -39,6 +34,7 @@ import { JobResultSections } from "../../../constants/miscConst"; import { JobInfoCard } from "./JobInfoCard"; import { JobIsRunningAlert } from "./JobIsRunningAlert"; import { JobActionsBar } from "./bar/JobActionBar"; +import { usePluginConfigurationStore } from "../../../stores/usePluginConfigurationStore"; /* THESE IDS CANNOT BE EMPTY! We perform a redirect in case the user landed in the visualzier page without a visualizer, @@ -63,6 +59,26 @@ export function JobOverview({ const isSelectedUI = section === JobResultSections.VISUALIZER; + const [ + analyzersLoading, + connectorsLoading, + visualizersLoading, + pivotsLoading, + analyzers, + connectors, + visualizers, + pivots, + ] = usePluginConfigurationStore((state) => [ + state.analyzersLoading, + state.connectorsLoading, + state.visualizersLoading, + state.pivotsLoading, + state.analyzers, + state.connectors, + state.visualizers, + state.pivots, + ]); + const rawElements = React.useMemo( () => [ { @@ -80,7 +96,15 @@ export function JobOverview({ />
), - report: , + report: ( + + ), }, { name: "connector", @@ -97,7 +121,15 @@ export function JobOverview({ /> ), - report: , + report: ( + + ), }, { name: "pivot", @@ -114,7 +146,15 @@ export function JobOverview({ /> ), - report: , + report: ( + + ), }, { name: "visualizer", @@ -135,7 +175,15 @@ export function JobOverview({ /> ), - report: , + report: ( + + ), }, { name: "full", @@ -158,7 +206,14 @@ export function JobOverview({ ), }, ], - [job], + // eslint-disable-next-line react-hooks/exhaustive-deps + [ + job, + analyzersLoading, + connectorsLoading, + visualizersLoading, + pivotsLoading, + ], ); // state @@ -321,7 +376,10 @@ export function JobOverview({ {isRunningJob && ( - + )} diff --git a/frontend/src/components/jobs/result/bar/JobActionBar.jsx b/frontend/src/components/jobs/result/bar/JobActionBar.jsx index 8e1e78171a..dd3bc3cefe 100644 --- a/frontend/src/components/jobs/result/bar/JobActionBar.jsx +++ b/frontend/src/components/jobs/result/bar/JobActionBar.jsx @@ -63,7 +63,7 @@ export function JobActionsBar({ job }) { ); } else { addToast("Retrying the same job...", null, "spinner", false, 2000); - const jobId = await createJob( + const response = await createJob( [job.observable_name], job.observable_classification, job.playbook_requested, @@ -76,7 +76,10 @@ export function JobActionsBar({ job }) { 0, ); setTimeout( - () => navigate(`/jobs/${jobId[0]}/${JobResultSections.VISUALIZER}/`), + () => + navigate( + `/jobs/${response.jobIds[0]}/${JobResultSections.VISUALIZER}/`, + ), 1000, ); } diff --git a/frontend/src/components/jobs/result/flow/CustomJobPipelineNode.jsx b/frontend/src/components/jobs/result/flow/CustomJobPipelineNode.jsx new file mode 100644 index 0000000000..c1d49758d9 --- /dev/null +++ b/frontend/src/components/jobs/result/flow/CustomJobPipelineNode.jsx @@ -0,0 +1,62 @@ +import React from "react"; +import PropTypes from "prop-types"; +import { Handle, Position } from "reactflow"; +import "reactflow/dist/style.css"; +import { StatusIcon } from "../../../common/icon/StatusIcon"; + +function CustomJobPipelineNode({ data }) { + let statusIcon = "pending"; + if (data.completed) statusIcon = "success"; + else if (data.running) statusIcon = "running"; + + return ( + <> +
+        {data?.label} {data.running && "RUNNING"}
+        {data.completed && "COMPLETED"}{" "}
+        Reported {data.report}