diff --git a/.github/CHANGELOG.md b/.github/CHANGELOG.md
index 5bb7b503a6..7684aa8bda 100644
--- a/.github/CHANGELOG.md
+++ b/.github/CHANGELOG.md
@@ -2,8 +2,24 @@
 [**Upgrade Guide**](https://intelowl.readthedocs.io/en/latest/Installation.md#update-to-the-most-recent-version)
 
+## [v5.2.2](https://github.com/intelowlproject/IntelOwl/releases/tag/v5.2.2)
+
+This release was published mainly to fix a broken database migration introduced in the previous release.
+
+**Main Improvements**
+* Added new analyzers for [DNS0](https://docs.dns0.eu/) PassiveDNS data
+* Added the ability to collect metrics ([Business Intelligence](https://intelowl.readthedocs.io/en/develop/Advanced-Configuration.html#business-intelligence)) regarding plugin usage and send them to an Elasticsearch instance.
+* Added new buttons to test ["Healthcheck" and "Pull" operations](https://intelowl.readthedocs.io/en/latest/Usage.html#special-plugins-operations) for each Plugin (a feature introduced in the previous version)
+
+**Other improvements**
+* Various fixes and adjustments in the GUI
+* Dependency upgrades
+* Adjusted the contribution guides
+
 ## [v5.2.1](https://github.com/intelowlproject/IntelOwl/releases/tag/v5.2.1)
 
+!!! This release contains a broken database migration !!! Please upgrade to v5.2.2 to fix the problem.
+
 **General improvements**
 * Increased the wait time of containers' healthchecks to avoid breaking clean installations
 * Improvements to the "Scan page":
diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
index 161346c658..a47334b036 100644
--- a/.github/pull_request_template.md
+++ b/.github/pull_request_template.md
@@ -19,17 +19,17 @@ Please delete options that are not relevant.
 - [ ] A new plugin (analyzer, connector, visualizer, playbook, pivot or ingestor) was added or changed, in which case:
   - [ ] I strictly followed the documentation ["How to create a Plugin"](https://intelowl.readthedocs.io/en/latest/Contribute.html#how-to-add-a-new-plugin)
   - [ ] [Usage](https://github.com/intelowlproject/IntelOwl/blob/master/docs/source/Usage.md) file was updated.
-  - [ ] [Advanced-Usage](./Advanced-Usage.md) was updated (in case the plugin provides additional optional configuration).
+  - [ ] [Advanced-Usage](https://github.com/intelowlproject/IntelOwl/blob/master/docs/source/Advanced-Usage.md) was updated (in case the plugin provides additional optional configuration).
   - [ ] If the plugin requires mocked testing, `_monkeypatch()` was used in its class to apply the necessary decorators.
-  - [ ] I have dumped the configuration from Django Admin using the `dumpplugin` command and added it in the project as a data migration. [Doc](["How to create a Plugin"](https://intelowl.readthedocs.io/en/latest/Contribute.html#how-to-add-a-new-plugin))
+  - [ ] I have dumped the configuration from Django Admin using the `dumpplugin` command and added it to the project as a data migration. (["How to share a plugin with the community"](https://intelowl.readthedocs.io/en/latest/Contribute.html#how-to-share-your-plugin-with-the-community))
   - [ ] If a File analyzer was added and it supports a mimetype which is not already supported, you added a sample of that type inside the archive `test_files.zip` and you added the default tests for that mimetype in [test_classes.py](https://github.com/intelowlproject/IntelOwl/blob/master/tests/analyzers_manager/test_classes.py).
-  - [ ] If you created a new analyzer and it is free (does not require API keys), please add it in the `FREE_TO_USE_ANALYZERS` playbook in `playbook_config.json`.
+  - [ ] If you created a new analyzer and it is free (does not require API keys), please add it to the `FREE_TO_USE_ANALYZERS` playbook by following [this guide](https://intelowl.readthedocs.io/en/latest/Contribute.html#how-to-modify-a-plugin).
   - [ ] Check if it could make sense to add that analyzer/connector to other [freely available playbooks](https://intelowl.readthedocs.io/en/develop/Usage.html#list-of-pre-built-playbooks).
 - [ ] I have provided the resulting raw JSON of a finished analysis and a screenshot of the results.
 - [ ] If external libraries/packages with restrictive licenses were used, they were added in the [Legal Notice](https://github.com/certego/IntelOwl/blob/master/.github/legal_notice.md) section.
 - [ ] Linters (`Black`, `Flake`, `Isort`) gave 0 errors. If you have correctly installed [pre-commit](https://intelowl.readthedocs.io/en/latest/Contribute.html#how-to-start-setup-project-and-development-instance), it does these checks and adjustments on your behalf.
 - [ ] I have added tests for the feature/bug I solved (see `tests` folder). All the tests (new and old ones) gave 0 errors.
-- [ ] If changes were made to an existing model/serializer/view, the docs were updated and regenerated (check [CONTRIBUTE.md](./Contribute.md)).
+- [ ] If changes were made to an existing model/serializer/view, the docs were updated and regenerated (check [CONTRIBUTE.md](https://github.com/intelowlproject/IntelOwl/blob/master/docs/source/Contribute.md)).
 - [ ] If the GUI has been modified:
   - [ ] I have provided a screenshot of the result in the PR.
   - [ ] I have created new frontend tests for the new component or updated existing ones.
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index eb368d0b0e..948a56bc92 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -44,7 +44,7 @@ jobs:
         fetch-depth: 2
 
     - name: Set up Python
-      uses: actions/setup-python@v4.7.0
+      uses: actions/setup-python@v5.0.0
      with:
        python-version: '3.9'
@@ -65,7 +65,7 @@
     # Initializes the CodeQL tools for scanning.
     - name: Initialize CodeQL
-      uses: github/codeql-action/init@v2
+      uses: github/codeql-action/init@v3
      with:
        languages: python
        # Override the default behavior so that the action doesn't attempt
@@ -93,4 +93,4 @@
     #   make release
 
     - name: Perform CodeQL Analysis
-      uses: github/codeql-action/analyze@v2
+      uses: github/codeql-action/analyze@v3
diff --git a/.github/workflows/pull_request_automation.yml b/.github/workflows/pull_request_automation.yml
index 098c14d46c..af49183656 100644
--- a/.github/workflows/pull_request_automation.yml
+++ b/.github/workflows/pull_request_automation.yml
@@ -37,7 +37,7 @@ jobs:
       uses: actions/checkout@v4.1.0
 
     - name: Set up Python
-      uses: actions/setup-python@v4.7.0
+      uses: actions/setup-python@v5.0.0
      with:
        python-version: 3.9
diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml
index c47c7ce466..11f9dbda2e 100644
--- a/.github/workflows/scorecard.yml
+++ b/.github/workflows/scorecard.yml
@@ -67,6 +67,6 @@ jobs:
 
       # Upload the results to GitHub's code scanning dashboard.
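      # The upload action below is pinned to a full commit SHA, following OpenSSF Scorecard guidance on pinning dependencies.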
- name: "Upload to code-scanning" - uses: github/codeql-action/upload-sarif@807578363a7869ca324a79039e6db9c843e0e100 # v2.1.27 + uses: github/codeql-action/upload-sarif@03e7845b7bfcd5e7fb63d1ae8c61b0e791134fab # v2.22.11 with: sarif_file: results.sarif diff --git a/.github/workflows/sphinx_build_on_release.yml b/.github/workflows/sphinx_build_on_release.yml index 4f4e3276d1..8eb7101aa4 100644 --- a/.github/workflows/sphinx_build_on_release.yml +++ b/.github/workflows/sphinx_build_on_release.yml @@ -14,7 +14,7 @@ jobs: steps: - uses: actions/checkout@v4.1.0 - name: Set up Python - uses: actions/setup-python@v4.7.0 + uses: actions/setup-python@v5.0.0 with: python-version: 3.9 - name: Install docs requirements diff --git a/api_app/analyzers_manager/migrations/0052_analyzer_config_dns0_names.py b/api_app/analyzers_manager/migrations/0052_analyzer_config_dns0_names.py new file mode 100644 index 0000000000..91bb6a59f5 --- /dev/null +++ b/api_app/analyzers_manager/migrations/0052_analyzer_config_dns0_names.py @@ -0,0 +1,209 @@ +from django.db import migrations +from django.db.models.fields.related_descriptors import ( + ForwardManyToOneDescriptor, + ForwardOneToOneDescriptor, + ManyToManyDescriptor, +) + +plugin = { + "name": "DNS0_names", + "python_module": { + "module": "dns0.dns0_names.DNS0Names", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + "description": "Run advanced searches on billions of current and historical domain names. [API](https://docs.dns0.eu/dns-api/names).", + "disabled": False, + "soft_time_limit": 60, + "routing_key": "default", + "health_check_status": True, + "type": "observable", + "docker_based": False, + "maximum_tlp": "RED", + "observable_supported": ["url", "domain", "generic"], + "supported_filetypes": [], + "run_hash": False, + "run_hash_type": "", + "not_supported_filetypes": [], + "health_check_task": None, + "model": "analyzers_manager.AnalyzerConfig", +} + +params = [ + { + "name": "api_key", + "type": "str", + "description": "", + "is_secret": True, + "required": False, + "python_module": { + "module": "dns0.dns0_names.DNS0Names", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + }, + { + "name": "root", + "type": "bool", + "description": "Limit results to root domains.", + "is_secret": False, + "required": False, + "python_module": { + "module": "dns0.dns0_names.DNS0Names", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + }, + { + "name": "fuzzy", + "type": "list", + "description": "Apply fuzziness to q. 
Accepts a comma-separated list of fuzzy algorithms, or all to apply them all.", + "is_secret": False, + "required": False, + "python_module": { + "module": "dns0.dns0_names.DNS0Names", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + }, + { + "name": "from", + "type": "str", + "description": "Limit results to names seen after this date.", + "is_secret": False, + "required": False, + "python_module": { + "module": "dns0.dns0_names.DNS0Names", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + }, + { + "name": "to", + "type": "str", + "description": "Limit results to names seen before this date.", + "is_secret": False, + "required": False, + "python_module": { + "module": "dns0.dns0_names.DNS0Names", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + }, + { + "name": "not_before", + "type": "str", + "description": "Limit results to names not seen before this date.", + "is_secret": False, + "required": False, + "python_module": { + "module": "dns0.dns0_names.DNS0Names", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + }, + { + "name": "sort", + "type": "str", + "description": "Available sorts are first_seen (the default) or last_seen. Both are descending sorts (most recent first).", + "is_secret": False, + "required": False, + "python_module": { + "module": "dns0.dns0_names.DNS0Names", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + }, + { + "name": "format", + "type": "str", + "description": "Available formats are json and dig. Default format is based on the Accept HTTP header.", + "is_secret": False, + "required": False, + "python_module": { + "module": "dns0.dns0_names.DNS0Names", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + }, + { + "name": "limit", + "type": "int", + "description": "Limit the number of results.", + "is_secret": False, + "required": False, + "python_module": { + "module": "dns0.dns0_names.DNS0Names", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + }, + { + "name": "offset", + "type": "int", + "description": "Used for pagination.", + "is_secret": False, + "required": False, + "python_module": { + "module": "dns0.dns0_names.DNS0Names", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + }, +] + +values = [] + + +def _get_real_obj(Model, field, value): + if ( + type(getattr(Model, field)) + in [ForwardManyToOneDescriptor, ForwardOneToOneDescriptor] + and value + ): + other_model = getattr(Model, field).get_queryset().model + # in case is a dictionary, we have to retrieve the object with every key + if isinstance(value, dict): + real_vals = {} + for key, real_val in value.items(): + real_vals[key] = _get_real_obj(other_model, key, real_val) + value = other_model.objects.get_or_create(**real_vals)[0] + # it is just the primary key serialized + else: + value = other_model.objects.get(pk=value) + return value + + +def _create_object(Model, data): + mtm, no_mtm = {}, {} + for field, value in data.items(): + if type(getattr(Model, field)) is ManyToManyDescriptor: + mtm[field] = value + else: + value = _get_real_obj(Model, field, value) + no_mtm[field] = value + try: + o = Model.objects.get(**no_mtm) + except Model.DoesNotExist: + o = Model(**no_mtm) + o.full_clean() + o.save() + for field, value in mtm.items(): + attribute = getattr(o, field) + attribute.set(value) + + +def migrate(apps, schema_editor): + Parameter = apps.get_model("api_app", "Parameter") + PluginConfig = apps.get_model("api_app", 
"PluginConfig") + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + _create_object(Model, plugin) + for param in params: + _create_object(Parameter, param) + for value in values: + _create_object(PluginConfig, value) + + +def reverse_migrate(apps, schema_editor): + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + Model.objects.get(name=plugin["name"]).delete() + + +class Migration(migrations.Migration): + dependencies = [ + ("api_app", "0051_pythonmodule_health_check_schedule_and_more"), + ("analyzers_manager", "0051_analyzerreport_parameters"), + ] + + operations = [migrations.RunPython(migrate, reverse_migrate)] diff --git a/api_app/analyzers_manager/migrations/0053_analyzer_config_dns0_rrsets_data.py b/api_app/analyzers_manager/migrations/0053_analyzer_config_dns0_rrsets_data.py new file mode 100644 index 0000000000..9380d146f3 --- /dev/null +++ b/api_app/analyzers_manager/migrations/0053_analyzer_config_dns0_rrsets_data.py @@ -0,0 +1,232 @@ +from django.db import migrations +from django.db.models.fields.related_descriptors import ( + ForwardManyToOneDescriptor, + ForwardOneToOneDescriptor, + ManyToManyDescriptor, +) + +plugin = { + "name": "DNS0_rrsets_data", + "python_module": { + "module": "dns0.dns0_rrsets.DNS0Rrsets", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + "description": "Query billions of current and historical DNS resource records sets. [API](https://docs.dns0.eu/dns-api/rrsets).", + "disabled": False, + "soft_time_limit": 60, + "routing_key": "default", + "health_check_status": True, + "type": "observable", + "docker_based": False, + "maximum_tlp": "RED", + "observable_supported": ["url", "domain", "generic"], + "supported_filetypes": [], + "run_hash": False, + "run_hash_type": "", + "not_supported_filetypes": [], + "health_check_task": None, + "model": "analyzers_manager.AnalyzerConfig", +} + +params = [ + { + "name": "direction", + "type": "str", + "description": "Used to dispatch matching direction.", + "is_secret": False, + "required": False, + "python_module": { + "module": "dns0.dns0_rrsets.DNS0Rrsets", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + }, + { + "name": "api_key", + "type": "str", + "description": "", + "is_secret": True, + "required": False, + "python_module": { + "module": "dns0.dns0_rrsets.DNS0Rrsets", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + }, + { + "name": "type", + "type": "list", + "description": "Limit results to certain record types (e.g. type=NS,A,AAAA). 
Accepts a comma-separated list of DNS record types, either in textual or numeric form.", + "is_secret": False, + "required": False, + "python_module": { + "module": "dns0.dns0_rrsets.DNS0Rrsets", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + }, + { + "name": "from", + "type": "str", + "description": "Limit results to records seen after this date.", + "is_secret": False, + "required": False, + "python_module": { + "module": "dns0.dns0_rrsets.DNS0Rrsets", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + }, + { + "name": "to", + "type": "str", + "description": "Limit results to records seen before this date.", + "is_secret": False, + "required": False, + "python_module": { + "module": "dns0.dns0_rrsets.DNS0Rrsets", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + }, + { + "name": "not_before", + "type": "str", + "description": "Limit results to records not seen before this date.", + "is_secret": False, + "required": False, + "python_module": { + "module": "dns0.dns0_rrsets.DNS0Rrsets", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + }, + { + "name": "sort", + "type": "str", + "description": "Available sorts are first_seen (the default) or last_seen. Both are descending sorts (most recent first).", + "is_secret": False, + "required": False, + "python_module": { + "module": "dns0.dns0_rrsets.DNS0Rrsets", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + }, + { + "name": "format", + "type": "str", + "description": "Available formats are json, cof or dig. Default format is based on the Accept HTTP header.", + "is_secret": False, + "required": False, + "python_module": { + "module": "dns0.dns0_rrsets.DNS0Rrsets", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + }, + { + "name": "limit", + "type": "int", + "description": "Limit the number of results.", + "is_secret": False, + "required": False, + "python_module": { + "module": "dns0.dns0_rrsets.DNS0Rrsets", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + }, + { + "name": "offset", + "type": "int", + "description": "Used for pagination.", + "is_secret": False, + "required": False, + "python_module": { + "module": "dns0.dns0_rrsets.DNS0Rrsets", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + }, +] + +values = [ + { + "for_organization": False, + "value": "right", + "updated_at": "2023-12-13T15:29:41.780158Z", + "owner": None, + "parameter": { + "name": "direction", + "type": "str", + "description": "Used to dispatch matching direction.", + "is_secret": False, + "required": False, + "python_module": { + "module": "dns0.dns0_rrsets.DNS0Rrsets", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + }, + "analyzer_config": "DNS0_rrsets_data", + "connector_config": None, + "visualizer_config": None, + "ingestor_config": None, + "pivot_config": None, + } +] + + +def _get_real_obj(Model, field, value): + if ( + type(getattr(Model, field)) + in [ForwardManyToOneDescriptor, ForwardOneToOneDescriptor] + and value + ): + other_model = getattr(Model, field).get_queryset().model + # in case is a dictionary, we have to retrieve the object with every key + if isinstance(value, dict): + real_vals = {} + for key, real_val in value.items(): + real_vals[key] = _get_real_obj(other_model, key, real_val) + value = other_model.objects.get_or_create(**real_vals)[0] + # it is just the primary key serialized + else: + value = other_model.objects.get(pk=value) + return 
value + + +def _create_object(Model, data): + mtm, no_mtm = {}, {} + for field, value in data.items(): + if type(getattr(Model, field)) is ManyToManyDescriptor: + mtm[field] = value + else: + value = _get_real_obj(Model, field, value) + no_mtm[field] = value + try: + o = Model.objects.get(**no_mtm) + except Model.DoesNotExist: + o = Model(**no_mtm) + o.full_clean() + o.save() + for field, value in mtm.items(): + attribute = getattr(o, field) + attribute.set(value) + + +def migrate(apps, schema_editor): + Parameter = apps.get_model("api_app", "Parameter") + PluginConfig = apps.get_model("api_app", "PluginConfig") + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + _create_object(Model, plugin) + for param in params: + _create_object(Parameter, param) + for value in values: + _create_object(PluginConfig, value) + + +def reverse_migrate(apps, schema_editor): + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + Model.objects.get(name=plugin["name"]).delete() + + +class Migration(migrations.Migration): + dependencies = [ + ("api_app", "0051_pythonmodule_health_check_schedule_and_more"), + ("analyzers_manager", "0052_analyzer_config_dns0_names"), + ] + + operations = [migrations.RunPython(migrate, reverse_migrate)] diff --git a/api_app/analyzers_manager/migrations/0054_analyzer_config_dns0_rrsets_name.py b/api_app/analyzers_manager/migrations/0054_analyzer_config_dns0_rrsets_name.py new file mode 100644 index 0000000000..1aa12e1fce --- /dev/null +++ b/api_app/analyzers_manager/migrations/0054_analyzer_config_dns0_rrsets_name.py @@ -0,0 +1,232 @@ +from django.db import migrations +from django.db.models.fields.related_descriptors import ( + ForwardManyToOneDescriptor, + ForwardOneToOneDescriptor, + ManyToManyDescriptor, +) + +plugin = { + "name": "DNS0_rrsets_name", + "python_module": { + "module": "dns0.dns0_rrsets.DNS0Rrsets", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + "description": "Query billions of current and historical DNS resource records sets. [API](https://docs.dns0.eu/dns-api/rrsets).", + "disabled": False, + "soft_time_limit": 60, + "routing_key": "default", + "health_check_status": True, + "type": "observable", + "docker_based": False, + "maximum_tlp": "RED", + "observable_supported": ["domain"], + "supported_filetypes": [], + "run_hash": False, + "run_hash_type": "", + "not_supported_filetypes": [], + "health_check_task": None, + "model": "analyzers_manager.AnalyzerConfig", +} + +params = [ + { + "name": "direction", + "type": "str", + "description": "Used to dispatch matching direction.", + "is_secret": False, + "required": False, + "python_module": { + "module": "dns0.dns0_rrsets.DNS0Rrsets", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + }, + { + "name": "api_key", + "type": "str", + "description": "", + "is_secret": True, + "required": False, + "python_module": { + "module": "dns0.dns0_rrsets.DNS0Rrsets", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + }, + { + "name": "type", + "type": "list", + "description": "Limit results to certain record types (e.g. type=NS,A,AAAA). 
Accepts a comma-separated list of DNS record types, either in textual or numeric form.", + "is_secret": False, + "required": False, + "python_module": { + "module": "dns0.dns0_rrsets.DNS0Rrsets", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + }, + { + "name": "from", + "type": "str", + "description": "Limit results to records seen after this date.", + "is_secret": False, + "required": False, + "python_module": { + "module": "dns0.dns0_rrsets.DNS0Rrsets", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + }, + { + "name": "to", + "type": "str", + "description": "Limit results to records seen before this date.", + "is_secret": False, + "required": False, + "python_module": { + "module": "dns0.dns0_rrsets.DNS0Rrsets", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + }, + { + "name": "not_before", + "type": "str", + "description": "Limit results to records not seen before this date.", + "is_secret": False, + "required": False, + "python_module": { + "module": "dns0.dns0_rrsets.DNS0Rrsets", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + }, + { + "name": "sort", + "type": "str", + "description": "Available sorts are first_seen (the default) or last_seen. Both are descending sorts (most recent first).", + "is_secret": False, + "required": False, + "python_module": { + "module": "dns0.dns0_rrsets.DNS0Rrsets", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + }, + { + "name": "format", + "type": "str", + "description": "Available formats are json, cof or dig. Default format is based on the Accept HTTP header.", + "is_secret": False, + "required": False, + "python_module": { + "module": "dns0.dns0_rrsets.DNS0Rrsets", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + }, + { + "name": "limit", + "type": "int", + "description": "Limit the number of results.", + "is_secret": False, + "required": False, + "python_module": { + "module": "dns0.dns0_rrsets.DNS0Rrsets", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + }, + { + "name": "offset", + "type": "int", + "description": "Used for pagination.", + "is_secret": False, + "required": False, + "python_module": { + "module": "dns0.dns0_rrsets.DNS0Rrsets", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + }, +] + +values = [ + { + "for_organization": False, + "value": "left", + "updated_at": "2023-12-13T15:28:50.359044Z", + "owner": None, + "parameter": { + "name": "direction", + "type": "str", + "description": "Used to dispatch matching direction.", + "is_secret": False, + "required": False, + "python_module": { + "module": "dns0.dns0_rrsets.DNS0Rrsets", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + }, + "analyzer_config": "DNS0_rrsets_name", + "connector_config": None, + "visualizer_config": None, + "ingestor_config": None, + "pivot_config": None, + } +] + + +def _get_real_obj(Model, field, value): + if ( + type(getattr(Model, field)) + in [ForwardManyToOneDescriptor, ForwardOneToOneDescriptor] + and value + ): + other_model = getattr(Model, field).get_queryset().model + # in case is a dictionary, we have to retrieve the object with every key + if isinstance(value, dict): + real_vals = {} + for key, real_val in value.items(): + real_vals[key] = _get_real_obj(other_model, key, real_val) + value = other_model.objects.get_or_create(**real_vals)[0] + # it is just the primary key serialized + else: + value = other_model.objects.get(pk=value) + return value 
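+# `_get_real_obj` (above) resolves serialized foreign keys back into model
+# instances; `_create_object` (below) only creates a record when an identical
+# one does not already exist, so this generated data migration stays idempotent.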
+ + +def _create_object(Model, data): + mtm, no_mtm = {}, {} + for field, value in data.items(): + if type(getattr(Model, field)) is ManyToManyDescriptor: + mtm[field] = value + else: + value = _get_real_obj(Model, field, value) + no_mtm[field] = value + try: + o = Model.objects.get(**no_mtm) + except Model.DoesNotExist: + o = Model(**no_mtm) + o.full_clean() + o.save() + for field, value in mtm.items(): + attribute = getattr(o, field) + attribute.set(value) + + +def migrate(apps, schema_editor): + Parameter = apps.get_model("api_app", "Parameter") + PluginConfig = apps.get_model("api_app", "PluginConfig") + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + _create_object(Model, plugin) + for param in params: + _create_object(Parameter, param) + for value in values: + _create_object(PluginConfig, value) + + +def reverse_migrate(apps, schema_editor): + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + Model.objects.get(name=plugin["name"]).delete() + + +class Migration(migrations.Migration): + dependencies = [ + ("api_app", "0051_pythonmodule_health_check_schedule_and_more"), + ("analyzers_manager", "0053_analyzer_config_dns0_rrsets_data"), + ] + + operations = [migrations.RunPython(migrate, reverse_migrate)] diff --git a/api_app/analyzers_manager/migrations/0055_analyzerreport_sent_to_bi.py b/api_app/analyzers_manager/migrations/0055_analyzerreport_sent_to_bi.py new file mode 100644 index 0000000000..a961a90ce9 --- /dev/null +++ b/api_app/analyzers_manager/migrations/0055_analyzerreport_sent_to_bi.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.8 on 2023-12-20 14:14 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("analyzers_manager", "0054_analyzer_config_dns0_rrsets_name"), + ] + + operations = [ + migrations.AddField( + model_name="analyzerreport", + name="sent_to_bi", + field=models.BooleanField(default=False, editable=False), + ), + ] diff --git a/api_app/analyzers_manager/migrations/0056_alter_analyzer_config_dns0_rrsets_data.py b/api_app/analyzers_manager/migrations/0056_alter_analyzer_config_dns0_rrsets_data.py new file mode 100644 index 0000000000..737a6e8ff3 --- /dev/null +++ b/api_app/analyzers_manager/migrations/0056_alter_analyzer_config_dns0_rrsets_data.py @@ -0,0 +1,62 @@ +from django.db import migrations + +from api_app.analyzers_manager.constants import ObservableTypes + + +def migrate(apps, schema_editor): + AnalyzerConfig = apps.get_model("analyzers_manager", "AnalyzerConfig") + config = AnalyzerConfig.objects.get(name="DNS0_rrsets_data") + config.observable_supported = [ + ObservableTypes.DOMAIN, + ObservableTypes.URL, + ObservableTypes.GENERIC, + ObservableTypes.IP, + ] + config.full_clean() + config.save() + + PythonModule = apps.get_model("api_app", "PythonModule") + Parameter = apps.get_model("api_app", "Parameter") + pm = PythonModule.objects.get( + module="dns0.dns0_rrsets.DNS0Rrsets", + base_path="api_app.analyzers_manager.observable_analyzers", + ) + p = Parameter( + name="include_subdomain", + type="bool", + description="Search for subdomains.", + is_secret=False, + required=False, + python_module=pm, + ) + p.full_clean() + p.save() + + +def reverse_migrate(apps, schema_editor): + AnalyzerConfig = apps.get_model("analyzers_manager", "AnalyzerConfig") + config = AnalyzerConfig.objects.get(name="DNS0_rrsets_data") + config.observable_supported = [ + ObservableTypes.DOMAIN, + ObservableTypes.URL, + ObservableTypes.GENERIC, + ] + 
config.full_clean()
+    config.save()
+
+    PythonModule = apps.get_model("api_app", "PythonModule")
+    Parameter = apps.get_model("api_app", "Parameter")
+    pm = PythonModule.objects.get(
+        module="dns0.dns0_rrsets.DNS0Rrsets",
+        base_path="api_app.analyzers_manager.observable_analyzers",
+    )
+    # fetch the stored parameter so the delete actually hits the database
+    Parameter.objects.get(name="include_subdomain", python_module=pm).delete()
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("api_app", "0052_periodic_task_bi"),
+        ("analyzers_manager", "0055_analyzerreport_sent_to_bi"),
+    ]
+
+    operations = [migrations.RunPython(migrate, reverse_migrate)]
diff --git a/api_app/analyzers_manager/models.py b/api_app/analyzers_manager/models.py
index a2e3dfe366..42bc289cd8 100644
--- a/api_app/analyzers_manager/models.py
+++ b/api_app/analyzers_manager/models.py
@@ -13,6 +13,7 @@
     TypeChoices,
 )
 from api_app.analyzers_manager.exceptions import AnalyzerConfigurationException
+from api_app.analyzers_manager.queryset import AnalyzerReportQuerySet
 from api_app.choices import TLP, PythonModuleBasePaths
 from api_app.fields import ChoiceArrayField
 from api_app.models import AbstractReport, PythonConfig, PythonModule
@@ -21,6 +22,7 @@
 
 
 class AnalyzerReport(AbstractReport):
+    objects = AnalyzerReportQuerySet.as_manager()
     config = models.ForeignKey(
         "AnalyzerConfig", related_name="reports", null=False, on_delete=models.CASCADE
     )
diff --git a/api_app/analyzers_manager/observable_analyzers/dns0/__init__.py b/api_app/analyzers_manager/observable_analyzers/dns0/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/api_app/analyzers_manager/observable_analyzers/dns0/dns0_base.py b/api_app/analyzers_manager/observable_analyzers/dns0/dns0_base.py
new file mode 100644
index 0000000000..8ac3eb7dd2
--- /dev/null
+++ b/api_app/analyzers_manager/observable_analyzers/dns0/dns0_base.py
@@ -0,0 +1,124 @@
+import re
+import typing
+from abc import ABCMeta
+from logging import getLogger
+
+import dateparser
+
+from api_app.analyzers_manager.classes import BaseAnalyzerMixin
+from api_app.analyzers_manager.exceptions import (
+    AnalyzerConfigurationException,
+    AnalyzerRunException,
+)
+
+_supported_sort_types = [
+    "first_seen",
+    "last_seen",
+]
+
+_min_limit_value = 0
+_max_limit_value = 50000
+
+_min_offset_value = 0
+
+logger = getLogger(__name__)
+
+
+class DNS0Mixin(BaseAnalyzerMixin, metaclass=ABCMeta):
+    base_url: str = "https://api.dns0.eu/"
+
+    _api_key: str
+    from_date: str = "-1M"
+    sort: str
+    format: str
+    limit: int = 100
+    offset: int
+
+    def config(self, runtime_configuration: typing.Dict):
+        super().config(runtime_configuration)
+        # workaround: "from" is a reserved keyword and cannot be declared as an attribute
+        if not hasattr(self, "from"):
+            setattr(self, "from", self.from_date)
+
+    def _create_headers(self):
+        headers = {"Accept": "application/json", "User-Agent": "IntelOwl"}
+        if hasattr(self, "_api_key") and self._api_key:
+            headers["Authorization"] = f"Bearer {self._api_key}"
+        return headers
+
+    @staticmethod
+    def convert_date_type(date_string):
+        if not date_string:
+            return False
+
+        date_parsed = (
+            DNS0Mixin.convert_unix_timestamp(date_string)
+            or DNS0Mixin.convert_relative_date(date_string)
+            or DNS0Mixin.convert_date(date_string)
+        )
+        if not date_parsed:
+            raise AnalyzerRunException("Error in date format!")
+        return date_parsed
+
+    @staticmethod
+    def convert_relative_date(date):
+        # accepts strings matching the format: a leading "-",
+        # then a number and a character indicating Year, Month or Day (e.g. "-1M")
+        pattern = re.compile(r"-\d+[YMD]")
+        if match := 
pattern.match(date): + return match.group() + return False + + @staticmethod + def convert_date(date): + pattern = re.compile(r"^(\d{4}-\d{2}-\d{2})$") + if match := pattern.match(date): + return dateparser.parse(match.group()) + return False + + @staticmethod + def convert_unix_timestamp(timestamp): + try: + return str(int(timestamp)) + except Exception: + return False + + def _validate_params(self): + if hasattr(self, "sort") and self.sort not in _supported_sort_types: + raise AnalyzerConfigurationException( + f"Sort type {self.sort} not supported! " + f"Available sort types are: {_supported_sort_types}" + ) + + if ( + hasattr(self, "limit") + and not _min_limit_value < self.limit <= _max_limit_value + ): + raise AnalyzerConfigurationException( + f"{self.limit} is out of bound! " + f"Max value is {_max_limit_value}, min value is {_min_limit_value}" + ) + + if hasattr(self, "offset") and self.offset < _min_offset_value: + raise AnalyzerConfigurationException( + f"{self.offset} can't be below {_min_offset_value}" + ) + + def _create_params(self): + params = {} + # convert dates to correct format + dates = ["from", "to", "not_before"] + parameters = ["sort", "format", "limit", "offset"] + + for date in dates: + if getattr(self, date, None): + if result := self.convert_date_type(getattr(self, date)): + params[date] = result + + for p in parameters: + if getattr(self, p, None): + params[p] = getattr(self, p) + + return params diff --git a/api_app/analyzers_manager/observable_analyzers/dns0/dns0_names.py b/api_app/analyzers_manager/observable_analyzers/dns0/dns0_names.py new file mode 100644 index 0000000000..c50157b4fa --- /dev/null +++ b/api_app/analyzers_manager/observable_analyzers/dns0/dns0_names.py @@ -0,0 +1,206 @@ +from logging import getLogger +from typing import Dict +from urllib.parse import urlparse + +import requests + +from api_app.analyzers_manager import classes +from api_app.analyzers_manager.exceptions import ( + AnalyzerConfigurationException, + AnalyzerRunException, +) +from api_app.analyzers_manager.models import AnalyzerConfig +from api_app.analyzers_manager.observable_analyzers.dns0.dns0_base import DNS0Mixin +from api_app.models import Parameter, PluginConfig +from tests.mock_utils import MockUpResponse, if_mock_connections, patch + +logger = getLogger(__name__) + +_supported_fuzzy_params = [ + "swap", + "omit", + "repeat", + "add", + "typo", + "bitflip", + "hyphen", + "fatfinger", + "subdomain", + "vowels", + "homoglyph", + "all", +] + +_supported_format_types = [ + "json", + "dig", +] + + +class DNS0Names(classes.ObservableAnalyzer, DNS0Mixin): + endpoint: str = "names" + + root: bool + fuzzy: list[str] + + def config(self, runtime_configuration: Dict): + super().config(runtime_configuration) + self._validate_params() + + def run(self): + params = self._create_params() + headers = self._create_headers() + + response = requests.get( + self.base_url + self.endpoint, params=params, headers=headers + ) + try: + response.raise_for_status() + except requests.HTTPError as e: + raise AnalyzerRunException(e) + + return response.json() + + def update(self) -> bool: + pass + + def _validate_params(self): + super()._validate_params() + if hasattr(self, "fuzzy") and any( + fuzzy_params not in _supported_fuzzy_params for fuzzy_params in self.fuzzy + ): + raise AnalyzerConfigurationException( + "Fuzzy type not supported! 
" + "The list of supported fuzzy is at: " + "https://docs.dns0.eu/dns-api/names#fuzziness" + ) + + if hasattr(self, "format") and self.format not in _supported_format_types: + raise AnalyzerConfigurationException( + f"Format type {self.format} not supported! " + f"Available format types are: {_supported_format_types}" + ) + + def _create_params(self): + params = super()._create_params() + target_observable = self.observable_name + if self.observable_classification == self.ObservableTypes.URL: + target_observable = urlparse(self.observable_name).hostname + params["q"] = target_observable + + # convert root parameter into 1 or 0 + if hasattr(self, "root") and self.root: + params["root"] = int(self.root) + + # pass list of fuzzy parameter + if hasattr(self, "fuzzy") and self.fuzzy: + params["fuzzy"] = self.fuzzy + + return params + + @classmethod + def _monkeypatch(cls): + ac = AnalyzerConfig.objects.get(name="DNS0_names") + PluginConfig.objects.get_or_create( + analyzer_config=ac, + parameter=Parameter.objects.get( + name="from", python_module__pk=ac.python_module_id + ), + for_organization=False, + owner=None, + value="-1M", + ) + PluginConfig.objects.get_or_create( + analyzer_config=ac, + parameter=Parameter.objects.get( + name="to", python_module__pk=ac.python_module_id + ), + for_organization=False, + owner=None, + value="", + ) + PluginConfig.objects.get_or_create( + analyzer_config=ac, + parameter=Parameter.objects.get( + name="not_before", python_module__pk=ac.python_module_id + ), + for_organization=False, + owner=None, + value="", + ) + PluginConfig.objects.get_or_create( + analyzer_config=ac, + parameter=Parameter.objects.get( + name="sort", python_module__pk=ac.python_module_id + ), + for_organization=False, + owner=None, + value="first_seen", + ) + PluginConfig.objects.get_or_create( + analyzer_config=ac, + parameter=Parameter.objects.get( + name="format", python_module__pk=ac.python_module_id + ), + for_organization=False, + owner=None, + value="json", + ) + PluginConfig.objects.get_or_create( + analyzer_config=ac, + parameter=Parameter.objects.get( + name="limit", python_module__pk=ac.python_module_id + ), + for_organization=False, + owner=None, + value=100, + ) + PluginConfig.objects.get_or_create( + analyzer_config=ac, + parameter=Parameter.objects.get( + name="offset", python_module__pk=ac.python_module_id + ), + for_organization=False, + owner=None, + value=0, + ) + PluginConfig.objects.get_or_create( + analyzer_config=ac, + parameter=Parameter.objects.get( + name="fuzzy", python_module__pk=ac.python_module_id + ), + for_organization=False, + owner=None, + value=[], + ) + PluginConfig.objects.get_or_create( + analyzer_config=ac, + parameter=Parameter.objects.get( + name="root", python_module__pk=ac.python_module_id + ), + for_organization=False, + owner=None, + value=True, + ) + + patches = [ + if_mock_connections( + patch( + "requests.get", + return_value=MockUpResponse( + { + "data": [ + { + "first_seen": "2023-12-14T16:37:44.000Z", + "last_seen": "2023-12-14T16:37:44.000Z", + "name": "gcfr2.example.opentlc.com.", + } + ], + "meta": {"results": 834824}, + }, + 200, + ), + ), + ) + ] + return super()._monkeypatch(patches=patches) diff --git a/api_app/analyzers_manager/observable_analyzers/dns0/dns0_rrsets.py b/api_app/analyzers_manager/observable_analyzers/dns0/dns0_rrsets.py new file mode 100644 index 0000000000..2ef643f9e8 --- /dev/null +++ b/api_app/analyzers_manager/observable_analyzers/dns0/dns0_rrsets.py @@ -0,0 +1,223 @@ +from logging import getLogger +from typing 
import Dict + +import requests + +from api_app.analyzers_manager import classes +from api_app.analyzers_manager.exceptions import ( + AnalyzerConfigurationException, + AnalyzerRunException, +) +from api_app.analyzers_manager.models import AnalyzerConfig +from api_app.analyzers_manager.observable_analyzers.dns0.dns0_base import DNS0Mixin +from api_app.models import Parameter, PluginConfig +from tests.mock_utils import MockUpResponse, if_mock_connections, patch + +logger = getLogger(__name__) + +_supported_format_types = [ + "json", + "cof", + "dig", +] + +_supported_directions = [ + "right", + "left", +] + + +class DNS0Rrsets(classes.ObservableAnalyzer, DNS0Mixin): + endpoint: str = "rrsets" + + direction: str + name: str + data: str + type: list[str] + include_subdomain: bool + + def config(self, runtime_configuration: Dict): + super().config(runtime_configuration) + self._validate_params() + + def run(self): + params = self._create_params() + headers = self._create_headers() + + response = requests.get( + self.base_url + self.endpoint, params=params, headers=headers + ) + try: + response.raise_for_status() + except requests.HTTPError as e: + raise AnalyzerRunException(e) + + return response.json() + + def update(self) -> bool: + pass + + def _validate_params(self): + super()._validate_params() + if hasattr(self, "direction") and self.direction not in _supported_directions: + raise AnalyzerConfigurationException("Matching direction not specified!") + + if hasattr(self, "format") and self.format not in _supported_format_types: + raise AnalyzerConfigurationException( + f"Format type {self.format} not supported! " + f"Available format types are: {_supported_format_types}" + ) + + def _create_params(self): + params = super()._create_params() + query_type = None + if hasattr(self, "direction") and self.direction: + if self.direction == "left": + query_type = "name" + elif self.direction == "right": + query_type = "data" + + query = self.observable_name + if hasattr(self, "include_subdomain") and self.include_subdomain: + query = "." 
+ query + params[query_type] = query + + # pass list of dns types parameter + if hasattr(self, "type") and self.type: + # convert the element that are int + res = [int(elem) if elem.isdigit() else elem for elem in self.type] + params["type"] = res + + return params + + @classmethod + def _monkeypatch(cls): + for config in ["DNS0_rrsets_data", "DNS0_rrsets_name"]: + ac = AnalyzerConfig.objects.get(name=config) + PluginConfig.objects.get_or_create( + analyzer_config=ac, + parameter=Parameter.objects.get( + name="from", python_module__pk=ac.python_module_id + ), + for_organization=False, + owner=None, + value="-1M", + ) + PluginConfig.objects.get_or_create( + analyzer_config=ac, + parameter=Parameter.objects.get( + name="to", python_module__pk=ac.python_module_id + ), + for_organization=False, + owner=None, + value="", + ) + PluginConfig.objects.get_or_create( + analyzer_config=ac, + parameter=Parameter.objects.get( + name="not_before", python_module__pk=ac.python_module_id + ), + for_organization=False, + owner=None, + value="", + ) + PluginConfig.objects.get_or_create( + analyzer_config=ac, + parameter=Parameter.objects.get( + name="sort", python_module__pk=ac.python_module_id + ), + for_organization=False, + owner=None, + value="first_seen", + ) + PluginConfig.objects.get_or_create( + analyzer_config=ac, + parameter=Parameter.objects.get( + name="format", python_module__pk=ac.python_module_id + ), + for_organization=False, + owner=None, + value="json", + ) + PluginConfig.objects.get_or_create( + analyzer_config=ac, + parameter=Parameter.objects.get( + name="limit", python_module__pk=ac.python_module_id + ), + for_organization=False, + owner=None, + value=100, + ) + PluginConfig.objects.get_or_create( + analyzer_config=ac, + parameter=Parameter.objects.get( + name="offset", python_module__pk=ac.python_module_id + ), + for_organization=False, + owner=None, + value=0, + ) + PluginConfig.objects.get_or_create( + analyzer_config=ac, + parameter=Parameter.objects.get( + name="type", python_module__pk=ac.python_module_id + ), + for_organization=False, + owner=None, + value=[], + ) + PluginConfig.objects.get_or_create( + analyzer_config=ac, + parameter=Parameter.objects.get( + name="include_subdomain", python_module__pk=ac.python_module_id + ), + for_organization=False, + owner=None, + value=False, + ) + + ac = AnalyzerConfig.objects.get(name="DNS0_rrsets_name") + PluginConfig.objects.get_or_create( + analyzer_config=ac, + parameter=Parameter.objects.get( + name="direction", python_module__pk=ac.python_module_id + ), + for_organization=False, + owner=None, + value="left", + ) + + ac = AnalyzerConfig.objects.get(name="DNS0_rrsets_data") + PluginConfig.objects.get_or_create( + analyzer_config=ac, + parameter=Parameter.objects.get( + name="direction", python_module__pk=ac.python_module_id + ), + for_organization=False, + owner=None, + value="right", + ) + + patches = [ + if_mock_connections( + patch( + "requests.get", + return_value=MockUpResponse( + { + "data": [ + { + "first_seen": "2023-04-15T16:50:52.000Z", + "last_seen": "2023-12-14T00:23:52.000Z", + "name": "example.com.", + "type": "A", + "data": ["93.184.216.34"], + } + ], + "meta": {"results": 6}, + }, + 200, + ), + ), + ) + ] + return super()._monkeypatch(patches=patches) diff --git a/api_app/analyzers_manager/observable_analyzers/dnsdb.py b/api_app/analyzers_manager/observable_analyzers/dnsdb.py index 6d39197976..a5c817abc1 100644 --- a/api_app/analyzers_manager/observable_analyzers/dnsdb.py +++ 
b/api_app/analyzers_manager/observable_analyzers/dnsdb.py @@ -5,8 +5,8 @@ from typing import Dict from urllib.parse import urlparse +import dateparser import requests -from dateutil import parser as dateutil_parser from api_app.analyzers_manager import classes from api_app.analyzers_manager.exceptions import AnalyzerRunException @@ -122,7 +122,7 @@ def convert_date_type(date_string): :rtype: int """ try: - return int(dateutil_parser.parse(date_string).timestamp()) + return int(dateparser.parse(date_string).timestamp()) except ValueError: error_message = f"{date_string} cannot be converted to a valid datetime" except TypeError: diff --git a/api_app/analyzers_manager/queryset.py b/api_app/analyzers_manager/queryset.py new file mode 100644 index 0000000000..e9b0ec96d3 --- /dev/null +++ b/api_app/analyzers_manager/queryset.py @@ -0,0 +1,14 @@ +from typing import TYPE_CHECKING, Type + +from api_app.queryset import AbstractReportQuerySet + +if TYPE_CHECKING: + from api_app.analyzers_manager.serializers import AnalyzerReportBISerializer + + +class AnalyzerReportQuerySet(AbstractReportQuerySet): + @classmethod + def _get_serializer_class(cls) -> Type["AnalyzerReportBISerializer"]: + from api_app.analyzers_manager.serializers import AnalyzerReportBISerializer + + return AnalyzerReportBISerializer diff --git a/api_app/analyzers_manager/serializers.py b/api_app/analyzers_manager/serializers.py index 0ad40a3f09..7f326e12b9 100644 --- a/api_app/analyzers_manager/serializers.py +++ b/api_app/analyzers_manager/serializers.py @@ -2,6 +2,7 @@ # See the file 'LICENSE' for copying permission. from ..serializers import ( + AbstractReportBISerializer, AbstractReportSerializer, CrontabScheduleSerializer, PeriodicTaskSerializer, @@ -18,6 +19,13 @@ class Meta: list_serializer_class = AbstractReportSerializer.Meta.list_serializer_class +class AnalyzerReportBISerializer(AbstractReportBISerializer): + class Meta: + model = AnalyzerReport + fields = AbstractReportBISerializer.Meta.fields + list_serializer_class = AbstractReportBISerializer.Meta.list_serializer_class + + class AnalyzerConfigSerializer(PythonConfigSerializer): class Meta: model = AnalyzerConfig diff --git a/api_app/connectors_manager/migrations/0027_connectorreport_sent_to_bi.py b/api_app/connectors_manager/migrations/0027_connectorreport_sent_to_bi.py new file mode 100644 index 0000000000..ec55d22bab --- /dev/null +++ b/api_app/connectors_manager/migrations/0027_connectorreport_sent_to_bi.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.8 on 2023-12-20 14:14 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("connectors_manager", "0026_connectorreport_parameters"), + ] + + operations = [ + migrations.AddField( + model_name="connectorreport", + name="sent_to_bi", + field=models.BooleanField(default=False, editable=False), + ), + ] diff --git a/api_app/connectors_manager/models.py b/api_app/connectors_manager/models.py index 03335c61fb..a7e2b26597 100644 --- a/api_app/connectors_manager/models.py +++ b/api_app/connectors_manager/models.py @@ -5,10 +5,12 @@ from api_app.choices import TLP, PythonModuleBasePaths from api_app.connectors_manager.exceptions import ConnectorConfigurationException +from api_app.connectors_manager.queryset import ConnectorReportQuerySet from api_app.models import AbstractReport, PythonConfig, PythonModule class ConnectorReport(AbstractReport): + objects = ConnectorReportQuerySet.as_manager() config = models.ForeignKey( "ConnectorConfig", related_name="reports", 
null=False, on_delete=models.CASCADE ) diff --git a/api_app/connectors_manager/queryset.py b/api_app/connectors_manager/queryset.py new file mode 100644 index 0000000000..1450d22a1a --- /dev/null +++ b/api_app/connectors_manager/queryset.py @@ -0,0 +1,14 @@ +from typing import TYPE_CHECKING, Type + +from api_app.queryset import AbstractReportQuerySet + +if TYPE_CHECKING: + from api_app.connectors_manager.serializers import ConnectorReportBISerializer + + +class ConnectorReportQuerySet(AbstractReportQuerySet): + @classmethod + def _get_serializer_class(cls) -> Type["ConnectorReportBISerializer"]: + from api_app.connectors_manager.serializers import ConnectorReportBISerializer + + return ConnectorReportBISerializer diff --git a/api_app/connectors_manager/serializers.py b/api_app/connectors_manager/serializers.py index 5d4cbb4c22..4489526889 100644 --- a/api_app/connectors_manager/serializers.py +++ b/api_app/connectors_manager/serializers.py @@ -2,6 +2,7 @@ # See the file 'LICENSE' for copying permission. from ..serializers import ( + AbstractReportBISerializer, AbstractReportSerializer, PythonConfigSerializer, PythonConfigSerializerForMigration, @@ -27,3 +28,10 @@ class Meta: model = ConnectorReport fields = AbstractReportSerializer.Meta.fields list_serializer_class = AbstractReportSerializer.Meta.list_serializer_class + + +class ConnectorReportBISerializer(AbstractReportBISerializer): + class Meta: + model = ConnectorReport + fields = AbstractReportBISerializer.Meta.fields + list_serializer_class = AbstractReportBISerializer.Meta.list_serializer_class diff --git a/api_app/ingestors_manager/migrations/0014_ingestorreport_sent_to_bi.py b/api_app/ingestors_manager/migrations/0014_ingestorreport_sent_to_bi.py new file mode 100644 index 0000000000..f763d1abe8 --- /dev/null +++ b/api_app/ingestors_manager/migrations/0014_ingestorreport_sent_to_bi.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.8 on 2023-12-20 14:14 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("ingestors_manager", "0013_ingestorreport_parameters"), + ] + + operations = [ + migrations.AddField( + model_name="ingestorreport", + name="sent_to_bi", + field=models.BooleanField(default=False, editable=False), + ), + ] diff --git a/api_app/ingestors_manager/models.py b/api_app/ingestors_manager/models.py index 42e91df99c..70ef0cbbcc 100644 --- a/api_app/ingestors_manager/models.py +++ b/api_app/ingestors_manager/models.py @@ -8,6 +8,7 @@ from api_app.choices import PythonModuleBasePaths from api_app.ingestors_manager.exceptions import IngestorConfigurationException +from api_app.ingestors_manager.queryset import IngestorReportQuerySet from api_app.interfaces import CreateJobsFromPlaybookInterface from api_app.models import AbstractReport, Job, PythonConfig, PythonModule from api_app.playbooks_manager.models import PlaybookConfig @@ -17,6 +18,7 @@ class IngestorReport(AbstractReport): + objects = IngestorReportQuerySet.as_manager() config = models.ForeignKey( "IngestorConfig", related_name="reports", on_delete=models.CASCADE ) diff --git a/api_app/ingestors_manager/queryset.py b/api_app/ingestors_manager/queryset.py new file mode 100644 index 0000000000..386a7cdb48 --- /dev/null +++ b/api_app/ingestors_manager/queryset.py @@ -0,0 +1,14 @@ +from typing import TYPE_CHECKING, Type + +from api_app.queryset import AbstractReportQuerySet + +if TYPE_CHECKING: + from api_app.ingestors_manager.serializers import IngestorReportBISerializer + + +class 
IngestorReportQuerySet(AbstractReportQuerySet): + @classmethod + def _get_serializer_class(cls) -> Type["IngestorReportBISerializer"]: + from api_app.ingestors_manager.serializers import IngestorReportBISerializer + + return IngestorReportBISerializer diff --git a/api_app/ingestors_manager/serializers.py b/api_app/ingestors_manager/serializers.py index b4c26025c3..d5838a489b 100644 --- a/api_app/ingestors_manager/serializers.py +++ b/api_app/ingestors_manager/serializers.py @@ -5,6 +5,7 @@ from certego_saas.apps.user.serializers import UserSerializer from ..serializers import ( + AbstractReportBISerializer, AbstractReportSerializer, CrontabScheduleSerializer, PeriodicTaskSerializer, @@ -53,3 +54,16 @@ def get_name(cls, instance: IngestorReport): def to_internal_value(self, data): raise NotImplementedError() + + +class IngestorReportBISerializer(AbstractReportBISerializer): + name = rfs.SerializerMethodField() + + class Meta: + model = IngestorReport + fields = AbstractReportBISerializer.Meta.fields + list_serializer_class = AbstractReportBISerializer.Meta.list_serializer_class + + @classmethod + def get_name(cls, instance: IngestorReport): + return instance.name or instance.config.pk diff --git a/api_app/management/commands/dumpplugin.py b/api_app/management/commands/dumpplugin.py index 373499c1eb..ce821f4f4b 100644 --- a/api_app/management/commands/dumpplugin.py +++ b/api_app/management/commands/dumpplugin.py @@ -11,7 +11,7 @@ from api_app.connectors_manager.serializers import ConnectorConfigSerializerForMigration from api_app.ingestors_manager.models import IngestorConfig from api_app.ingestors_manager.serializers import IngestorConfigSerializerForMigration -from api_app.models import PluginConfig +from api_app.models import PluginConfig, PythonConfig from api_app.pivots_manager.models import PivotConfig from api_app.pivots_manager.serializers import PivotConfigSerializerForMigration from api_app.serializers import ( @@ -48,7 +48,7 @@ def add_arguments(parser): ) @staticmethod - def _get_serialization(obj, serializer_class): + def _get_serialization(obj: PythonConfig, serializer_class): obj_data = serializer_class(obj).data obj_data["model"] = f"{obj._meta.app_label}.{obj._meta.object_name}" params_data = [] @@ -62,6 +62,7 @@ def _get_serialization(obj, serializer_class): for_organization=False, parameter=parameter, parameter__is_secret=False, + **{f"{obj.snake_case_name}__pk": obj.pk}, ) except PluginConfig.DoesNotExist: ... 
@@ -161,7 +162,7 @@
 
         return MigrationRecorder.Migration.objects.filter(app=app).latest("id").name
 
-    def _migration_file(self, obj, serializer_class, app):
+    def _migration_file(self, obj: PythonConfig, serializer_class, app):
         obj_data, param_data, values_data = self._get_serialization(
             obj, serializer_class
         )
@@ -222,7 +223,7 @@ def handle(self, *args, **options):
             if config_class == IngestorConfig.__name__
             else (PivotConfig, PivotConfigSerializerForMigration)
         )
-        obj = class_.objects.get(name=config_name)
+        obj: PythonConfig = class_.objects.get(name=config_name)
         app = obj._meta.app_label
         content = self._migration_file(obj, serializer_class, app)
         name_file = self._name_file(obj, app)
diff --git a/api_app/migrations/0050_python_module_update_task.py b/api_app/migrations/0050_python_module_update_task.py
index c66dc243bd..e637728e60 100644
--- a/api_app/migrations/0050_python_module_update_task.py
+++ b/api_app/migrations/0050_python_module_update_task.py
@@ -9,7 +9,10 @@ def migrate(apps, schema_editor):
     PythonModule = apps.get_model("api_app", "PythonModule")
     for task in PeriodicTask.objects.filter(name__endswith="Analyzer"):
         pm_pk = task.analyzer.python_module
-        pm = PythonModule.objects.get(module=pm_pk)
+        if isinstance(pm_pk, str):
+            pm = PythonModule.objects.get(module=pm_pk)
+        else:
+            pm = PythonModule.objects.get(pk=pm_pk.pk)
         task.name = pm.base_path + "." + pm.module + "Update"
         task.save()
         if pm.update_task:
diff --git a/api_app/migrations/0052_periodic_task_bi.py b/api_app/migrations/0052_periodic_task_bi.py
new file mode 100644
index 0000000000..fd3c051431
--- /dev/null
+++ b/api_app/migrations/0052_periodic_task_bi.py
@@ -0,0 +1,36 @@
+# Generated by Django 4.1.10 on 2023-11-15 09:08
+
+from django.conf import settings
+from django.db import migrations
+
+from intel_owl.celery import get_queue_name
+
+
+def migrate(apps, schema_editor):
+    CrontabSchedule = apps.get_model("django_celery_beat", "CrontabSchedule")
+    PeriodicTask = apps.get_model("django_celery_beat", "PeriodicTask")
+
+    # periodic task that ships Business Intelligence data to Elasticsearch
+
+    c1 = CrontabSchedule.objects.get_or_create(minute=12)[0]
+    PeriodicTask.objects.create(
+        name="send_elastic_bi",
+        task="intel_owl.tasks.send_bi_to_elastic",
+        crontab=c1,
+        enabled=settings.ELASTICSEARCH_BI_ENABLED,
+        queue=get_queue_name("default"),
+    )
+
+
+def reverse_migrate(apps, schema_editor):
+    PeriodicTask = apps.get_model("django_celery_beat", "PeriodicTask")
+    PeriodicTask.objects.filter(name="send_elastic_bi").delete()
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("django_celery_beat", "0018_improve_crontab_helptext"),
+        ("api_app", "0051_pythonmodule_health_check_schedule_and_more"),
+    ]
+
+    operations = [migrations.RunPython(migrate, reverse_migrate)]
diff --git a/api_app/models.py b/api_app/models.py
index 4aca9fa9a1..4c9e8be34b 100644
--- a/api_app/models.py
+++ b/api_app/models.py
@@ -892,6 +892,7 @@ def enabled_for_user(self, user: User) -> bool:
 
 
 class AbstractReport(models.Model):
+    objects = AbstractReportQuerySet.as_manager()
     # constants
     Status = ReportStatus
@@ -908,8 +909,8 @@ class AbstractReport(models.Model):
     job = models.ForeignKey(
         "api_app.Job", related_name="%(class)ss", on_delete=models.CASCADE
     )
-    objects = AbstractReportQuerySet.as_manager()
     parameters = models.JSONField(blank=False, null=False, editable=False)
+    sent_to_bi = models.BooleanField(default=False, editable=False)
 
     class Meta:
         abstract = True
diff --git a/api_app/pivots_manager/migrations/0021_pivotreport_sent_to_bi.py 
b/api_app/pivots_manager/migrations/0021_pivotreport_sent_to_bi.py new file mode 100644 index 0000000000..976193d3af --- /dev/null +++ b/api_app/pivots_manager/migrations/0021_pivotreport_sent_to_bi.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.8 on 2023-12-20 14:14 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("pivots_manager", "0020_pivotreport_parameters"), + ] + + operations = [ + migrations.AddField( + model_name="pivotreport", + name="sent_to_bi", + field=models.BooleanField(default=False, editable=False), + ), + ] diff --git a/api_app/pivots_manager/models.py b/api_app/pivots_manager/models.py index 921a15922a..acf6a81896 100644 --- a/api_app/pivots_manager/models.py +++ b/api_app/pivots_manager/models.py @@ -2,7 +2,7 @@ import typing from typing import Type -from api_app.pivots_manager.queryset import PivotConfigQuerySet +from api_app.pivots_manager.queryset import PivotConfigQuerySet, PivotReportQuerySet from api_app.queryset import PythonConfigQuerySet from api_app.validators import plugin_name_validator @@ -21,6 +21,7 @@ class PivotReport(AbstractReport): + objects = PivotReportQuerySet.as_manager() config = models.ForeignKey( "PivotConfig", related_name="reports", null=False, on_delete=models.CASCADE ) diff --git a/api_app/pivots_manager/queryset.py b/api_app/pivots_manager/queryset.py index 9526013ed3..d17b97f442 100644 --- a/api_app/pivots_manager/queryset.py +++ b/api_app/pivots_manager/queryset.py @@ -1,6 +1,11 @@ +from typing import TYPE_CHECKING, Type + from django.db.models import Q -from api_app.queryset import PythonConfigQuerySet +from api_app.queryset import AbstractReportQuerySet, PythonConfigQuerySet + +if TYPE_CHECKING: + from api_app.pivots_manager.serializers import PivotReportBISerializer class PivotConfigQuerySet(PythonConfigQuerySet): @@ -27,3 +32,11 @@ def valid( | Q(related_connector_configs=None) ) return qs.distinct() + + +class PivotReportQuerySet(AbstractReportQuerySet): + @classmethod + def _get_serializer_class(cls) -> Type["PivotReportBISerializer"]: + from api_app.pivots_manager.serializers import PivotReportBISerializer + + return PivotReportBISerializer diff --git a/api_app/pivots_manager/serializers.py b/api_app/pivots_manager/serializers.py index 465f76f080..568cc9c9b1 100644 --- a/api_app/pivots_manager/serializers.py +++ b/api_app/pivots_manager/serializers.py @@ -5,6 +5,7 @@ from api_app.pivots_manager.models import PivotConfig, PivotMap, PivotReport from api_app.playbooks_manager.models import PlaybookConfig from api_app.serializers import ( + AbstractReportBISerializer, AbstractReportSerializer, PythonConfigSerializer, PythonConfigSerializerForMigration, @@ -18,6 +19,13 @@ class Meta: list_serializer_class = AbstractReportSerializer.Meta.list_serializer_class +class PivotReportBISerializer(AbstractReportBISerializer): + class Meta: + model = PivotReport + fields = AbstractReportBISerializer.Meta.fields + list_serializer_class = AbstractReportBISerializer.Meta.list_serializer_class + + class PivotMapSerializer(rfs.ModelSerializer): starting_job = rfs.PrimaryKeyRelatedField(queryset=Job.objects.all(), required=True) pivot_config = rfs.PrimaryKeyRelatedField( diff --git a/api_app/playbooks_manager/migrations/0022_add_dns0_to_free_playbook.py b/api_app/playbooks_manager/migrations/0022_add_dns0_to_free_playbook.py new file mode 100644 index 0000000000..e5668f9eb5 --- /dev/null +++ b/api_app/playbooks_manager/migrations/0022_add_dns0_to_free_playbook.py @@ -0,0 +1,30 @@ +# This 
file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl +# See the file 'LICENSE' for copying permission. + +from django.db import migrations + + +def migrate(apps, schema_editor): + playbook_config = apps.get_model("playbooks_manager", "PlaybookConfig") + pc = playbook_config.objects.get(name="FREE_TO_USE_ANALYZERS") + pc.analyzers.add(*["DNS0_names", "DNS0_rrsets_name"]) + pc.full_clean() + pc.save() + + +def reverse_migrate(apps, schema_editor): + playbook_config = apps.get_model("playbooks_manager", "PlaybookConfig") + pc = playbook_config.objects.get(name="FREE_TO_USE_ANALYZERS") + pc.analyzers.remove(*["DNS0_names", "DNS0_rrsets_name"]) + pc.full_clean() + pc.save() + + +class Migration(migrations.Migration): + dependencies = [ + ("playbooks_manager", "0021_alter_playbookconfig_name_and_more"), + ] + + operations = [ + migrations.RunPython(migrate, reverse_migrate), + ] diff --git a/api_app/queryset.py b/api_app/queryset.py index b620399a93..3ac141a7fc 100644 --- a/api_app/queryset.py +++ b/api_app/queryset.py @@ -1,11 +1,16 @@ import datetime +import json +import logging import uuid -from typing import TYPE_CHECKING, Generator +from typing import TYPE_CHECKING, Generator, Type +from django.conf import settings from django.contrib.postgres.expressions import ArraySubquery +from django.core.paginator import Paginator if TYPE_CHECKING: from api_app.models import PythonConfig + from api_app.serializers import AbstractReportBISerializer from celery.canvas import Signature from django.db import models @@ -226,7 +231,54 @@ def annotate_value_for_user( class AbstractReportQuerySet(QuerySet): - ... + @classmethod + def _get_serializer_class(cls) -> Type["AbstractReportBISerializer"]: + raise NotImplementedError() + + @staticmethod + def _create_index_template(): + if not settings.ELASTICSEARCH_CLIENT.indices.exists_template( + name=settings.ELASTICSEARCH_BI_INDEX + ): + with open( + settings.CONFIG_ROOT / "elastic_search_mappings" / "intel_owl_bi.json" + ) as f: + body = json.load(f) + body["index_patterns"] = [f"{settings.ELASTICSEARCH_BI_INDEX}-*"] + settings.ELASTICSEARCH_CLIENT.indices.put_template( + name=settings.ELASTICSEARCH_BI_INDEX, body=body + ) + + def send_to_elastic_as_bi(self, max_timeout: int = 60) -> bool: + from elasticsearch.helpers import bulk + + BULK_MAX_SIZE = 1000 + found_errors = False + + p = Paginator(self.order_by("pk"), BULK_MAX_SIZE) + for i in p.page_range: + page = p.get_page(i) + objects: AbstractReportQuerySet = page.object_list + serializer = self._get_serializer_class()(instance=objects, many=True) + objects_serialized = serializer.data + _, errors = bulk( + settings.ELASTICSEARCH_CLIENT, + objects_serialized, + request_timeout=max_timeout, + ) + if errors: + logging.error( + f"Errors on sending to elastic: {errors}." + " We are not marking objects as sent." 
+ ) + found_errors = True + else: + logging.info("BI sent") + self.model.objects.filter( + pk__in=objects.values_list("pk", flat=True) + ).update(sent_to_bi=True) + self._create_index_template() + return found_errors class ModelWithOwnershipQuerySet: diff --git a/api_app/serializers.py b/api_app/serializers.py index 3a376f0b0f..cc7f32a0a5 100644 --- a/api_app/serializers.py +++ b/api_app/serializers.py @@ -890,12 +890,6 @@ def create(self, validated_data): return last_job_for_md5 -class PluginConfigCompleteSerializer(rfs.ModelSerializer): - class Meta: - model = PluginConfig - fields = rfs.ALL_FIELDS - - class ModelWithOwnershipSerializer(rfs.ModelSerializer): class Meta: model = OwnershipAbstractModel @@ -1063,12 +1057,6 @@ def to_representation(self, data): return {elem.pop("name"): elem for elem in result} -class ParameterCompleteSerializer(rfs.ModelSerializer): - class Meta: - model = Parameter - fields = rfs.ALL_FIELDS - - class ParameterSerializer(rfs.ModelSerializer): value = SerializerMethodField() @@ -1147,6 +1135,22 @@ class Meta: fields = ["module", "base_path"] +class ParameterCompleteSerializer(rfs.ModelSerializer): + python_module = PythonModuleSerializer(read_only=True) + + class Meta: + model = Parameter + exclude = ["id"] + + +class PluginConfigCompleteSerializer(rfs.ModelSerializer): + parameter = ParameterCompleteSerializer(read_only=True) + + class Meta: + model = PluginConfig + exclude = ["id"] + + class AbstractConfigSerializer(rfs.ModelSerializer): ... @@ -1192,32 +1196,69 @@ class AbstractReportListSerializer(rfs.ListSerializer): ... -class AbstractReportSerializer(rfs.ModelSerializer): +class AbstractReportSerializerInterface(rfs.ModelSerializer): name = rfs.SlugRelatedField(read_only=True, source="config", slug_field="name") + type = rfs.SerializerMethodField(read_only=True, method_name="get_type") class Meta: - fields = ( - "id", - "name", - "process_time", - "report", - "status", - "errors", - "start_time", - "end_time", - "parameters", - ) + fields = ["name", "process_time", "status", "end_time", "parameters", "type"] list_serializer_class = AbstractReportListSerializer - def to_representation(self, instance: AbstractReport): - data = super().to_representation(instance) - data["type"] = instance.__class__.__name__.replace("Report", "").lower() - return data + def get_type(self, instance: AbstractReport): + return instance.__class__.__name__.replace("Report", "").lower() def to_internal_value(self, data): raise NotImplementedError() +class AbstractReportBISerializer(AbstractReportSerializerInterface): + application = rfs.CharField(read_only=True, default="IntelOwl") + timestamp = rfs.DateTimeField(source="start_time") + username = rfs.CharField(source="job.user.username") + environment = rfs.SerializerMethodField(method_name="get_environment") + + class Meta: + fields = AbstractReportSerializerInterface.Meta.fields + [ + "application", + "timestamp", + "username", + "environment", + ] + list_serializer_class = ( + AbstractReportSerializerInterface.Meta.list_serializer_class + ) + + def get_environment(self, instance: AbstractReport): + if settings.STAGE_PRODUCTION: + return "prod" + elif settings.STAGE_STAGING: + return "stag" + else: + return "test" + + def to_representation(self, instance: AbstractReport): + data = super().to_representation(instance) + return { + "_source": data, + "_type": "_doc", + "_index": settings.ELASTICSEARCH_BI_INDEX + "-" + now().strftime("%Y.%m"), + "_op_type": "index", + } + + +class
AbstractReportSerializer(AbstractReportSerializerInterface): + class Meta: + fields = AbstractReportSerializerInterface.Meta.fields + [ + "id", + "report", + "errors", + "start_time", + ] + list_serializer_class = ( + AbstractReportSerializerInterface.Meta.list_serializer_class + ) + + class CrontabScheduleSerializer(rfs.ModelSerializer): class Meta: model = CrontabSchedule diff --git a/api_app/views.py b/api_app/views.py index 41751d5ecf..5c28dcce68 100644 --- a/api_app/views.py +++ b/api_app/views.py @@ -117,6 +117,7 @@ def ask_analysis_availability(request): ) @api_view(["POST"]) def ask_multi_analysis_availability(request): + logger.info(f"received ask_multi_analysis_availability from user {request.user}") serializer = JobAvailabilitySerializer( data=request.data, context={"request": request}, many=True ) @@ -127,8 +128,10 @@ def ask_multi_analysis_availability(request): result = [] else: result = jobs + jrs = JobResponseSerializer(result, many=True).data + logger.info(f"finished ask_multi_analysis_availability from user {request.user}") return Response( - JobResponseSerializer(result, many=True).data, + jrs, status=status.HTTP_200_OK, ) @@ -141,11 +144,14 @@ def ask_multi_analysis_availability(request): ) @api_view(["POST"]) def analyze_file(request): + logger.info(f"received analyze_file from user {request.user}") fas = FileAnalysisSerializer(data=request.data, context={"request": request}) fas.is_valid(raise_exception=True) job = fas.save(send_task=True) + jrs = JobResponseSerializer(job).data + logger.info(f"finished analyze_file from user {request.user}") return Response( - JobResponseSerializer(job).data, + jrs, status=status.HTTP_200_OK, ) @@ -170,13 +176,16 @@ def analyze_file(request): ) @api_view(["POST"]) def analyze_multiple_files(request): + logger.info(f"received analyze_multiple_files from user {request.user}") fas = FileAnalysisSerializer( data=request.data, context={"request": request}, many=True ) fas.is_valid(raise_exception=True) jobs = fas.save(send_task=True) + jrs = JobResponseSerializer(jobs, many=True).data + logger.info(f"finished analyze_multiple_files from user {request.user}") return Response( - JobResponseSerializer(jobs, many=True).data, + jrs, status=status.HTTP_200_OK, ) @@ -189,11 +198,14 @@ def analyze_multiple_files(request): ) @api_view(["POST"]) def analyze_observable(request): + logger.info(f"received analyze_observable from user {request.user}") oas = ObservableAnalysisSerializer(data=request.data, context={"request": request}) oas.is_valid(raise_exception=True) job = oas.save(send_task=True) + jrs = JobResponseSerializer(job).data + logger.info(f"finished analyze_observable from user {request.user}") return Response( - JobResponseSerializer(job).data, + jrs, status=status.HTTP_200_OK, ) @@ -214,13 +226,16 @@ def analyze_observable(request): ) @api_view(["POST"]) def analyze_multiple_observables(request): + logger.info(f"received analyze_multiple_observables from user {request.user}") oas = ObservableAnalysisSerializer( data=request.data, many=True, context={"request": request} ) oas.is_valid(raise_exception=True) jobs = oas.save(send_task=True) + jrs = JobResponseSerializer(jobs, many=True).data + logger.info(f"finished analyze_multiple_observables from user {request.user}") return Response( - JobResponseSerializer(jobs, many=True).data, + jrs, status=status.HTTP_200_OK, ) diff --git a/api_app/visualizers_manager/enums.py b/api_app/visualizers_manager/enums.py index 46c6061d0c..5856358b76 100644 --- a/api_app/visualizers_manager/enums.py +++ 
b/api_app/visualizers_manager/enums.py @@ -61,6 +61,7 @@ def __bool__(self): class VisualizableIcon(enum.Enum): + BOOK = "book" INFO = "info" LIKE = "like" DISLIKE = "dislike" diff --git a/api_app/visualizers_manager/migrations/0034_visualizerreport_sent_to_bi.py b/api_app/visualizers_manager/migrations/0034_visualizerreport_sent_to_bi.py new file mode 100644 index 0000000000..911eef6018 --- /dev/null +++ b/api_app/visualizers_manager/migrations/0034_visualizerreport_sent_to_bi.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.8 on 2023-12-20 14:14 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("visualizers_manager", "0033_visualizerreport_parameters"), + ] + + operations = [ + migrations.AddField( + model_name="visualizerreport", + name="sent_to_bi", + field=models.BooleanField(default=False, editable=False), + ), + ] diff --git a/api_app/visualizers_manager/models.py b/api_app/visualizers_manager/models.py index 258d03ed2a..be73c92854 100644 --- a/api_app/visualizers_manager/models.py +++ b/api_app/visualizers_manager/models.py @@ -7,10 +7,12 @@ from api_app.models import AbstractReport, PythonConfig, PythonModule from api_app.playbooks_manager.models import PlaybookConfig from api_app.visualizers_manager.exceptions import VisualizerConfigurationException +from api_app.visualizers_manager.queryset import VisualizerReportQuerySet from api_app.visualizers_manager.validators import validate_report class VisualizerReport(AbstractReport): + objects = VisualizerReportQuerySet.as_manager() config = models.ForeignKey( "VisualizerConfig", related_name="reports", null=False, on_delete=models.CASCADE ) diff --git a/api_app/visualizers_manager/queryset.py b/api_app/visualizers_manager/queryset.py new file mode 100644 index 0000000000..6d5d0cccbb --- /dev/null +++ b/api_app/visualizers_manager/queryset.py @@ -0,0 +1,14 @@ +from typing import TYPE_CHECKING, Type + +from api_app.queryset import AbstractReportQuerySet + +if TYPE_CHECKING: + from api_app.visualizers_manager.serializers import VisualizerReportBISerializer + + +class VisualizerReportQuerySet(AbstractReportQuerySet): + @classmethod + def _get_serializer_class(cls) -> Type["VisualizerReportBISerializer"]: + from api_app.visualizers_manager.serializers import VisualizerReportBISerializer + + return VisualizerReportBISerializer diff --git a/api_app/visualizers_manager/serializers.py b/api_app/visualizers_manager/serializers.py index 6132d90a66..c4ce4477eb 100644 --- a/api_app/visualizers_manager/serializers.py +++ b/api_app/visualizers_manager/serializers.py @@ -4,6 +4,7 @@ from rest_framework import serializers as rfs from ..serializers import ( + AbstractReportBISerializer, AbstractReportSerializer, PythonConfigSerializer, PythonConfigSerializerForMigration, @@ -35,5 +36,20 @@ def get_name(cls, instance: VisualizerReport): class Meta: model = VisualizerReport - fields = AbstractReportSerializer.Meta.fields + ("config",) + fields = AbstractReportSerializer.Meta.fields + [ + "config", + ] list_serializer_class = AbstractReportSerializer.Meta.list_serializer_class + + +class VisualizerReportBISerializer(AbstractReportBISerializer): + name = rfs.SerializerMethodField() + + @classmethod + def get_name(cls, instance: VisualizerReport): + return instance.name or instance.config.pk + + class Meta: + model = VisualizerReport + fields = AbstractReportBISerializer.Meta.fields + list_serializer_class = AbstractReportBISerializer.Meta.list_serializer_class diff --git 
a/configuration/elastic_search_mappings/intel_owl_bi.json b/configuration/elastic_search_mappings/intel_owl_bi.json new file mode 100644 index 0000000000..c345a3a4b7 --- /dev/null +++ b/configuration/elastic_search_mappings/intel_owl_bi.json @@ -0,0 +1,40 @@ +{ + "settings" : { + "number_of_shards" : 3 + }, + "mappings": { + "_doc": { + "dynamic": false, + "properties": { + "timestamp": { + "type": "date" + }, + "application": { + "type": "keyword" + }, + "username": { + "type": "keyword" + }, + "environment": { + "type": "keyword" + }, + "name": { + "type": "keyword" + }, + "process_time": { + "type": "integer" + }, + "status": { + "type": "keyword" + }, + "end_time": { + "type": "date" + }, + "parameters": { + "type": "object", + "dynamic": true + } + } + } + } +} \ No newline at end of file diff --git a/docker/.env b/docker/.env index 5e539899bb..ee238bf740 100644 --- a/docker/.env +++ b/docker/.env @@ -1,6 +1,6 @@ ### DO NOT CHANGE THIS VALUE !! ### It should be updated only when you pull latest changes off from the 'master' branch of IntelOwl. # this variable must start with "REACT_APP_" to be used in the frontend too -REACT_APP_INTELOWL_VERSION="v5.2.1" +REACT_APP_INTELOWL_VERSION="v5.2.2" # if you want to use a nfs volume for shared files # NFS_ADDRESS= diff --git a/docker/Dockerfile b/docker/Dockerfile index 3e62d2993b..a190b6e4dd 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -22,7 +22,7 @@ ENV LOG_PATH /var/log/intel_owl ARG REPO_DOWNLOADER_ENABLED=true ARG WATCHMAN=false ENV watch_logs_cmd "watch -n1 tail -n10 /var/log/intel_owl/django/api_app.log" -ARG PYELASTIC_VERSION=7.2.2 +ARG PYELASTIC_VERSION=7.4.1 ARG PYCTI_VERSION=5.10.0 RUN mkdir -p ${LOG_PATH} \ @@ -40,20 +40,22 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* \ && pip3 install --no-cache-dir --upgrade pip +# perl not interactive +ENV PERL_MM_USE_DEFAULT 1 +# msgconvert +RUN cpan -T Email::Outlook::Message + COPY requirements/project-requirements.txt $PYTHONPATH/project-requirements.txt COPY requirements/certego-requirements.txt $PYTHONPATH/certego-requirements.txt WORKDIR $PYTHONPATH RUN pip3 install --no-cache-dir --compile -r project-requirements.txt \ # install elasticsearch-dsl's appropriate version as specified by user - && pip3 install --no-cache-dir django-elasticsearch-dsl==${PYELASTIC_VERSION} \ + && pip3 install --no-cache-dir elasticsearch-dsl==${PYELASTIC_VERSION} \ && pip3 install --no-cache-dir pycti==${PYCTI_VERSION} \ && pip3 install --no-cache-dir --compile -r certego-requirements.txt -# perl not interactive -ENV PERL_MM_USE_DEFAULT 1 -# msgconvert -RUN cpan -T Email::Outlook::Message + COPY . 
$PYTHONPATH diff --git a/docker/env_file_app_ci b/docker/env_file_app_ci index 53d896f7ec..454327fde9 100644 --- a/docker/env_file_app_ci +++ b/docker/env_file_app_ci @@ -27,10 +27,15 @@ AWS_ACCESS_KEY_ID= AWS_SECRET_ACCESS_KEY= # Elastic Search Configuration -ELASTICSEARCH_ENABLED=False -ELASTICSEARCH_HOST= -ELASTICSEARCH_NO_OF_SHARDS=1 -ELASTICSEARCH_NO_OF_REPLICAS=0 +ELASTICSEARCH_DSL_ENABLED=False +ELASTICSEARCH_DSL_HOST= +ELASTICSEARCH_DSL_NO_OF_SHARDS=1 +ELASTICSEARCH_DSL_NO_OF_REPLICAS=0 + +ELASTICSEARCH_BI_ENABLED=False +ELASTICSEARCH_BI_HOST= +ELASTICSEARCH_SSL_CERTIFICATE_FILE_NAME=elasticsearch.crt +ELASTICSEARCH_BI_INDEX=intelowl-bi # Test tokens diff --git a/docker/env_file_app_template b/docker/env_file_app_template index 4b723c27d2..2616627ea6 100644 --- a/docker/env_file_app_template +++ b/docker/env_file_app_template @@ -53,11 +53,16 @@ SLACK_TOKEN= DEFAULT_SLACK_CHANNEL= # Elastic Search Configuration -ELASTICSEARCH_ENABLED=False -ELASTICSEARCH_HOST= # consult to: https://django-elasticsearch-dsl.readthedocs.io/en/latest/settings.html -ELASTICSEARCH_NO_OF_SHARDS=1 -ELASTICSEARCH_NO_OF_REPLICAS=0 +ELASTICSEARCH_DSL_ENABLED=False +ELASTICSEARCH_DSL_HOST= +ELASTICSEARCH_DSL_NO_OF_SHARDS=1 +ELASTICSEARCH_DSL_NO_OF_REPLICAS=0 + +ELASTICSEARCH_BI_ENABLED=False +ELASTICSEARCH_BI_HOST= +ELASTICSEARCH_SSL_CERTIFICATE_FILE_NAME=elasticsearch.crt +ELASTICSEARCH_BI_INDEX=intelowl-bi # Test tokens TEST_IP=8.8.8.8 diff --git a/docker/test.override.yml b/docker/test.override.yml index 92bc8fabcd..02a84a33c0 100644 --- a/docker/test.override.yml +++ b/docker/test.override.yml @@ -8,7 +8,7 @@ services: args: REPO_DOWNLOADER_ENABLED: ${REPO_DOWNLOADER_ENABLED} WATCHMAN: "true" - PYELASTIC_VERSION: ${PYELASTIC_VERSION:-7.2.2} + PYELASTIC_VERSION: ${PYELASTIC_VERSION:-7.4.1} PYCTI_VERSION: ${PYCTI_VERSION:-5.10.0} image: intelowlproject/intelowl:test volumes: diff --git a/docs/source/Advanced-Configuration.md b/docs/source/Advanced-Configuration.md index 146ce7048e..5f90fd1789 100644 --- a/docs/source/Advanced-Configuration.md +++ b/docs/source/Advanced-Configuration.md @@ -5,6 +5,7 @@ This page includes details about some advanced features that Intel Owl provides - [ElasticSearch](#elastic-search) - [Kibana](#kibana) - [Example Configuration](#example-configuration) + - [Business Intelligence](#business-intelligence) - [Authentication options](#authentication-options) - [OAuth support](#google-oauth2) - [LDAP](#ldap) @@ -24,7 +25,8 @@ This page includes details about some advanced features that Intel Owl provides ## ElasticSearch -Intel Owl makes use of [django-elasticsearch-dsl](https://django-elasticsearch-dsl.readthedocs.io/en/latest/about.html) to index Job results into elasticsearch. The `save` and `delete` operations are auto-synced so you always have the latest data in ES. +### DSL +IntelOwl makes use of [django-elasticsearch-dsl](https://django-elasticsearch-dsl.readthedocs.io/en/latest/about.html) to index Job results into elasticsearch. The `save` and `delete` operations are auto-synced so you always have the latest data in ES. In the `env_file_app_template`, you'd see various elasticsearch related environment variables. The user should spin their own Elastic Search instance and configure these variables. @@ -36,7 +38,7 @@ Intel Owl provides a Kibana's "Saved Object" configuration (with example dashboa 1. Setup [Elastic Search and Kibana](https://hub.docker.com/r/nshou/elasticsearch-kibana/) and say it is running in a docker service with name `elasticsearch` on port `9200` which is exposed to the shared docker network. (Alternatively, you can spin up a local Elastic Search instance by appending `--elastic` to the `python3 start.py ...` command. Note that the local Elastic Search instance consumes a large amount of memory, and hence having >=16GB is recommended.) -2. In the `env_file_app`, we set `ELASTICSEARCH_ENABLED` to `True` and `ELASTICSEARCH_HOST` to `elasticsearch:9200`. +2. In the `env_file_app`, we set `ELASTICSEARCH_DSL_ENABLED` to `True` and `ELASTICSEARCH_DSL_HOST` to `elasticsearch:9200`. 3. Configure the version of the ElasticSearch Library used [depending on the version](https://django-elasticsearch-dsl.readthedocs.io/en/latest/about.html#features) of your Elasticsearch server. This is required for compatibility. To do that, you can leverage the option `--pyelastic-version` of the `start.py` script. The default value of that parameter indicates the version that would be installed by default. 4. Rebuild the docker images with `python3 start.py test --pyelastic-version x.x.x build` (required only if you changed the default value of `--pyelastic-version`) 5. Now start the docker containers and execute, @@ -47,6 +49,32 @@ docker exec -ti intelowl_uwsgi python manage.py search_index --rebuild This will build and populate all existing job objects into the `jobs` index. + +### Business Intelligence +IntelOwl makes use of [elasticsearch-py](https://elasticsearch-py.readthedocs.io/en/7.x/index.html) to store data that can be used for Business Intelligence purposes. +Since plugin reports are deleted periodically, this feature allows a small amount of data to be saved indefinitely, keeping track of how analyzers perform and how users use the application. +At the moment, the following information is sent to Elasticsearch: +- application name +- timestamp +- username +- configuration used +- process_time +- status +- end_time +- parameters + +Documents are saved in the `ELASTICSEARCH_BI_INDEX-%YEAR.%MONTH` index (e.g. `intelowl-bi-2023.12`), allowing you to manage retention accordingly. +To activate this feature, set `ELASTICSEARCH_BI_ENABLED` +to `True` in the `env_file_app` and +`ELASTICSEARCH_BI_HOST` to `elasticsearch:9200`, +or to your own Elasticsearch server. +Finally, copy your SSL certificate into the `configuration` folder +and set `ELASTICSEARCH_SSL_CERTIFICATE_FILE_NAME` to the name of your certificate file. + +An [index template](https://github.com/intelowlproject/IntelOwl/configuration/elastic_search_mappings/intel_owl_bi.json) is created after the first bulk submission of reports (a sketch of the sending flow is shown below). +If you want to use Kibana to visualize your data or build dashboards, you must create an index pattern: +go to Kibana -> Management -> Index Patterns, search for your index, and use `timestamp` as the time field +
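For clarity, here is a minimal sketch of what the `send_elastic_bi` periodic task (created in migration 0052 above) could do with the new queryset helper. This is an illustration, not the actual `intel_owl.tasks.send_bi_to_elastic` implementation; `VisualizerReport` is just one example of the report models that gained the `sent_to_bi` flag:

```python
# Hedged sketch: draining unsent reports to the BI index with the queryset
# helper added in this release. The task body below is an assumption; only
# `send_to_elastic_as_bi` and `sent_to_bi` come from the actual changes.
from django.conf import settings

from api_app.visualizers_manager.models import VisualizerReport


def send_bi_to_elastic_sketch():
    if not settings.ELASTICSEARCH_BI_ENABLED:
        return
    # Reports already sent are skipped; pages that fail bulk indexing keep
    # sent_to_bi=False, so they are retried on the next scheduled run.
    unsent = VisualizerReport.objects.filter(sent_to_bi=False)
    found_errors = unsent.send_to_elastic_as_bi(max_timeout=60)
    if found_errors:
        # Failures are already logged inside the helper; nothing else to do.
        pass
```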
## Authentication options IntelOwl provides support for some of the most common authentication methods: diff --git a/docs/source/Advanced-Usage.md b/docs/source/Advanced-Usage.md index 9320b57288..7fbcdfa9ee 100644 --- a/docs/source/Advanced-Usage.md +++ b/docs/source/Advanced-Usage.md @@ -160,7 +160,12 @@ Some analyzers could require a special configuration: - `Yara`: - You can customize both the `repositories` parameter and `private_repositories` secret to download and use rules different from the defaults that IntelOwl currently supports. - The `repositories` values are what will be used to actually run the analysis: if you have added private repositories, remember to add their URLs in `repositories` too! - - You can add local rules inside the directory at `/opt/deploy/files_required/yara/YOUR_USERNAME/custom_rules/`. Please remember that these rules are not synced in a cluster deploy: for this reason is advised to upload them on GitHub and use the `repositories` or `private_repositories` attributes. + - You can add local rules inside the directory at `/opt/deploy/files_required/yara/YOUR_USERNAME/custom_rules/`. Please remember that these rules are not synced in a cluster deploy: for this reason it is advised to upload them on GitHub and use the `repositories` or `private_repositories` attributes. + +- `DNS0_rrsets_name` and `DNS0_rrsets_data` ([DNS0 API](https://docs.dns0.eu/dns-api/rrsets)): + - Both these analyzers have a default parameter named `direction` that is used to dispatch the type of query to run, as shown in the sketch below. + - The value `right` runs the query using the `data` API parameter, while the value `left` runs the query using the `name` API parameter. + - This parameter should not be changed from its default value.
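A minimal sketch of that dispatch, assuming plain `requests` and an illustrative endpoint and auth scheme (the real analyzers live in IntelOwl's `analyzers_manager` and handle configuration and errors through the plugin framework):

```python
# Hedged sketch of the `direction` dispatch described above. The endpoint URL
# and the Authorization header are assumptions for illustration; only the
# `name`/`data` parameter split reflects the documented behaviour.
import requests


def query_rrsets(observable: str, direction: str, api_key: str) -> dict:
    if direction == "left":
        # DNS0_rrsets_name: match on the left-hand side (the rrset name)
        params = {"name": observable}
    elif direction == "right":
        # DNS0_rrsets_data: match on the right-hand side (the rrset data)
        params = {"data": observable}
    else:
        raise ValueError("direction must be 'left' or 'right'")
    response = requests.get(
        "https://api.dns0.eu/rrsets",  # illustrative URL, see docs.dns0.eu
        params=params,
        headers={"Authorization": f"Bearer {api_key}"},
    )
    response.raise_for_status()
    return response.json()
```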
## Organizations and data sharing diff --git a/docs/source/Contribute.md b/docs/source/Contribute.md index bafc012b39..0d986282b3 100644 --- a/docs/source/Contribute.md +++ b/docs/source/Contribute.md @@ -48,7 +48,7 @@ Then we strongly suggest to configure [pre-commit](https://github.com/pre-commit ```bash # From the project directory -# You should already have a virtualenv from the installation phase. Otherwise you can create one with `python3 -m venv venv` +python3 -m venv venv source venv/bin/activate # from the project base directory pip install pre-commit @@ -191,11 +191,11 @@ If the `Python Module` that you define need this type of behaviour, you have to Press `Save and continue editing` to, at the moment, manually add the `Parameters` that the python code requires (the class attributes that you needed): - 1. *name: Name of the parameter that will be dynamically added to the python class (if is a secret, in the python code a `_` wil be prepended to the name) - 2. *type: data type, `string`, `list`, `dict`, `integer`, `boolean`, `float` - 3. *description - 4. *required: `true` or `false`, meaning that a value is necessary to allow the run of the analyzer - 5. *is_secret: `true` or `false` + 1. *name: Name of the parameter that will be dynamically added to the python class (if it is a secret, a `_` will be prepended to the name in the python code) + 2. *type: data type, `string`, `list`, `dict`, `integer`, `boolean`, `float` + 3. *description + 4. *required: `true` or `false`, meaning that a value is necessary to allow the analyzer to run + 5. *is_secret: `true` or `false` (an example parameter entry is sketched after this section) At this point, you can follow the specific guide for each plugin
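To make the shape concrete, here is a hypothetical parameter entry as it might appear in a `dumpplugin`-generated data migration. The field names mirror the list above, while every value is invented for illustration:

```python
# Hedged sketch: one Parameter entry with the attributes listed above.
# All values are invented; only the field names come from the documentation.
params = [
    {
        "name": "api_key_name",  # added to the python class; secrets get a leading "_"
        "type": "string",
        "description": "API key for the external service",
        "required": True,
        "is_secret": True,
    },
]
```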
diff --git a/docs/source/Usage.md b/docs/source/Usage.md index a460365870..c3c17cb271 100644 --- a/docs/source/Usage.md +++ b/docs/source/Usage.md @@ -235,6 +235,9 @@ The following is the list of the available analyzers you can run out-of-the-box. * `DNSDB`: scan an observable against the [Passive DNS Farsight Database](https://www.farsightsecurity.com/solutions/dnsdb/) (supports both v1 and v2 versions) * `DNS0_EU`: Retrieve current domain resolution with DNS0.eu DoH (DNS over HTTPS) * `DNS0_EU_Malicious_Detector`: Check if a domain or a URL is marked as malicious in the DNS0.eu database ([Zero](https://www.dns0.eu/zero) service) +* `DNS0_names`: Run advanced searches on billions of current and historical domain names. ([DNS0 /names](https://docs.dns0.eu/dns-api/names)) +* `DNS0_rrsets_data`: Query billions of current and historical DNS resource record sets. Performs right-hand side matching. ([DNS0 /rrsets](https://docs.dns0.eu/dns-api/rrsets)) +* `DNS0_rrsets_name`: Query billions of current and historical DNS resource record sets. Performs left-hand side matching. ([DNS0 /rrsets](https://docs.dns0.eu/dns-api/rrsets)) * `DocGuard_Get`: check if a hash was analyzed on DocGuard. [DocGuard](https://www.docguard.io) * `FileScan_Search`: Finds reports and uploaded files by various tokens, like hash, filename, verdict, IOCs etc. via the [FileScan.io API](https://www.filescan.io/api/docs). * `FireHol_IPList`: check if an IP is in [FireHol's IPList](https://iplists.firehol.org/) diff --git a/docs/source/schema.yml b/docs/source/schema.yml index 1dc68a1683..e024d3bb5b 100644 --- a/docs/source/schema.yml +++ b/docs/source/schema.yml @@ -1,7 +1,7 @@ openapi: 3.0.3 info: title: IntelOwl API specification - version: 5.2.1 + version: 5.2.2 paths: /api/analyze_file: post: diff --git a/frontend/.eslintrc.json b/frontend/.eslintrc.json index c885b016ef..5d93e33bb8 100644 --- a/frontend/.eslintrc.json +++ b/frontend/.eslintrc.json @@ -28,6 +28,11 @@ "no-undef": "error", "no-use-before-define": "off", "import/no-unresolved": "off", + "id-length": [ + "error", + { "min": 3, "exceptions": ["_", "id", "pk", "ip", "IP"] } + ], + "no-warning-comments": "error", "no-console": [ "error", { diff --git a/frontend/src/components/Routes.jsx b/frontend/src/components/Routes.jsx index 9532ead4c2..32015f4566 100644 --- a/frontend/src/components/Routes.jsx +++ b/frontend/src/components/Routes.jsx @@ -43,9 +43,9 @@ const publicRoutesLazy = [ ), }, -].map((r) => ({ - ...r, - element: }>{r.element}, +].map((routes) => ({ + ...routes, + element: }>{routes.element}, })); // no auth public components @@ -66,11 +66,11 @@ const noAuthRoutesLazy = [ path: "/reset-password", element: , }, -].map((r) => ({ - ...r, +].map((routes) => ({ + ...routes, element: ( - }>{r.element} + }>{routes.element} ), })); @@ -196,11 +196,11 @@ const authRoutesLazy = [ ), }, -].map((r) => ({ - ...r, +].map((routes) => ({ + ...routes, element: ( - }>{r.element} + }>{routes.element} ), })); diff --git a/frontend/src/components/auth/ChangePassword.jsx b/frontend/src/components/auth/ChangePassword.jsx index 786adb860b..a877689805 100644 --- a/frontend/src/components/auth/ChangePassword.jsx +++ b/frontend/src/components/auth/ChangePassword.jsx @@ -43,7 +43,7 @@ export default function ChangePassword() { // auth store const changePassword = useAuthStore( - React.useCallback((s) => s.service.changePassword, []), + React.useCallback((state) => state.service.changePassword, []), ); // callback diff --git a/frontend/src/components/auth/Login.jsx b/frontend/src/components/auth/Login.jsx index 4d4246adda..36a53fd329 100644 --- a/frontend/src/components/auth/Login.jsx +++ b/frontend/src/components/auth/Login.jsx @@ -81,17 +81,19 @@ export default function Login() { // auth store const loginUser = useAuthStore( -
React.useCallback((s) => s.service.loginUser, []), + React.useCallback((state) => state.service.loginUser, []), ); - const updateToken = useAuthStore(React.useCallback((s) => s.updateToken, [])); + const updateToken = useAuthStore( + React.useCallback((state) => state.updateToken, []), + ); // callbacks const onSubmit = React.useCallback( async (values, _formik) => { try { await loginUser(values); - } catch (e) { + } catch (error) { // handled inside loginUser } }, @@ -124,8 +126,8 @@ export default function Login() {
{ - e.preventDefault(); + onClick={(event) => { + event.preventDefault(); const url = `${AUTH_BASE_URI}/google`; axios .get(`${url}?no_redirect=true`) diff --git a/frontend/src/components/auth/Logout.jsx b/frontend/src/components/auth/Logout.jsx index 6236c36a78..deafedb453 100644 --- a/frontend/src/components/auth/Logout.jsx +++ b/frontend/src/components/auth/Logout.jsx @@ -7,7 +7,7 @@ import { useAuthStore } from "../../stores/useAuthStore"; export default function Logout() { // auth store const [loading, logoutUser] = useAuthStore( - React.useCallback((s) => [s.loading, s.service.logoutUser], []), + React.useCallback((state) => [state.loading, state.service.logoutUser], []), ); React.useEffect(() => { diff --git a/frontend/src/components/auth/Register.jsx b/frontend/src/components/auth/Register.jsx index eb6ef6e7f9..db84569a3d 100644 --- a/frontend/src/components/auth/Register.jsx +++ b/frontend/src/components/auth/Register.jsx @@ -208,7 +208,7 @@ export default function Register() { }); setShowAfterRegistrationModal(true); - } catch (e) { + } catch (error) { // handled inside registerUser } }, diff --git a/frontend/src/components/auth/ResetPassword.jsx b/frontend/src/components/auth/ResetPassword.jsx index 64ffeadf48..16ded80cc7 100644 --- a/frontend/src/components/auth/ResetPassword.jsx +++ b/frontend/src/components/auth/ResetPassword.jsx @@ -87,7 +87,7 @@ export default function ResetPassword() { await resetPassword(body); // just to give small lag setTimeout(() => navigate("/login"), 500); - } catch (e) { + } catch (error) { // handled inside resetPassword } }, diff --git a/frontend/src/components/auth/utils/EmailForm.jsx b/frontend/src/components/auth/utils/EmailForm.jsx index 201e3a6cae..8c1162a305 100644 --- a/frontend/src/components/auth/utils/EmailForm.jsx +++ b/frontend/src/components/auth/utils/EmailForm.jsx @@ -37,7 +37,7 @@ export default function EmailForm({ onFormSubmit, apiCallback, ...restProps }) { try { await apiCallback(values); onFormSubmit(); - } catch (e) { + } catch (error) { // error will be handled by apiCallback } }, diff --git a/frontend/src/components/auth/utils/registration-alert.jsx b/frontend/src/components/auth/utils/registration-alert.jsx index 5c9699e8be..dafa8b8600 100644 --- a/frontend/src/components/auth/utils/registration-alert.jsx +++ b/frontend/src/components/auth/utils/registration-alert.jsx @@ -38,7 +38,7 @@ export function AfterRegistrationModalAlert(props) { // callbacks const toggle = React.useCallback(() => { navigate("/"); - setIsOpen((o) => !o); + setIsOpen((open) => !open); }, [navigate, setIsOpen]); return ( @@ -88,7 +88,7 @@ export function ConfigurationModalAlert(props) { // callbacks const toggle = React.useCallback(() => { navigate("/"); - setIsOpen((o) => !o); + setIsOpen((open) => !open); }, [navigate, setIsOpen]); return ( diff --git a/frontend/src/components/common/markdownToHtml.jsx b/frontend/src/components/common/markdownToHtml.jsx index f362beddfe..f733925619 100644 --- a/frontend/src/components/common/markdownToHtml.jsx +++ b/frontend/src/components/common/markdownToHtml.jsx @@ -1,13 +1,18 @@ import React from "react"; import ReactMarkdown from "react-markdown"; +/** + * @param {string} text + */ export function markdownToHtml(text) { return ( , + // eslint-disable-next-line id-length a: ({ node: _, ...props }) => ( // eslint-disable-next-line jsx-a11y/anchor-has-content diff --git a/frontend/src/components/dashboard/Dashboard.jsx b/frontend/src/components/dashboard/Dashboard.jsx index 2327e0ed4c..0827fe6d3e 100644 --- 
a/frontend/src/components/dashboard/Dashboard.jsx +++ b/frontend/src/components/dashboard/Dashboard.jsx @@ -72,8 +72,8 @@ export default function Dashboard() { />
- {charts1.map(([id, header, Component], i) => ( - + {charts1.map(([id, header, Component], index) => ( + { const chartProps = React.useMemo( () => ({ url: JOB_AGG_STATUS_URI, - accessorFnAggregation: (d) => d, + accessorFnAggregation: (jobStatusesPerDay) => jobStatusesPerDay, componentsFn: () => - Object.entries(JobStatusColors).map(([dkey, color]) => ( + Object.entries(JobStatusColors).map(([jobStatus, jobColor]) => ( )), }), @@ -56,10 +56,15 @@ export const JobTypeBarChart = React.memo(() => { const chartProps = React.useMemo( () => ({ url: JOB_AGG_TYPE_URI, - accessorFnAggregation: (d) => d, + accessorFnAggregation: (jobTypesPerDay) => jobTypesPerDay, componentsFn: () => - Object.entries(JobTypeColors).map(([dataKey, color]) => ( - + Object.entries(JobTypeColors).map(([jobType, jobColor]) => ( + )), }), [], @@ -74,11 +79,19 @@ export const JobObsClassificationBarChart = React.memo(() => { const chartProps = React.useMemo( () => ({ url: JOB_AGG_OBS_CLASSIFICATION_URI, - accessorFnAggregation: (d) => d, + accessorFnAggregation: (jobObservableSubTypesPerDay) => + jobObservableSubTypesPerDay, componentsFn: () => - Object.entries(ObservableClassificationColors).map(([dKey, color]) => ( - - )), + Object.entries(ObservableClassificationColors).map( + ([observableClassification, observableColor]) => ( + + ), + ), }), [], ); @@ -92,16 +105,17 @@ export const JobFileMimetypeBarChart = React.memo(() => { const chartProps = React.useMemo( () => ({ url: JOB_AGG_FILE_MIMETYPE_URI, - accessorFnAggregation: (d) => d?.aggregation, + accessorFnAggregation: (jobFileSubTypesPerDay) => + jobFileSubTypesPerDay?.aggregation, componentsFn: (respData) => { - const { values: mtList } = respData; - if (!mtList || !mtList?.length) return null; - return mtList.map((mc, i) => ( + const { values: mimetypeList } = respData; + if (!mimetypeList || !mimetypeList?.length) return null; + return mimetypeList.map((mimetype, index) => ( )); }, @@ -121,11 +135,13 @@ export const JobObsNamePieChart = React.memo(() => { () => ({ url: JOB_AGG_OBS_NAME_URI, modifierFn: (respData) => - Object.entries(respData?.aggregation).map(([key, val], i) => ({ - name: key.toLowerCase(), - value: val, - fill: colors[i], - })), + Object.entries(respData?.aggregation).map( + ([observableName, analyzedTimes], index) => ({ + name: observableName.toLowerCase(), + value: analyzedTimes, + fill: colors[index], + }), + ), }), [], ); @@ -140,11 +156,13 @@ export const JobFileHashPieChart = React.memo(() => { () => ({ url: JOB_AGG_FILE_MD5_URI, modifierFn: (respData) => - Object.entries(respData?.aggregation).map(([key, val], i) => ({ - name: key.toLowerCase(), - value: val, - fill: colors[i], - })), + Object.entries(respData?.aggregation).map( + ([fileMd5, analyzedTimes], index) => ({ + name: fileMd5.toLowerCase(), + value: analyzedTimes, + fill: colors[index], + }), + ), }), [], ); diff --git a/frontend/src/components/jobs/notification/NotificationsList.jsx b/frontend/src/components/jobs/notification/NotificationsList.jsx index 02799b0460..476c3a6855 100644 --- a/frontend/src/components/jobs/notification/NotificationsList.jsx +++ b/frontend/src/components/jobs/notification/NotificationsList.jsx @@ -18,7 +18,7 @@ export default function NotificationsList({ notifications, refetchFn }) { try { await notificationMarkAsRead(notifId); await refetchFn(); - } catch (e) { + } catch (error) { // handled inside notificationMarkAsRead } }, diff --git a/frontend/src/components/jobs/notification/notificationApi.js 
b/frontend/src/components/jobs/notification/notificationApi.js index 73b064836a..e5faac955d 100644 --- a/frontend/src/components/jobs/notification/notificationApi.js +++ b/frontend/src/components/jobs/notification/notificationApi.js @@ -8,8 +8,8 @@ export async function notificationMarkAsRead(notifId) { try { await axios.post(`${NOTIFICATION_BASE_URI}/${notifId}/mark-as-read`); return Promise.resolve(true); - } catch (e) { - addToast("Failed!", e.parsedMsg, "danger", true); - return Promise.reject(e); + } catch (error) { + addToast("Failed!", error.parsedMsg, "danger", true); + return Promise.reject(error); } } diff --git a/frontend/src/components/jobs/notifications.jsx b/frontend/src/components/jobs/notifications.jsx index 73fb1f8fc2..f2bc15dbe0 100644 --- a/frontend/src/components/jobs/notifications.jsx +++ b/frontend/src/components/jobs/notifications.jsx @@ -12,7 +12,7 @@ export function generateJobNotification(observableName, jobId) { new Audio(`${PUBLIC_URL}/notification.mp3`) .play() .then() - .catch((e) => console.error(e)); + .catch((error) => console.error(error)); // notification icon setNotificationFavicon(true); diff --git a/frontend/src/components/jobs/result/JobOverview.jsx b/frontend/src/components/jobs/result/JobOverview.jsx index b3bf54dd20..477d57f4b9 100644 --- a/frontend/src/components/jobs/result/JobOverview.jsx +++ b/frontend/src/components/jobs/result/JobOverview.jsx @@ -280,9 +280,7 @@ export function JobOverview({ // eslint-disable-next-line react-hooks/exhaustive-deps }, [UIElements]); - const elementsToShow = (isSelectedUI ? UIElements : rawElements).sort( - (element, elementToCompare) => (element.id > elementToCompare.id ? 1 : -1), - ); + const elementsToShow = isSelectedUI ? UIElements : rawElements; return ( Error!, e.parsedMsg, "warning"); + } catch (error) { + addToast(Error!, error.parsedMsg, "warning"); } finally { formik.setSubmitting(false); } diff --git a/frontend/src/components/jobs/result/bar/comment/CommentOverview.jsx b/frontend/src/components/jobs/result/bar/comment/CommentOverview.jsx index c5db96253e..130648d328 100644 --- a/frontend/src/components/jobs/result/bar/comment/CommentOverview.jsx +++ b/frontend/src/components/jobs/result/bar/comment/CommentOverview.jsx @@ -64,7 +64,8 @@ export default function CommentOverview({ job, refetchComments }) { }; job.comments.sort( - (first, second) => new Date(second.created_at) - new Date(first.created_at), + (currentComment, nextComment) => + new Date(nextComment.created_at) - new Date(currentComment.created_at), ); return ( diff --git a/frontend/src/components/jobs/result/bar/comment/commentApi.js b/frontend/src/components/jobs/result/bar/comment/commentApi.js index 90b48663f3..2ed64dfc1a 100644 --- a/frontend/src/components/jobs/result/bar/comment/commentApi.js +++ b/frontend/src/components/jobs/result/bar/comment/commentApi.js @@ -9,10 +9,10 @@ export async function createComment(formValues) { const resp = await axios.post(`${COMMENT_BASE_URI}`, formValues); return Promise.resolve(resp); - } catch (e) { - console.error(e); - addToast("Failed!", prettifyErrors(e), "danger"); - return Promise.reject(e); + } catch (error) { + console.error(error); + addToast("Failed!", prettifyErrors(error), "danger"); + return Promise.reject(error); } } @@ -21,9 +21,9 @@ export async function deleteComment(commentId) { const resp = await axios.delete(`${COMMENT_BASE_URI}/${commentId}`); return Promise.resolve(resp); - } catch (e) { - console.error(e); - addToast("Failed!", prettifyErrors(e), "danger"); - return 
Promise.reject(e); + } catch (error) { + console.error(error); + addToast("Failed!", prettifyErrors(error), "danger"); + return Promise.reject(error); } } diff --git a/frontend/src/components/jobs/result/bar/jobBarApi.jsx b/frontend/src/components/jobs/result/bar/jobBarApi.jsx index 0d8983a3fc..0e9d3fa34f 100644 --- a/frontend/src/components/jobs/result/bar/jobBarApi.jsx +++ b/frontend/src/components/jobs/result/bar/jobBarApi.jsx @@ -31,10 +31,10 @@ export async function saveJobAsPlaybook(values) { "info", ); } - } catch (e) { + } catch (error) { addToast( Failed creation of playbook with name {values.name}, - e.parsedMsg, + error.parsedMsg, "warning", ); } diff --git a/frontend/src/components/jobs/result/jobApi.jsx b/frontend/src/components/jobs/result/jobApi.jsx index 660683dbf8..9003d83d06 100644 --- a/frontend/src/components/jobs/result/jobApi.jsx +++ b/frontend/src/components/jobs/result/jobApi.jsx @@ -35,8 +35,8 @@ export async function downloadJobSample(jobId) { responseType: "blob", }); blob = new Blob([resp.data]); - } catch (e) { - addToast("Failed", e.parsedMsg, "warning"); + } catch (error) { + addToast("Failed", error.parsedMsg, "warning"); } return blob; } @@ -51,12 +51,12 @@ export async function killJob(jobId) { if (success) { addToast(Sent kill request for job #{jobId}, null, "info"); } - } catch (e) { + } catch (error) { addToast( Failed. Operation: kill job #{jobId} , - e.parsedMsg, + error.parsedMsg, "warning", ); } @@ -73,12 +73,12 @@ export async function deleteJob(jobId) { if (success) { addToast(Deleted Job #{jobId}, null, "info"); } - } catch (e) { + } catch (error) { addToast( Failed. Operation: delete job #{jobId} , - e.parsedMsg, + error.parsedMsg, "warning", ); } @@ -105,12 +105,12 @@ export async function killPlugin(jobId, plugin) { "info", ); } - } catch (e) { + } catch (error) { addToast( Failed. Operation: kill {plugin.type} {plugin.name} , - e.parsedMsg, + error.parsedMsg, "warning", ); } @@ -137,12 +137,12 @@ export async function retryPlugin(jobId, plugin) { "info", ); } - } catch (e) { + } catch (error) { addToast( Failed. 
Operation: retry {plugin.type} {plugin.name} , - e.parsedMsg, + error.parsedMsg, "warning", ); } diff --git a/frontend/src/components/jobs/result/pluginReportTables.jsx b/frontend/src/components/jobs/result/pluginReportTables.jsx index d2ac82c478..99c5eeda67 100644 --- a/frontend/src/components/jobs/result/pluginReportTables.jsx +++ b/frontend/src/components/jobs/result/pluginReportTables.jsx @@ -21,7 +21,7 @@ const tableProps = { { Header: "Actions", id: "actions", - accessor: (r) => r, + accessor: (pluginReport) => pluginReport, maxWidth: 60, disableSortBy: true, Cell: ({ value: plugin, customProps: { job, refetch } }) => ( @@ -80,7 +80,7 @@ const tableProps = { { Header: "Running Time", id: "running_time", - accessor: (r) => r, + accessor: (pluginReport) => pluginReport, disableSortBy: true, maxWidth: 125, Cell: ({ value: plugin }) => ( diff --git a/frontend/src/components/jobs/result/visualizer/icons.jsx b/frontend/src/components/jobs/result/visualizer/icons.jsx index 32dea1bbe3..c79e553af2 100644 --- a/frontend/src/components/jobs/result/visualizer/icons.jsx +++ b/frontend/src/components/jobs/result/visualizer/icons.jsx @@ -33,9 +33,11 @@ import { MdSignalWifiStatusbarConnectedNoInternet2, } from "react-icons/md"; import { TbFishHook, TbNetwork } from "react-icons/tb"; +import { FaBook } from "react-icons/fa6"; import { PUBLIC_URL } from "../../../../constants/environment"; const generalIcons = Object.freeze({ + book: , info: , like: , dislike: , diff --git a/frontend/src/components/jobs/result/visualizer/visualizer.jsx b/frontend/src/components/jobs/result/visualizer/visualizer.jsx index 2e44ae6b23..d21d72cb68 100644 --- a/frontend/src/components/jobs/result/visualizer/visualizer.jsx +++ b/frontend/src/components/jobs/result/visualizer/visualizer.jsx @@ -146,8 +146,8 @@ export default function VisualizerReport({ visualizerReport }) { validateLevel(levelElement), ); validatedLevels.sort( - (firstLevel, secondLevel) => - firstLevel.levelPosition - secondLevel.levelPosition, + (currentLevel, nextLevel) => + currentLevel.levelPosition - nextLevel.levelPosition, ); console.debug("VisualizerReport - validatedLevels"); diff --git a/frontend/src/components/jobs/table/jobTableColumns.jsx b/frontend/src/components/jobs/table/jobTableColumns.jsx index c61e09e35e..1e33e0dd9a 100644 --- a/frontend/src/components/jobs/table/jobTableColumns.jsx +++ b/frontend/src/components/jobs/table/jobTableColumns.jsx @@ -85,7 +85,7 @@ export const jobTableColumns = [ { Header: "Name", id: "name", - accessor: (r) => r.observable_name || r.file_name, + accessor: (job) => job.observable_name || job.file_name, Cell: ({ value, row: { original: job } }) => ( r.is_sample, + accessor: (job) => job.is_sample, Cell: ({ value }) => (value ? JobTypes.FILE : JobTypes.OBSERVABLE), disableSortBy: true, maxWidth: 100, @@ -185,7 +185,7 @@ export const jobTableColumns = [ { Header: "SubType", id: "type", - accessor: (r) => r.observable_classification || r.file_mimetype, + accessor: (job) => job.observable_classification || job.file_mimetype, disableSortBy: true, maxWidth: 100, Filter: SelectOptionsFilter, @@ -214,12 +214,12 @@ export const jobTableColumns = [ disableSortBy: true, maxWidth: 100, Filter: DefaultColumnFilter, - filterValueAccessorFn: (tags) => tags.map((t) => t.label), + filterValueAccessorFn: (tags) => tags.map((tag) => tag.label), }, { Header: "Playbook Executed", id: "playbook_to_execute", - accessor: (r) => r, + accessor: (job) => job, Cell: ({ value: job }) => (
diff --git a/frontend/src/components/organization/InvitationsList.jsx b/frontend/src/components/organization/InvitationsList.jsx index 9a8449e0c4..35851dc4ce 100644 --- a/frontend/src/components/organization/InvitationsList.jsx +++ b/frontend/src/components/organization/InvitationsList.jsx @@ -26,7 +26,11 @@ export default function InvitationsList() { { url: BASE_URI_INVITATION, }, - (respData) => respData.sort((a, b) => a.created_at - b.created_at), + (respData) => + respData.sort( + (currentInvitation, nextInvitation) => + currentInvitation.created_at - nextInvitation.created_at, + ), ); // page title diff --git a/frontend/src/components/organization/orgApi.js b/frontend/src/components/organization/orgApi.js index 2de978974f..9b65f36096 100644 --- a/frontend/src/components/organization/orgApi.js +++ b/frontend/src/components/organization/orgApi.js @@ -16,9 +16,9 @@ async function createOrganization(body) { true, ); return resp; - } catch (e) { - addToast("Failed!", e.parsedMsg, "danger", true); - return Promise.reject(e); + } catch (error) { + addToast("Failed!", error.parsedMsg, "danger", true); + return Promise.reject(error); } } @@ -33,9 +33,9 @@ async function deleteOrganization(orgName) { 6000, ); return resp; - } catch (e) { - addToast("Failed!", e.parsedMsg, "danger", true); - return Promise.reject(e); + } catch (error) { + addToast("Failed!", error.parsedMsg, "danger", true); + return Promise.reject(error); } } @@ -51,9 +51,9 @@ async function removeMemberFromOrg(username) { true, ); return resp; - } catch (e) { - addToast("Failed!", e.parsedMsg, "danger", true); - return Promise.reject(e); + } catch (error) { + addToast("Failed!", error.parsedMsg, "danger", true); + return Promise.reject(error); } } @@ -67,9 +67,9 @@ async function leaveOrganization(orgName) { true, ); return resp; - } catch (e) { - addToast("Failed!", e.parsedMsg, "danger", true); - return Promise.reject(e); + } catch (error) { + addToast("Failed!", error.parsedMsg, "danger", true); + return Promise.reject(error); } } @@ -80,9 +80,9 @@ async function promoteUserAdmin(username) { }); addToast(`User @${username} is now an admin.`, null, "success", true); return resp; - } catch (e) { - addToast("Failed!", e.parsedMsg, "danger", true); - return Promise.reject(e); + } catch (error) { + addToast("Failed!", error.parsedMsg, "danger", true); + return Promise.reject(error); } } @@ -96,9 +96,9 @@ async function removeUserAdmin(username) { true, ); return resp; - } catch (e) { - addToast("Failed!", e.parsedMsg, "danger", true); - return Promise.reject(e); + } catch (error) { + addToast("Failed!", error.parsedMsg, "danger", true); + return Promise.reject(error); } } @@ -109,9 +109,9 @@ async function sendInvite(body) { const resp = await axios.post(`${BASE_URI_ORG}/invite`, body); addToast("Invite Sent!", null, "success", true); return resp; - } catch (e) { - addToast("Invite Failed!", e.parsedMsg, "danger", true); - return Promise.reject(e); + } catch (error) { + addToast("Invite Failed!", error.parsedMsg, "danger", true); + return Promise.reject(error); } } @@ -126,9 +126,9 @@ async function acceptInvitation(invId, orgName) { 6000, ); return resp; - } catch (e) { - addToast("Failed!", e.parsedMsg, "danger", true); - return Promise.reject(e); + } catch (error) { + addToast("Failed!", error.parsedMsg, "danger", true); + return Promise.reject(error); } } @@ -142,9 +142,9 @@ async function declineInvitation(invId, orgName) { true, ); return resp; - } catch (e) { - addToast("Failed!", e.parsedMsg, "danger", true); - return 
Promise.reject(e); + } catch (error) { + addToast("Failed!", error.parsedMsg, "danger", true); + return Promise.reject(error); } } @@ -158,9 +158,9 @@ async function deleteInvitation(invId, username) { true, ); return resp; - } catch (e) { - addToast("Failed!", e.parsedMsg, "danger", true); - return Promise.reject(e); + } catch (error) { + addToast("Failed!", error.parsedMsg, "danger", true); + return Promise.reject(error); } } diff --git a/frontend/src/components/organization/utils/InviteButton.jsx b/frontend/src/components/organization/utils/InviteButton.jsx index ce7ef577ad..39b33fa631 100644 --- a/frontend/src/components/organization/utils/InviteButton.jsx +++ b/frontend/src/components/organization/utils/InviteButton.jsx @@ -37,7 +37,7 @@ function InvitationForm({ onFormSubmit }) { try { await sendInvite(values); onFormSubmit(); - } catch (e) { + } catch (error) { // error was handled inside sendInvite } finally { formik.setSubmitting(false); diff --git a/frontend/src/components/organization/utils/MembersList.jsx b/frontend/src/components/organization/utils/MembersList.jsx index a049be2574..e29df3a28c 100644 --- a/frontend/src/components/organization/utils/MembersList.jsx +++ b/frontend/src/components/organization/utils/MembersList.jsx @@ -43,7 +43,12 @@ export function MembersList() { // memo const sortedMembers = React.useMemo( () => - members?.length ? [...members].sort((a, b) => a.joined - b.joined) : [], + members?.length + ? [...members].sort( + (currentMember, nextMember) => + currentMember.joined - nextMember.joined, + ) + : [], [members], ); diff --git a/frontend/src/components/organization/utils/OrgCreateButton.jsx b/frontend/src/components/organization/utils/OrgCreateButton.jsx index 88b076158a..8253616856 100644 --- a/frontend/src/components/organization/utils/OrgCreateButton.jsx +++ b/frontend/src/components/organization/utils/OrgCreateButton.jsx @@ -37,7 +37,7 @@ function OrganizationCreateForm({ onFormSubmit }) { try { await createOrganization(values); onFormSubmit(); - } catch (e) { + } catch (error) { // error was handled inside sendInvite } finally { formik.setSubmitting(false); diff --git a/frontend/src/components/plugins/types/pluginActionsButtons.jsx b/frontend/src/components/plugins/types/pluginActionsButtons.jsx index 833187c737..33d85fa171 100644 --- a/frontend/src/components/plugins/types/pluginActionsButtons.jsx +++ b/frontend/src/components/plugins/types/pluginActionsButtons.jsx @@ -2,7 +2,7 @@ import React from "react"; import PropTypes from "prop-types"; import { Spinner, Button, Modal, ModalHeader, ModalBody } from "reactstrap"; import { RiHeartPulseLine } from "react-icons/ri"; -import { MdDelete } from "react-icons/md"; +import { MdDelete, MdFileDownload } from "react-icons/md"; import { BsPeopleFill } from "react-icons/bs"; import { IconButton } from "@certego/certego-ui"; @@ -12,8 +12,8 @@ import { useOrganizationStore } from "../../../stores/useOrganizationStore"; import { usePluginConfigurationStore } from "../../../stores/usePluginConfigurationStore"; // we can't delete this function because IconButton expects Icon as a function -function PluginHealthSpinner() { - return ; +function PluginSpinner() { + return ; } export function PluginHealthCheckButton({ pluginName, pluginType_ }) { @@ -26,39 +26,32 @@ export function PluginHealthCheckButton({ pluginName, pluginType_ }) { ), ); const [isLoading, setIsLoading] = React.useState(false); - const [isHealthy, setIsHealthy] = React.useState(undefined); const onClick = async () => { setIsLoading(true); - 
diff --git a/frontend/src/components/plugins/types/pluginActionsButtons.jsx b/frontend/src/components/plugins/types/pluginActionsButtons.jsx
index 833187c737..33d85fa171 100644
--- a/frontend/src/components/plugins/types/pluginActionsButtons.jsx
+++ b/frontend/src/components/plugins/types/pluginActionsButtons.jsx
@@ -2,7 +2,7 @@ import React from "react";
 import PropTypes from "prop-types";
 import { Spinner, Button, Modal, ModalHeader, ModalBody } from "reactstrap";
 import { RiHeartPulseLine } from "react-icons/ri";
-import { MdDelete } from "react-icons/md";
+import { MdDelete, MdFileDownload } from "react-icons/md";
 import { BsPeopleFill } from "react-icons/bs";
 import { IconButton } from "@certego/certego-ui";
@@ -12,8 +12,8 @@
 import { useOrganizationStore } from "../../../stores/useOrganizationStore";
 import { usePluginConfigurationStore } from "../../../stores/usePluginConfigurationStore";
 
 // we can't delete this function because IconButton expects Icon as a function
-function PluginHealthSpinner() {
-  return <Spinner … />;
+function PluginSpinner() {
+  return <Spinner … />;
 }
 
@@ -26,39 +26,32 @@ export function PluginHealthCheckButton({ pluginName, pluginType_ }) {
     ),
   );
   const [isLoading, setIsLoading] = React.useState(false);
-  const [isHealthy, setIsHealthy] = React.useState(undefined);
 
   const onClick = async () => {
     setIsLoading(true);
-    const status = await checkPluginHealth(pluginType_, pluginName);
-    setIsHealthy(status);
+    await checkPluginHealth(pluginType_, pluginName);
     setIsLoading(false);
   };
 
   return (
-    <div …>
+    <div …>
       <IconButton … />
-      {isHealthy !== undefined &&
-        (isHealthy ? (
-          <span …>Up and running!</span>
-        ) : (
-          <span …>Failing!</span>
-        ))}
     </div>
   );
 }
 
 PluginHealthCheckButton.propTypes = {
   pluginName: PropTypes.string.isRequired,
-  pluginType_: PropTypes.oneOf(["analyzer", "connector"]).isRequired,
+  pluginType_: PropTypes.oneOf(["analyzer", "connector", "ingestor", "pivot"])
+    .isRequired,
 };
 
 export function OrganizationPluginStateToggle({
@@ -68,7 +61,7 @@ export function OrganizationPluginStateToggle({
   refetch,
   pluginOwner,
 }) {
-  const user = useAuthStore(React.useCallback((s) => s.user, []));
+  const user = useAuthStore(React.useCallback((state) => state.user, []));
   const {
     noOrg,
     fetchAll: fetchAllOrganizations,
@@ -212,3 +205,41 @@ export function PlaybooksDeletionButton({ playbookName }) {
 PlaybooksDeletionButton.propTypes = {
   playbookName: PropTypes.string.isRequired,
 };
+
+export function PluginPullButton({ pluginName, pluginType_ }) {
+  const { pluginPull } = usePluginConfigurationStore(
+    React.useCallback(
+      (state) => ({
+        pluginPull: state.pluginPull,
+      }),
+      [],
+    ),
+  );
+  const [isLoading, setIsLoading] = React.useState(false);
+
+  const onClick = async () => {
+    setIsLoading(true);
+    await pluginPull(pluginType_, pluginName);
+    setIsLoading(false);
+  };
+
+  return (
+    <div …>
+      <IconButton … />
+    </div>
+  );
+}
+
+PluginPullButton.propTypes = {
+  pluginName: PropTypes.string.isRequired,
+  pluginType_: PropTypes.oneOf(["analyzer", "connector", "ingestor", "pivot"])
+    .isRequired,
+};
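Both `PluginHealthCheckButton` and the new `PluginPullButton` subscribe to a single store action through a memoized selector. A standalone sketch of that subscription pattern (the hook name `usePluginPullAction` and the plugin name are hypothetical; the store path matches the imports above):

```jsx
import React from "react";
import { usePluginConfigurationStore } from "../../../stores/usePluginConfigurationStore";

// Sketch: select only the `pluginPull` action, memoized with useCallback,
// so the component re-renders only when that slice of the store changes.
function usePluginPullAction() {
  return usePluginConfigurationStore(
    React.useCallback((state) => state.pluginPull, []),
  );
}

// usage sketch, mirroring PluginPullButton's onClick:
// const pluginPull = usePluginPullAction();
// await pluginPull("analyzer", "SomeDockerAnalyzer"); // hypothetical plugin name
```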
diff --git a/frontend/src/components/plugins/types/pluginTableColumns.jsx b/frontend/src/components/plugins/types/pluginTableColumns.jsx
index 54d99c1c7e..15b530d614 100644
--- a/frontend/src/components/plugins/types/pluginTableColumns.jsx
+++ b/frontend/src/components/plugins/types/pluginTableColumns.jsx
@@ -23,6 +23,7 @@ import {
   OrganizationPluginStateToggle,
   PluginHealthCheckButton,
   PlaybooksDeletionButton,
+  PluginPullButton,
 } from "./pluginActionsButtons";
 
 import { JobTypes } from "../../../constants/jobConst";
@@ -51,7 +52,7 @@ function SelectColumnFilter({
         ? filterValueAccessorFn(value)
         : value;
       if (Array.isArray(optVal)) {
-        optVal.forEach((v) => optionsSet.add(v));
+        optVal.forEach((val) => optionsSet.add(val));
       } else {
         optionsSet.add(optVal);
       }
@@ -61,7 +62,7 @@ function SelectColumnFilter({
   }, [id, preFilteredRows, filterValueAccessorFn]);
 
   // Set undefined to remove the filter entirely
-  const onChange = (e) => setFilter(e.target.value || undefined);
+  const onChange = (event) => setFilter(event.target.value || undefined);
 
   // Render a multi-select box
   return (
@@ -87,7 +88,7 @@ const pluginTableColumns = [
   {
     Header: "Info",
     id: "info",
-    accessor: (r) => r,
+    accessor: (pluginConfig) => pluginConfig,
     Cell: ({ value }) => <… />,
     disableSortBy: true,
     maxWidth: 50,
@@ -113,7 +114,8 @@ const pluginTableColumns = [
   {
     Header: "Active",
     id: "active",
-    accessor: (r) => !(r.disabled || r.orgPluginDisabled),
+    accessor: (pluginConfig) =>
+      !(pluginConfig.disabled || pluginConfig.orgPluginDisabled),
     Cell: ({ value }) => <… />,
     Filter: SelectOptionsFilter,
     selectOptions: ["true", "false"],
@@ -161,12 +163,12 @@ export const analyzersTableColumns = [
   {
     Header: "Supported types",
     id: "supported_types",
-    accessor: (r) => {
+    accessor: (pluginConfig) => {
       let supported;
-      if (r.type === JobTypes.OBSERVABLE) {
-        supported = r.observable_supported;
+      if (pluginConfig.type === JobTypes.OBSERVABLE) {
+        supported = pluginConfig.observable_supported;
       } else {
-        supported = r.supported_filetypes;
+        supported = pluginConfig.supported_filetypes;
       }
       if (supported.length === 0) {
         supported.push("everything");
@@ -175,8 +177,8 @@ export const analyzersTableColumns = [
     },
     Cell: ({ value }) => (
       <ul …>
-        {value?.sort().map((v) => (
-          <li …>{v}</li>
+        {value?.sort().map((val) => (
+          <li …>{val}</li>
         ))}
       </ul>
     ),
@@ -196,7 +198,7 @@ export const analyzersTableColumns = [
   {
     Header: "Actions",
    id: "actions",
-    accessor: (r) => r,
+    accessor: (analyzerConfig) => analyzerConfig,
     disableSortBy: true,
     Cell: ({ value }) => (
       <div …>
@@ -206,12 +208,14 @@ export const analyzersTableColumns = [
         refetch={value?.refetch}
         type={PluginsTypes.ANALYZER}
       />
-      {value?.docker_based && (
-        <PluginHealthCheckButton … />
-      )}
+      <PluginHealthCheckButton … />
+      <PluginPullButton … />
     </div>
   ),
   maxWidth: 100,
@@ -257,7 +261,7 @@ export const connectorTableColumns = [
   {
     Header: "Actions",
     id: "actions",
-    accessor: (r) => r,
+    accessor: (connectorConfig) => connectorConfig,
     disableSortBy: true,
     Cell: ({ value }) => (
       <div …>
@@ -271,6 +275,10 @@ export const connectorTableColumns = [
         pluginName={value?.name}
         pluginType_={PluginsTypes.CONNECTOR}
       />
+      <PluginPullButton
+        pluginName={value?.name}
+        pluginType_={PluginsTypes.CONNECTOR}
+      />
     </div>
   ),
   maxWidth: 125,
@@ -314,7 +322,7 @@ export const pivotTableColumns = [
   {
     Header: "Actions",
     id: "actions",
-    accessor: (r) => r,
+    accessor: (pivotConfig) => pivotConfig,
     disableSortBy: true,
     Cell: ({ value }) => (
       <div …>
@@ -324,6 +332,14 @@ export const pivotTableColumns = [
         refetch={value?.refetch}
         type={PluginsTypes.PIVOT}
       />
+      <PluginHealthCheckButton … />
+      <PluginPullButton … />
     </div>
   ),
   maxWidth: 125,
@@ -347,8 +363,8 @@ export const playbookTableColumns = [
     accessor: "type",
     Cell: ({ value }) => (
       <ul …>
-        {value?.sort().map((v) => (
-          <li …>{v}</li>
+        {value?.sort().map((val) => (
+          <li …>{val}</li>
         ))}
       </ul>
     ),
@@ -361,7 +377,10 @@ export const playbookTableColumns = [
     id: "analyzers",
     accessor: (row) => row.analyzers,
     Cell: ({ value }) => (
-      <PlaybooksCollapse value={value} … />
+      <PlaybooksCollapse
+        pluginList={value}
+        …
+      />
     ),
     disableSortBy: true,
     Filter: SelectColumnFilter,
@@ -372,7 +391,10 @@ export const playbookTableColumns = [
     id: "connectors",
     accessor: (row) => row.connectors,
     Cell: ({ value }) => (
-      <PlaybooksCollapse value={value} … />
+      <PlaybooksCollapse
+        pluginList={value}
+        …
+      />
     ),
     disableSortBy: true,
     Filter: SelectColumnFilter,
@@ -382,7 +404,7 @@ export const playbookTableColumns = [
     id: "pivots",
     accessor: (row) => row.pivots,
     Cell: ({ value }) => (
-      <PlaybooksCollapse value={value} … />
+      <PlaybooksCollapse pluginList={value} … />
     ),
     disableSortBy: true,
     Filter: SelectColumnFilter,
@@ -392,7 +414,10 @@ export const playbookTableColumns = [
     id: "visualizers",
     accessor: (row) => row.visualizers,
     Cell: ({ value }) => (
-      <PlaybooksCollapse value={value} … />
+      <PlaybooksCollapse
+        pluginList={value}
+        …
+      />
     ),
     disableSortBy: true,
     Filter: SelectColumnFilter,
@@ -400,7 +425,7 @@ export const playbookTableColumns = [
   {
     Header: "Actions",
     id: "actions",
-    accessor: (r) => r,
+    accessor: (playbookConfig) => playbookConfig,
     disableSortBy: true,
     Cell: ({ value }) => (
       <div …>
@@ -458,16 +483,16 @@ export const visualizerTableColumns = [
         key={`visualizers-playbooks__${value}`}
         className="d-flex flex-column align-items-start"
       >
-        {value?.sort().map((v) => (
+        {value?.sort().map((val) => (
           <li …>
-            {v}
+            {val}
           </li>
         ))}
@@ -479,7 +504,7 @@ export const visualizerTableColumns = [
   {
     Header: "Actions",
     id: "actions",
-    accessor: (r) => r,
+    accessor: (visualizerConfig) => visualizerConfig,
     disableSortBy: true,
     Cell: ({ value }) => (
       <div …>
@@ -494,7 +519,7 @@ export const visualizerTableColumns = [
     maxWidth: 90,
   },
 ];
-// Visualizers columns: these columns are shown for the visualizers
+// Ingestors columns: these columns are shown for the ingestors
 export const ingestorTableColumns = [
   ...pluginTableColumns,
   {
@@ -542,4 +567,23 @@ export const ingestorTableColumns = [
     disableSortBy: true,
     maxWidth: 145,
   },
+  {
+    Header: "Actions",
+    id: "actions",
+    accessor: (ingestorConfig) => ingestorConfig,
+    disableSortBy: true,
+    Cell: ({ value }) => (
+      <div …>
+        <PluginHealthCheckButton … />
+        <PluginPullButton … />
+      </div>
+    ),
+    maxWidth: 90,
+  },
 ];
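The added ingestor "Actions" column reuses the react-table column shape seen throughout this file. A standalone sketch of such a definition (the `<div>` wrapper and button props follow the connector column above; the variable name and literal plugin type are illustrative):

```jsx
// Sketch: a react-table v7-style column with a descriptive accessor name,
// as this file now uses everywhere in place of the old `(r) => r`.
const ingestorActionsColumn = {
  Header: "Actions",
  id: "actions",
  accessor: (ingestorConfig) => ingestorConfig,
  disableSortBy: true,
  Cell: ({ value }) => (
    <div>
      <PluginHealthCheckButton pluginName={value?.name} pluginType_="ingestor" />
      <PluginPullButton pluginName={value?.name} pluginType_="ingestor" />
    </div>
  ),
  maxWidth: 90,
};
```

The string literal `"ingestor"` satisfies the `PropTypes.oneOf` whitelist added earlier; the source itself routes it through the `PluginsTypes` constants.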
diff --git a/frontend/src/components/plugins/types/utils.jsx b/frontend/src/components/plugins/types/utils.jsx
index 9b2daec217..4a653d2686 100644
--- a/frontend/src/components/plugins/types/utils.jsx
+++ b/frontend/src/components/plugins/types/utils.jsx
@@ -74,9 +74,11 @@ export function PluginInfoCard({ pluginInfo }) {
           Except:
           <ul …>
-            {pluginInfo.not_supported_filetypes.sort().map((v) => (
-              <li …>{v}</li>
-            ))}
+            {pluginInfo.not_supported_filetypes
+              .sort()
+              .map((unsupportedFiletype) => (
+                <li …>{unsupportedFiletype}</li>
+              ))}
           </ul>
         )}
@@ -256,7 +258,7 @@ export function PluginVerificationIcon({ pluginName, verification }) {
   );
 }
 
-export function PlaybooksCollapse({ value, pluginType_ }) {
+export function PlaybooksCollapse({ pluginList, pluginType_ }) {
   // local state
   const [isOpen, setIsOpen] = React.useState(false);
   return (
@@ -267,14 +269,14 @@
       id="PlaybooksCollapse"
     >
-      {value?.length} {pluginType_}{" "}
+      {pluginList?.length} {pluginType_}{" "}
       …
       <ul …>
-        {value?.sort().map((v) => (
-          <li …>{v}</li>
+        {pluginList?.sort().map((pluginName) => (
+          <li …>{pluginName}</li>
         ))}
       </ul>
@@ -331,7 +333,7 @@ PluginVerificationIcon.propTypes = {
 };
 
 PlaybooksCollapse.propTypes = {
-  value: PropTypes.array.isRequired,
+  pluginList: PropTypes.array.isRequired,
   pluginType_: PropTypes.oneOf(Object.values(PluginsTypes)).isRequired,
 };
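With `PlaybooksCollapse` now taking `pluginList` instead of the generic `value`, its call sites (see the playbook columns above) change in lockstep; a before/after sketch, assuming `"analyzer"` is among `Object.values(PluginsTypes)`:

```jsx
// before: the prop name said nothing about the content
<PlaybooksCollapse value={row.analyzers} pluginType_="analyzer" />

// after: the prop name matches the array of plugin names it renders
<PlaybooksCollapse pluginList={row.analyzers} pluginType_="analyzer" />
```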
diff --git a/frontend/src/components/scan/ScanForm.jsx b/frontend/src/components/scan/ScanForm.jsx
index 23379f3456..1b1b9b0167 100644
--- a/frontend/src/components/scan/ScanForm.jsx
+++ b/frontend/src/components/scan/ScanForm.jsx
@@ -302,93 +302,101 @@ export default function ScanForm() {
   const analyzersOptions = React.useMemo(
     () =>
       analyzersGrouped[formik.values.classification]
-        .map((v) => ({
-          isDisabled: !v.verification.configured || v.disabled,
-          value: v.name,
+        .map((analyzer) => ({
+          isDisabled: !analyzer.verification.configured || analyzer.disabled,
+          value: analyzer.name,
           label: (
             <div …>
               <div …>
-                {v.name}&nbsp;
+                {analyzer.name}&nbsp;
                 <small …>
-                  {markdownToHtml(v.description)}
+                  {markdownToHtml(analyzer.description)}
                 </small>
               </div>
-              {!v.verification.configured && (
+              {!analyzer.verification.configured && (
                 <div …>
-                  ⚠ {v.verification.details}
+                  ⚠ {analyzer.verification.details}
                 </div>
               )}
             </div>
           ),
-          labelDisplay: v.name,
+          labelDisplay: analyzer.name,
         }))
-        .sort((a, b) =>
+        .sort((currentAnalyzer, nextAnalyzer) =>
           // eslint-disable-next-line no-nested-ternary
-          a.isDisabled === b.isDisabled ? 0 : a.isDisabled ? 1 : -1,
+          currentAnalyzer.isDisabled === nextAnalyzer.isDisabled
+            ? 0
+            : currentAnalyzer.isDisabled
+            ? 1
+            : -1,
         ),
     [analyzersGrouped, formik.values.classification],
   );
   const connectorOptions = React.useMemo(
     () =>
       connectors
-        .map((v) => ({
-          isDisabled: !v.verification.configured || v.disabled,
-          value: v.name,
+        .map((connector) => ({
+          isDisabled: !connector.verification.configured || connector.disabled,
+          value: connector.name,
           label: (
             <div …>
               <div …>
-                {v.name}&nbsp;
+                {connector.name}&nbsp;
                 <small …>
-                  {markdownToHtml(v.description)}
+                  {markdownToHtml(connector.description)}
                 </small>
               </div>
-              {!v.verification.configured && (
+              {!connector.verification.configured && (
                 <div …>
-                  ⚠ {v.verification.details}
+                  ⚠ {connector.verification.details}
                 </div>
               )}
             </div>
           ),
-          labelDisplay: v.name,
+          labelDisplay: connector.name,
         }))
-        .sort((a, b) =>
+        .sort((currentConnector, nextConnector) =>
           // eslint-disable-next-line no-nested-ternary
-          a.isDisabled === b.isDisabled ? 0 : a.isDisabled ? 1 : -1,
+          currentConnector.isDisabled === nextConnector.isDisabled
+            ? 0
+            : currentConnector.isDisabled
+            ? 1
+            : -1,
        ),
     [connectors],
   );
 
   const playbookOptions = (classification) =>
     playbooksGrouped[classification]
-      .map((v) => ({
-        isDisabled: v.disabled,
-        value: v.name,
-        analyzers: v.analyzers,
-        connectors: v.connectors,
-        visualizers: v.visualizers,
-        pivots: v.pivots,
+      .map((playbook) => ({
+        isDisabled: playbook.disabled,
+        value: playbook.name,
+        analyzers: playbook.analyzers,
+        connectors: playbook.connectors,
+        visualizers: playbook.visualizers,
+        pivots: playbook.pivots,
         label: (
           <div …>
-            {v.name}&nbsp;
+            {playbook.name}&nbsp;
            <small …>
-              {markdownToHtml(v.description)}
+              {markdownToHtml(playbook.description)}
             </small>
           </div>
         ),
-        labelDisplay: v.name,
-        tags: v.tags.map((tag) => ({
+        labelDisplay: playbook.name,
+        tags: playbook.tags.map((tag) => ({
           value: tag,
           label: <… />,
         })),
-        tlp: v.tlp,
-        scan_mode: `${v.scan_mode}`,
-        scan_check_time: v.scan_check_time,
-        runtime_configuration: v.runtime_configuration,
+        tlp: playbook.tlp,
+        scan_mode: `${playbook.scan_mode}`,
+        scan_check_time: playbook.scan_check_time,
+        runtime_configuration: playbook.runtime_configuration,
       }))
      .filter((item) => !item.isDisabled);
 
@@ -535,7 +543,7 @@ export default function ScanForm() {
   const [isModalOpen, setModalOpen] = React.useState(false);
   const toggleModal = React.useCallback(
-    () => setModalOpen((o) => !o),
+    () => setModalOpen((open) => !open),
     [setModalOpen],
   );
 
@@ -569,15 +577,15 @@ export default function ScanForm() {
-            {[JobTypes.OBSERVABLE, JobTypes.FILE].map((ch) => (
-              <… … onChange={() => {
+            {[JobTypes.OBSERVABLE, JobTypes.FILE].map((jobType) => (
+              <… … onChange={() => {
                 formik.setFieldValue(
                   "observableType",
@@ -604,7 +612,7 @@ export default function ScanForm() {
               }}
             />
-            <… {ch} … />
+            <… {jobType} … />
@@ -653,9 +661,9 @@ export default function ScanForm() {
                 formik.touched.observable_names &&
                 formik.touched.observable_names[index]
               }
-              onChange={(e) =>
+              onChange={(event) =>
                 updateSelectedObservable(
-                  e.target.value,
+                  event.target.value,
                   index,
                 )
               }
@@ -779,8 +787,8 @@ export default function ScanForm() {
-                  onChange={(v) =>
-                    formik.setFieldValue("analyzers", v, false)
+                  onChange={(value) =>
+                    formik.setFieldValue("analyzers", value, false)
                   }
                 />
               )}
@@ -797,8 +805,8 @@ export default function ScanForm() {
-                  onChange={(v) =>
-                    formik.setFieldValue("connectors", v, false)
+                  onChange={(value) =>
+                    formik.setFieldValue("connectors", value, false)
                   }
                 />
               )}
@@ -821,7 +829,9 @@ export default function ScanForm() {
                   options={playbookOptions(formik.values.classification)}
                   styles={selectStyles}
                   value={formik.values.playbook}
-                  onChange={(v) => updateSelectedPlaybook(v)}
+                  onChange={(selectedPlaybook) =>
+                    updateSelectedPlaybook(selectedPlaybook)
+                  }
                 />
               )}
             />
@@ -833,17 +843,17 @@ export default function ScanForm() {
-            {TlpChoices.map((ch) => (
-              <…
-