-
-
Notifications
You must be signed in to change notification settings - Fork 529
spamhaus_drop analyzer, closes #2408 #2422
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Merged
Merged
Changes from 12 commits
Commits
Show all changes
23 commits
Select commit
Hold shift + click to select a range
7043e87
spamhaus_drop
g4ze 7cd712c
spamhaus_drop
g4ze 2364587
ip matching
g4ze f4e8b76
Merge branch 'develop' of https://github.com/intelowlproject/IntelOwl…
g4ze b227431
migratiuons
g4ze 08728ae
migrations
g4ze c049150
tests
g4ze 631e77b
tests
g4ze 5bd4606
tests
g4ze 6846570
tests
g4ze 3bd9164
tests
g4ze 4cab214
tests
g4ze 2f190f6
IocFinder
g4ze 8c9c8f8
bool
g4ze 644f4ad
Merge branch 'develop' of https://github.com/intelowlproject/IntelOwl…
g4ze 9a65657
mign
g4ze 6bbb64e
docs
g4ze 5dd0535
Merge branch 'develop' of https://github.com/intelowlproject/IntelOwl…
g4ze a552ac7
mign
g4ze 6b2573c
Merge branch 'develop' of https://github.com/intelowlproject/IntelOwl…
g4ze 0f57017
mign
g4ze ff125fd
Merge branch 'Ioc-finder-Analyzer-Closes#1229' of https://github.com/…
g4ze 5c39daa
mign
g4ze File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
There are no files selected for viewing
126 changes: 126 additions & 0 deletions
126
api_app/analyzers_manager/migrations/0107_analyzer_config_spamhaus_drop.py
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,126 @@ | ||
| from django.db import migrations | ||
| from django.db.models.fields.related_descriptors import ( | ||
| ForwardManyToOneDescriptor, | ||
| ForwardOneToOneDescriptor, | ||
| ManyToManyDescriptor, | ||
| ) | ||
|
|
||
# Serialized AnalyzerConfig for the Spamhaus_DROP analyzer; consumed by
# migrate() below through _create_object().
plugin = {
    "python_module": {
        # No periodic health check; the local copy of the blocklist is
        # refreshed by the crontab below (midnight, every day).
        "health_check_schedule": None,
        "update_schedule": {
            "minute": "0",
            "hour": "0",
            "day_of_week": "*",
            "day_of_month": "*",
            "month_of_year": "*",
        },
        "module": "spamhaus_drop.SpamhausDropV4",
        "base_path": "api_app.analyzers_manager.observable_analyzers",
    },
    "name": "Spamhaus_DROP",
    "description": "[Spamhaus_DROP](https://www.spamhaus.org/blocklists/do-not-route-or-peer/) protects from activity directly originating from rogue networks, such as spam campaigns, encryption via ransomware, DNS-hijacking and exploit attempts, authentication attacks to discover working access credentials, harvesting, DDoS attacks.",
    "disabled": False,
    "soft_time_limit": 10,
    "routing_key": "default",
    "health_check_status": True,
    "type": "observable",
    "docker_based": False,
    "maximum_tlp": "AMBER",
    "observable_supported": ["ip"],
    "supported_filetypes": [],
    "run_hash": False,
    "run_hash_type": "",
    "not_supported_filetypes": [],
    # Dotted app_label.ModelName; popped by migrate()/reverse_migrate()
    # to resolve the target model.
    "model": "analyzers_manager.AnalyzerConfig",
}

# This analyzer declares no Parameter rows ...
params = []

# ... and no default PluginConfig values.
values = []
def _get_real_obj(Model, field, value):
    """Deserialize ``value`` into the real related object(s) for ``Model.field``.

    Only relation fields (FK, O2O, M2M) are resolved; any other field's
    value is returned untouched. Falsy values are also passed through.
    """

    def _resolve(model_cls, related_model, raw):
        # A dict is a nested serialized object: resolve each entry
        # recursively, then get-or-create the related row from it.
        if isinstance(raw, dict):
            resolved = {
                key: _get_real_obj(related_model, key, nested)
                for key, nested in raw.items()
            }
            return related_model.objects.get_or_create(**resolved)[0]
        # An int is a serialized primary key — except for PluginConfig,
        # whose relation is looked up by the plugin's name instead.
        if isinstance(raw, int):
            if model_cls.__name__ == "PluginConfig":
                return related_model.objects.get(name=plugin["name"])
            return related_model.objects.get(pk=raw)
        # Anything else is treated as a natural key (the row's name).
        return related_model.objects.get(name=raw)

    descriptor = getattr(Model, field)
    # Exact type checks (not isinstance) mirror how Django exposes the
    # relation descriptors on the model class.
    if type(descriptor) in (
        ForwardManyToOneDescriptor,
        ForwardOneToOneDescriptor,
    ) and value:
        value = _resolve(Model, descriptor.get_queryset().model, value)
    elif type(descriptor) is ManyToManyDescriptor and value:
        related_model = descriptor.rel.model
        value = [_resolve(Model, related_model, item) for item in value]
    return value
|
|
||
|
|
||
def _create_object(Model, data):
    """Get or create one ``Model`` row from the serialized ``data`` dict.

    Returns ``True`` when a matching row already existed and ``False``
    when a new row was created (migrate() uses the ``False`` case to know
    the related params/values still need creating).
    """
    m2m_fields, scalar_fields = {}, {}
    # Resolve serialized values and split them: m2m relations can only be
    # attached after the row exists, everything else goes in the constructor.
    for field_name, raw in data.items():
        resolved = _get_real_obj(Model, field_name, raw)
        if type(getattr(Model, field_name)) is ManyToManyDescriptor:
            m2m_fields[field_name] = resolved
        else:
            scalar_fields[field_name] = resolved
    try:
        Model.objects.get(**scalar_fields)
    except Model.DoesNotExist:
        obj = Model(**scalar_fields)
        obj.full_clean()  # validate before touching the database
        obj.save()
        for field_name, resolved in m2m_fields.items():
            relation = getattr(obj, field_name)
            if resolved is not None:
                relation.set(resolved)
        return False
    return True
|
|
||
|
|
||
def migrate(apps, schema_editor):
    """Forward data migration: create the analyzer config and its extras.

    Parameters and plugin-config values are only created when the config
    row itself was just created (``_create_object`` returned False).
    """
    parameter_model = apps.get_model("api_app", "Parameter")
    plugin_config_model = apps.get_model("api_app", "PluginConfig")
    # NOTE: pop() mutates the module-level `plugin` dict.
    app_label, model_name = plugin.pop("model").split(".")
    config_model = apps.get_model(app_label, model_name)
    if config_model.objects.filter(name=plugin["name"]).exists():
        return
    already_existed = _create_object(config_model, plugin)
    if already_existed:
        return
    for param in params:
        _create_object(parameter_model, param)
    for value in values:
        _create_object(plugin_config_model, value)
|
|
||
|
|
||
def reverse_migrate(apps, schema_editor):
    """Reverse data migration: delete the analyzer config created above.

    NOTE(review): like migrate(), this pops "model" from the shared
    `plugin` dict — running both directions in one process would raise
    KeyError on the second pop; confirm this matches the other IntelOwl
    migration templates.
    """
    app_label, model_name = plugin.pop("model").split(".")
    config_model = apps.get_model(app_label, model_name)
    config_model.objects.get(name=plugin["name"]).delete()
|
|
||
|
|
||
class Migration(migrations.Migration):
    # Data migration run via RunPython.
    # NOTE(review): atomic=False disables the per-migration transaction —
    # presumably so partially-created rows survive a late failure; confirm
    # against the project's other analyzer-config migrations.
    atomic = False
    dependencies = [
        ("api_app", "0062_alter_parameter_python_module"),
        (
            "analyzers_manager",
            "0106_analyzer_config_leakix",
        ),
    ]

    operations = [migrations.RunPython(migrate, reverse_migrate)]
100 changes: 100 additions & 0 deletions
100
api_app/analyzers_manager/observable_analyzers/spamhaus_drop.py
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,100 @@ | ||
| import bisect | ||
| import ipaddress | ||
| import json | ||
| import logging | ||
| import os | ||
|
|
||
| import requests | ||
| from django.conf import settings | ||
|
|
||
| from api_app.analyzers_manager import classes | ||
| from api_app.analyzers_manager.exceptions import AnalyzerRunException | ||
| from tests.mock_utils import MockUpResponse, if_mock_connections, patch | ||
|
|
||
| logger = logging.getLogger(__name__) | ||
|
|
||
|
|
||
class SpamhausDropV4(classes.ObservableAnalyzer):
    """Look up an IP in a locally cached copy of the Spamhaus DROP v4 list.

    The JSON-lines feed is downloaded to MEDIA_ROOT on first run (and on
    the configured update schedule) and searched with bisect for a CIDR
    entry containing the observable.
    """

    url = "https://www.spamhaus.org/drop/drop_v4.json"

    @classmethod
    def location(cls) -> str:
        """Return the filesystem path of the cached blocklist database."""
        db_name = "drop_v4.json"
        return f"{settings.MEDIA_ROOT}/{db_name}"

    def run(self):
        ip = ipaddress.ip_address(self.observable_name)
        database_location = self.location()
        if not os.path.exists(database_location):
            logger.info(
                f"Database does not exist in {database_location}, initialising..."
            )
            self.update()
        with open(database_location, "r") as f:
            db = json.load(f)

        # BUGFIX: the feed is a v4-only list; a v6 observable would make the
        # IPv4Address/IPv6Address comparisons below raise TypeError.
        if ip.version != 4:
            return {"found": False}

        # BUGFIX: the live feed can contain non-CIDR records (e.g. a trailing
        # metadata entry); drop them so the key lookup cannot raise KeyError.
        db = [entry for entry in db if "cidr" in entry]

        # Entries are assumed sorted by network address (the Spamhaus feed
        # is published that way). bisect's `key` argument needs Python 3.10+.
        insertion = bisect.bisect_left(
            db, ip, key=lambda x: ipaddress.ip_network(x["cidr"]).network_address
        )
        matches = []
        # BUGFIX (off-by-one): a network containing `ip` starts at or BEFORE
        # `ip`, so it sits at index insertion-1 (network_address < ip) or at
        # insertion (network_address == ip). The original scan started at
        # `insertion` and therefore only matched IPs exactly equal to a
        # network address. Starting one entry earlier fixes that.
        # NOTE(review): assumes entries do not nest (Spamhaus publishes
        # aggregated, non-overlapping CIDRs) — confirm; nested supernets
        # further back would be missed.
        for i in range(max(insertion - 1, 0), len(db)):
            network = ipaddress.ip_network(db[i]["cidr"])
            if network.network_address > ip:
                # sorted order: no later entry can contain ip
                break
            if ip in network:
                matches.append(db[i])
        if matches:
            return {"found": True, "details": matches}

        return {"found": False}

    @classmethod
    def update(cls):
        """Download the feed and store it as a JSON array on disk.

        Raises ``requests.HTTPError`` on a bad response and
        ``AnalyzerRunException`` (via convert_to_json) on malformed lines.
        """
        logger.info(f"Updating database from {cls.url}")
        response = requests.get(url=cls.url)
        response.raise_for_status()
        data = cls.convert_to_json(response.text)
        database_location = cls.location()

        with open(database_location, "w", encoding="utf-8") as f:
            json.dump(data, f)
        logger.info(f"Database updated at {database_location}")

    @staticmethod
    def convert_to_json(input_string) -> dict:
        """Parse the JSON-lines feed body into a list of dicts.

        (Return annotation kept for interface compatibility; the actual
        return value is a list.)
        """
        lines = input_string.strip().split("\n")
        json_objects = []
        for line in lines:
            line = line.strip()
            if not line:
                continue
            try:
                json_obj = json.loads(line)
                json_objects.append(json_obj)
            except json.JSONDecodeError:
                raise AnalyzerRunException(
                    "Invalid JSON format in the response while updating the database"
                )

        return json_objects

    @classmethod
    def _monkeypatch(cls):
        """Mock the feed download for tests (two sample DROP records)."""
        mock_data = (
            '{"cidr": "1.10.16.0/20", "sblid": "SBL256894", "rir": "apnic"}\n'
            '{"cidr": "2.56.192.0/22", "sblid": "SBL459831", "rir": "ripencc"}'
        )
        patches = [
            if_mock_connections(
                patch(
                    "requests.get",
                    return_value=MockUpResponse(
                        mock_data,
                        200,
                    ),
                ),
            )
        ]
        return super()._monkeypatch(patches=patches)
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
Uh oh!
There was an error while loading. Please reload this page.