Skip to content
This repository was archived by the owner on Nov 2, 2024. It is now read-only.

Commit d25f4c9

Browse files
g4zeMichalsus
authored and committed
spamhaus_drop analyzer, closes intelowlproject#2408 (intelowlproject#2422)
* spamhaus_drop * spamhaus_drop * ip matching * migratiuons * migrations * tests * tests * tests * tests * tests * tests * IocFinder * bool * mign * docs * mign * mign * mign
1 parent 92a2ed5 commit d25f4c9

File tree

3 files changed

+229
-1
lines changed

3 files changed

+229
-1
lines changed
Lines changed: 126 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,126 @@
1+
from django.db import migrations
2+
from django.db.models.fields.related_descriptors import (
3+
ForwardManyToOneDescriptor,
4+
ForwardOneToOneDescriptor,
5+
ManyToManyDescriptor,
6+
)
7+
8+
# Declarative description of the AnalyzerConfig row this data migration creates.
# The "model" key identifies the target model and is popped by migrate() before
# the remaining keys are used as field values.
plugin = {
    "python_module": {
        "health_check_schedule": None,
        # crontab-style schedule: refresh the Spamhaus DROP database daily at 00:00
        "update_schedule": {
            "minute": "0",
            "hour": "0",
            "day_of_week": "*",
            "day_of_month": "*",
            "month_of_year": "*",
        },
        "module": "spamhaus_drop.SpamhausDropV4",
        "base_path": "api_app.analyzers_manager.observable_analyzers",
    },
    "name": "Spamhaus_DROP",
    "description": "[Spamhaus_DROP](https://www.spamhaus.org/blocklists/do-not-route-or-peer/) protects from activity directly originating from rogue networks, such as spam campaigns, encryption via ransomware, DNS-hijacking and exploit attempts, authentication attacks to discover working access credentials, harvesting, DDoS attacks.",
    "disabled": False,
    "soft_time_limit": 10,
    "routing_key": "default",
    "health_check_status": True,
    "type": "observable",
    "docker_based": False,
    "maximum_tlp": "AMBER",
    "observable_supported": ["ip"],
    "supported_filetypes": [],
    "run_hash": False,
    "run_hash_type": "",
    "not_supported_filetypes": [],
    "model": "analyzers_manager.AnalyzerConfig",
}

# This analyzer declares no extra Parameter rows ...
params = []

# ... and no PluginConfig values; both lists are kept for symmetry with the
# other generated analyzer migrations in this app.
values = []
41+
42+
43+
def _get_real_obj(Model, field, value):
    """Resolve a serialized *value* for ``Model.field`` into real object(s).

    Relational fields are materialized: a dict is resolved recursively and
    get_or_create'd; an int is treated as a primary key (or, when resolving
    for PluginConfig, as a reference to this plugin looked up by name); any
    other scalar is looked up by ``name``. Non-relational values (or falsy
    ones) are returned untouched.
    """

    def _materialize(other_model, raw):
        if isinstance(raw, dict):
            # Resolve every nested field first, then get-or-create the row.
            resolved = {
                key: _get_real_obj(other_model, key, nested)
                for key, nested in raw.items()
            }
            return other_model.objects.get_or_create(**resolved)[0]
        # Otherwise the value is just a serialized primary key or unique name.
        if isinstance(raw, int):
            if Model.__name__ == "PluginConfig":
                return other_model.objects.get(name=plugin["name"])
            return other_model.objects.get(pk=raw)
        return other_model.objects.get(name=raw)

    descriptor = getattr(Model, field)
    if value:
        if type(descriptor) in (
            ForwardManyToOneDescriptor,
            ForwardOneToOneDescriptor,
        ):
            value = _materialize(descriptor.get_queryset().model, value)
        elif type(descriptor) in (ManyToManyDescriptor,):
            related_model = descriptor.rel.model
            value = [_materialize(related_model, item) for item in value]
    return value
72+
73+
74+
def _create_object(Model, data):
    """Get or create a *Model* row from serialized *data*.

    Returns True when a matching row already existed and False when a new
    row was created. Many-to-many fields are split out and populated only
    after the initial save, since they require a primary key.
    """
    m2m_values, plain_values = {}, {}
    for field, raw in data.items():
        resolved = _get_real_obj(Model, field, raw)
        if type(getattr(Model, field)) is ManyToManyDescriptor:
            m2m_values[field] = resolved
        else:
            plain_values[field] = resolved
    try:
        Model.objects.get(**plain_values)
    except Model.DoesNotExist:
        obj = Model(**plain_values)
        obj.full_clean()
        obj.save()
        # m2m relations can only be set once the row exists in the database
        for field, related in m2m_values.items():
            manager = getattr(obj, field)
            if related is not None:
                manager.set(related)
        return False
    return True
94+
95+
96+
def migrate(apps, schema_editor):
    """Forward migration: register the Spamhaus_DROP analyzer configuration."""
    parameter_model = apps.get_model("api_app", "Parameter")
    plugin_config_model = apps.get_model("api_app", "PluginConfig")
    app_label, model_name = plugin.pop("model").split(".")
    config_model = apps.get_model(app_label, model_name)
    if config_model.objects.filter(name=plugin["name"]).exists():
        return
    already_existed = _create_object(config_model, plugin)
    if already_existed:
        return
    # Seed related parameters/values only for a freshly created config.
    for param in params:
        _create_object(parameter_model, param)
    for value in values:
        _create_object(plugin_config_model, value)
108+
109+
110+
def reverse_migrate(apps, schema_editor):
    """Reverse migration: remove the Spamhaus_DROP analyzer configuration.

    Reads the model path with ``get`` instead of ``pop`` so the module-level
    ``plugin`` dict is not mutated (``migrate()`` may already have popped the
    key if both directions run in the same process), and deletes via
    ``filter(...).delete()`` so reversing is a no-op — rather than a
    ``DoesNotExist`` crash — when the config was never created.
    """
    python_path = plugin.get("model", "analyzers_manager.AnalyzerConfig")
    Model = apps.get_model(*python_path.split("."))
    Model.objects.filter(name=plugin["name"]).delete()
114+
115+
116+
class Migration(migrations.Migration):
    # Data migration that seeds the Spamhaus_DROP AnalyzerConfig row.
    # NOTE(review): atomic=False presumably matches the convention of the
    # sibling generated migrations in this app — confirm before changing.
    atomic = False
    dependencies = [
        ("api_app", "0062_alter_parameter_python_module"),
        (
            "analyzers_manager",
            "0109_analyzer_config_iocfinder",
        ),
    ]

    operations = [migrations.RunPython(migrate, reverse_migrate)]
Lines changed: 100 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,100 @@
1+
import bisect
2+
import ipaddress
3+
import json
4+
import logging
5+
import os
6+
7+
import requests
8+
from django.conf import settings
9+
10+
from api_app.analyzers_manager import classes
11+
from api_app.analyzers_manager.exceptions import AnalyzerRunException
12+
from tests.mock_utils import MockUpResponse, if_mock_connections, patch
13+
14+
logger = logging.getLogger(__name__)
15+
16+
17+
class SpamhausDropV4(classes.ObservableAnalyzer):
    """Check an IP address against the Spamhaus DROP v4 blocklist.

    The blocklist is cached on disk as a JSON array (converted from the
    feed's newline-delimited JSON) and refreshed by :meth:`update`, which the
    analyzer configuration schedules daily.
    """

    url = "https://www.spamhaus.org/drop/drop_v4.json"

    @classmethod
    def location(cls) -> str:
        """Return the on-disk path of the cached DROP database."""
        db_name = "drop_v4.json"
        return f"{settings.MEDIA_ROOT}/{db_name}"

    def run(self):
        """Return {"found": bool, ...} for the observable IP.

        Raises ValueError (via ipaddress.ip_address) if the observable is not
        a valid IP address.
        """
        ip = ipaddress.ip_address(self.observable_name)
        database_location = self.location()
        if not os.path.exists(database_location):
            logger.info(
                f"Database does not exist in {database_location}, initialising..."
            )
            self.update()
        with open(database_location, "r") as f:
            db = json.load(f)

        # Linear scan instead of bisect: a network containing `ip` starts at
        # or *before* it, so a bisect-at-ip lookup that only inspects entries
        # from the insertion point onward misses every match whose base
        # address is below the ip. The DROP list is small (~1k entries), is
        # not guaranteed to be sorted, and its trailing metadata record has
        # no "cidr" key — all of which the scan below handles.
        matches = []
        for entry in db:
            cidr = entry.get("cidr") if isinstance(entry, dict) else None
            if not cidr:
                # e.g. the feed's metadata/copyright record
                continue
            try:
                network = ipaddress.ip_network(cidr)
            except ValueError:
                logger.warning(f"Skipping invalid CIDR in database: {cidr}")
                continue
            if ip in network:
                matches.append(entry)

        if matches:
            return {"found": True, "details": matches}
        return {"found": False}

    @classmethod
    def update(cls):
        """Download the DROP feed and rewrite the on-disk JSON database.

        Raises requests.HTTPError on a bad response and AnalyzerRunException
        (via convert_to_json) on malformed feed content.
        """
        logger.info(f"Updating database from {cls.url}")
        response = requests.get(url=cls.url)
        response.raise_for_status()
        data = cls.convert_to_json(response.text)
        database_location = cls.location()

        with open(database_location, "w", encoding="utf-8") as f:
            json.dump(data, f)
        logger.info(f"Database updated at {database_location}")

    @staticmethod
    def convert_to_json(input_string) -> list:
        """Parse the feed's newline-delimited JSON into a list of dicts.

        Blank lines are skipped; any unparsable line aborts the update with
        an AnalyzerRunException. (Return annotation fixed: the original said
        ``dict`` but a list has always been returned.)
        """
        json_objects = []
        for line in input_string.strip().split("\n"):
            line = line.strip()
            if not line:
                continue
            try:
                json_objects.append(json.loads(line))
            except json.JSONDecodeError:
                raise AnalyzerRunException(
                    "Invalid JSON format in the response while updating the database"
                )
        return json_objects

    @classmethod
    def _monkeypatch(cls):
        # Two representative DROP records in the feed's NDJSON wire format.
        mock_data = (
            '{"cidr": "1.10.16.0/20", "sblid": "SBL256894", "rir": "apnic"}\n'
            '{"cidr": "2.56.192.0/22", "sblid": "SBL459831", "rir": "ripencc"}'
        )
        patches = [
            if_mock_connections(
                patch(
                    "requests.get",
                    return_value=MockUpResponse(
                        mock_data,
                        200,
                    ),
                ),
            )
        ]
        return super()._monkeypatch(patches=patches)

docs/source/Usage.md

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -266,11 +266,13 @@ The following is the list of the available analyzers you can run out-of-the-box.
266266
* `OrklSearch`:[Orkl](https://orkl.eu/) is the Community Driven Cyber Threat Intelligence Library.
267267
* `Crt_sh`:[Crt_Sh](https://crt.sh/) lets you get certificates info about a domain.
268268
* `Spamhaus_WQS`:[Spamhaus_WQS](https://docs.spamhaus.com/datasets/docs/source/70-access-methods/web-query-service/000-intro.html) The Spamhaus Web Query Service (WQS) is a method of accessing Spamhaus block lists using the HTTPS protocol.
269-
* `Adguard`: [Adguard](https://github.com/AdguardTeam/AdguardSDNSFilter), a filter composed of several other filters (AdGuard Base filter, Social media filter, Tracking Protection filter, Mobile Ads filter, EasyList and EasyPrivacy) and simplified specifically to be better compatible with DNS-level ad blocking.* `Adguard`: [Adguard](https://github.com/AdguardTeam/AdguardSDNSFilter), a filter composed of several other filters (AdGuard Base filter, Social media filter, Tracking Protection filter, Mobile Ads filter, EasyList and EasyPrivacy) and simplified specifically to be better compatible with DNS-level ad blocking.
269+
* `Adguard`:[Adguard](https://github.com/AdguardTeam/AdguardSDNSFilter), a filter composed of several other filters (AdGuard Base filter, Social media filter, Tracking Protection filter, Mobile Ads filter, EasyList and EasyPrivacy) and simplified specifically to be better compatible with DNS-level ad blocking.
270270
* `JA4_DB`:[JA4_DB](https://ja4db.com/) lets you search a fingerprint in the JA4 database.
271271
* `LeakIX`:[LeakIX](https://leakix.net/) is a red-team search engine indexing mis-configurations and vulnerabilities online.
272272
* `ApiVoid`:[ApiVoid](https://www.apivoid.com/) provides JSON APIs useful for cyber threat analysis, threat detection and
273273
threat prevention, reducing and automating the manual work of security analysts.
274+
* `Spamhaus_DROP`:[Spamhaus_DROP](https://www.spamhaus.org/blocklists/do-not-route-or-peer/) protects from activity directly originating from rogue networks, such as spam campaigns, encryption via ransomware, DNS-hijacking and exploit attempts, authentication attacks to discover working access credentials, harvesting, DDoS attacks.
275+
274276
##### Generic analyzers (email, phone number, etc.; anything really)
275277

276278
Some analyzers require details other than just IP, URL, Domain, etc. We classified them as `generic` Analyzers. Since the type of field is not known, there is a format for strings to be followed.

0 commit comments

Comments
 (0)