[Marketplace Contribution] Microsoft Sentinel - Content Pack Update #39322

Merged: 3 commits, Mar 30, 2025
49 changes: 30 additions & 19 deletions Packs/AzureSentinel/Integrations/AzureSentinel/AzureSentinel.py
@@ -473,6 +473,19 @@ def severity_to_level(severity):
return 0


def severity_filter(min_severity):
    """
    Create Severity Filter when min_severity >= Low.
    """
    severity_levels = ["Low", "Medium", "High"]
    severity_filter = ""
    if min_severity in severity_levels:
        min_level = severity_to_level(min_severity)
        conditions = [f"properties/severity eq '{s}'" for s in severity_levels if severity_to_level(s) >= min_level]
        severity_filter = f"and ({ ' or '.join(conditions) })"
    return severity_filter

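Read on its own, the helper turns the configured minimum severity into an OData `or` chain over the qualifying levels. Below is a minimal, self-contained sketch of that behavior; the numeric level mapping is an assumption for illustration and merely stands in for the integration's real `severity_to_level()`:

```python
# Minimal, runnable sketch of the new severity filter.
# NOTE: ASSUMED_LEVELS is illustrative only; the integration has its own
# severity_to_level() implementation.
ASSUMED_LEVELS = {"Informational": 0, "Low": 1, "Medium": 2, "High": 3}


def severity_to_level(severity):
    return ASSUMED_LEVELS.get(severity, 0)


def severity_filter(min_severity):
    """Build an OData clause matching severities at or above min_severity."""
    severity_levels = ["Low", "Medium", "High"]
    if min_severity not in severity_levels:
        return ""  # e.g. 'Informational': no clause, fetch every severity
    min_level = severity_to_level(min_severity)
    conditions = [f"properties/severity eq '{s}'" for s in severity_levels
                  if severity_to_level(s) >= min_level]
    return f"and ({' or '.join(conditions)})"


print(severity_filter("Medium"))
# and (properties/severity eq 'Medium' or properties/severity eq 'High')
print(repr(severity_filter("Informational")))
# ''
```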

def generic_list_incident_items(client, incident_id, items_kind, key_in_raw_result, outputs_prefix, xsoar_transformer):
"""
Get a list of incident's items
@@ -1312,7 +1325,7 @@ def fetch_incidents_additional_info(client: AzureSentinelClient, incidents: List
incident[info_type] = client.http_request(method, f'incidents/{incident_id}/{info_type}').get(results_key)


def fetch_incidents(client: AzureSentinelClient, last_run: dict, first_fetch_time: str, min_severity: int) -> tuple:
def fetch_incidents(client: AzureSentinelClient, last_run: dict, first_fetch_time: str, min_severity: str) -> tuple:
"""Fetching incidents.
Args:
first_fetch_time: The first fetch time.
@@ -1346,21 +1359,23 @@ def fetch_incidents(client: AzureSentinelClient, last_run: dict, first_fetch_tim

latest_created_time_str = latest_created_time.strftime(DATE_FORMAT)
command_args = {
'filter': f'properties/createdTimeUtc ge {latest_created_time_str}',
'filter': f'properties/createdTimeUtc ge {latest_created_time_str} {severity_filter(min_severity)}',
'orderby': 'properties/createdTimeUtc asc',
'limit': limit
}
demisto.debug(f"Filter query used:{command_args['filter']}")

else:
demisto.debug("last fetch time is empty, trying to fetch incidents by last incident id")
latest_created_time = dateparser.parse(last_fetch_time)
if latest_created_time is None:
raise DemistoException(f"{last_fetch_time=} couldn't be parsed")
command_args = {
'filter': f'properties/incidentNumber gt {last_incident_number}',
'filter': f'properties/incidentNumber gt {last_incident_number} {severity_filter(min_severity)}',
'orderby': 'properties/incidentNumber asc',
'limit': limit
}
demisto.debug(f"Filter query used:{command_args['filter']}")

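For context, here is a hedged sketch of how the composed `$filter` strings could look once the severity clause is appended; the timestamp and incident number are placeholder values, and when the minimum severity is Informational the clause is empty, leaving only the time or incident-number condition:

```python
# Illustration only: composing the two fetch filters with the severity clause.
# The timestamp and incident number below are hypothetical placeholders.
severity_clause = "and (properties/severity eq 'Medium' or properties/severity eq 'High')"
latest_created_time_str = "2025-03-01T00:00:00Z"  # placeholder
last_incident_number = 42                         # placeholder

time_based = f"properties/createdTimeUtc ge {latest_created_time_str} {severity_clause}"
number_based = f"properties/incidentNumber gt {last_incident_number} {severity_clause}"

print(time_based)
# properties/createdTimeUtc ge 2025-03-01T00:00:00Z and (properties/severity eq 'Medium' or properties/severity eq 'High')
print(number_based)
# properties/incidentNumber gt 42 and (properties/severity eq 'Medium' or properties/severity eq 'High')
```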
raw_incidents = list_incidents_command(client, command_args, is_fetch_incidents=True).outputs
if isinstance(raw_incidents, dict):
@@ -1371,14 +1386,14 @@ def fetch_incidents(client: AzureSentinelClient, last_run: dict, first_fetch_tim

fetch_incidents_additional_info(client, raw_incidents)

return process_incidents(raw_incidents, min_severity,
return process_incidents(raw_incidents,
latest_created_time, last_incident_number) # type: ignore[attr-defined]


def fetch_incidents_command(client, params):
# How much time before the first fetch to retrieve incidents
first_fetch_time = params.get('fetch_time', '3 days').strip()
min_severity = severity_to_level(params.get('min_severity', 'Informational'))
min_severity = params.get('min_severity', 'Informational')
# Set and define the fetch incidents command to run after activated via integration settings.
last_run = demisto.getLastRun()
demisto.debug(f"Current last run is {last_run}")
@@ -1393,14 +1408,13 @@ def fetch_incidents_command(client, params):
demisto.incidents(incidents)


def process_incidents(raw_incidents: list, min_severity: int, latest_created_time: datetime,
def process_incidents(raw_incidents: list, latest_created_time: datetime,
last_incident_number):
"""Processing the raw incidents
Args:
raw_incidents: The incidents that were fetched from the API.
last_incident_number: The last incident number that was fetched.
latest_created_time: The latest created time.
min_severity: The minimum severity.

Returns:
A next_run dictionary, and an array of incidents.
@@ -1417,23 +1431,20 @@ def process_incidents(raw_incidents: list, min_severity: int, latest_created_tim

incident_created_time = dateparser.parse(incident.get('CreatedTimeUTC'))
current_fetch_ids.append(incident.get('ID'))
if incident_severity >= min_severity:
add_mirroring_fields(incident)
xsoar_incident = {
'name': '[Azure Sentinel] ' + incident.get('Title'),
'occurred': incident.get('CreatedTimeUTC'),
'severity': incident_severity,
'rawJSON': json.dumps(incident)
}
incidents.append(xsoar_incident)
else:
demisto.debug(f"drop creation of {incident.get('IncidentNumber')=} "
f"due to the {incident_severity=} is lower then {min_severity=}")
add_mirroring_fields(incident)
xsoar_incident = {
'name': '[Azure Sentinel] ' + incident.get('Title'),
'occurred': incident.get('CreatedTimeUTC'),
'severity': incident_severity,
'rawJSON': json.dumps(incident)
}

# Update last run to the latest fetch time
if incident_created_time is None:
raise DemistoException(f"{incident.get('CreatedTimeUTC')=} couldn't be parsed")

incidents.append(xsoar_incident)

if incident_created_time > latest_created_time:
latest_created_time = incident_created_time
if incident.get('IncidentNumber') > last_incident_number:
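Because the query is already severity-filtered, `process_incidents` no longer needs the client-side check: it converts every fetched incident while still advancing the last-run watermarks. A rough, self-contained sketch of that loop follows; the incident dict keys and the `next_run` shape are simplified assumptions, not the integration's exact structures:

```python
import json


def sketch_process_incidents(raw_incidents, latest_created_time, last_incident_number):
    """Simplified illustration of the new loop: no per-incident severity check."""
    incidents = []
    for incident in raw_incidents:
        incidents.append({
            'name': '[Azure Sentinel] ' + incident['Title'],
            'occurred': incident['CreatedTimeUTC'],
            'severity': incident['Severity'],
            'rawJSON': json.dumps(incident),
        })
        # Advance both watermarks so the next fetch starts after this batch.
        # ISO-8601 strings in the same format compare chronologically; the real
        # code parses them with dateparser instead.
        latest_created_time = max(latest_created_time, incident['CreatedTimeUTC'])
        last_incident_number = max(last_incident_number, incident['IncidentNumber'])
    next_run = {'last_fetch_time': latest_created_time,
                'last_incident_number': last_incident_number}
    return next_run, incidents


demo = [{'Title': 'Test', 'CreatedTimeUTC': '2025-03-02T10:00:00Z',
         'Severity': 2, 'IncidentNumber': 7}]
print(sketch_process_incidents(demo, '2025-03-01T00:00:00Z', 1))
```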
@@ -196,6 +196,13 @@ configuration:
section: Collect
advanced: true
required: false
- defaultvalue: '1'
display: Incidents Fetch Interval
name: incidentFetchInterval
required: false
type: 19
section: Collect
advanced: true
description: "Microsoft Sentinel is a scalable, cloud-native solution that provides: Security information and event management (SIEM) Security orchestration, automation, and response (SOAR)."
display: Microsoft Sentinel
name: Azure Sentinel
@@ -1273,12 +1273,11 @@ def test_process_incidents(self, args, client, expected_result):
"""
# prepare
raw_incidents = [MOCKED_RAW_INCIDENT_OUTPUT.get('value')[0]]
min_severity = args.get('min_severity')
last_incident_number = args.get('last_incident_number')
latest_created_time = dateparser.parse('2020-02-02T14:05:01.5348545Z')

# run
next_run, _ = process_incidents(raw_incidents, min_severity, latest_created_time,
next_run, _ = process_incidents(raw_incidents, latest_created_time,
last_incident_number)

# validate
@@ -1307,7 +1306,7 @@ def test_last_run_in_fetch_incidents(self, mocker):
last_run = {'last_fetch_time': '2022-03-16T13:01:08Z',
'last_fetch_ids': []}
first_fetch_time = '3 days'
minimum_severity = 0
minimum_severity = 'Informational'

mocker.patch('AzureSentinel.process_incidents', return_value=({}, []))
mocker.patch.object(client, 'http_request', return_value=MOCKED_INCIDENTS_OUTPUT)
@@ -1340,7 +1339,7 @@ def test_last_run_in_fetch_incidents_duplicates(self, mocker):
last_run = {'last_fetch_time': '2022-03-16T13:01:08Z',
'last_fetch_ids': ['inc_name']}
first_fetch_time = '3 days'
minimum_severity = 0
minimum_severity = 'Informational'

process_mock = mocker.patch('AzureSentinel.process_incidents', return_value=({}, []))
mocker.patch.object(client, 'http_request', return_value=MOCKED_INCIDENTS_OUTPUT)
@@ -1351,7 +1350,7 @@ def test_last_run_in_fetch_incidents_duplicates(self, mocker):
# validate
assert not process_mock.call_args[0][0]

@pytest.mark.parametrize('min_severity, expected_incident_num', [(1, 2), (3, 1)])
@pytest.mark.parametrize('min_severity, expected_incident_num', [(1, 2), (3, 2)])
def test_last_fetched_incident_for_various_severity_levels(self, mocker, min_severity, expected_incident_num):
"""
Given:
@@ -1370,7 +1369,6 @@ def test_last_fetched_incident_for_various_severity_levels(self, mocker, min_sev

# run
next_run, incidents = process_incidents(raw_incidents=raw_incidents,
min_severity=min_severity,
latest_created_time=latest_created_time,
last_incident_number=1)

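The updated expectation reflects the same shift: the minimum severity no longer changes how `process_incidents` handles a batch, since filtering already happened in the query. A standalone pytest-style sketch of that idea; the mock batch and helper below are illustrative, not the repo's fixtures or the real `process_incidents` signature:

```python
import pytest

# Illustrative only: not the repo's fixtures.
RAW_BATCH = [{'Title': 'low', 'Severity': 1}, {'Title': 'high', 'Severity': 3}]


def convert_batch(raw_incidents):
    """Stand-in for the new loop: one XSOAR incident per raw incident, no severity check."""
    return [{'name': '[Azure Sentinel] ' + inc['Title'], 'severity': inc['Severity']}
            for inc in raw_incidents]


@pytest.mark.parametrize('min_severity', [1, 3])
def test_every_fetched_incident_is_created(min_severity):
    # min_severity is intentionally unused: the count is the same for every value.
    assert len(convert_batch(RAW_BATCH)) == len(RAW_BATCH)
```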
6 changes: 6 additions & 0 deletions Packs/AzureSentinel/ReleaseNotes/1_5_60.md
@@ -0,0 +1,6 @@

#### Integrations

##### Microsoft Sentinel

- Improved implementation for *The minimum severity of incidents to fetch* parameter.
2 changes: 1 addition & 1 deletion Packs/AzureSentinel/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Microsoft Sentinel",
"description": "Microsoft Sentinel is a cloud-native security information and event manager (SIEM) platform that uses built-in AI to help analyze large volumes of data across an enterprise.",
"support": "xsoar",
"currentVersion": "1.5.59",
"currentVersion": "1.5.60",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",