# Configure root logger once at import time so module loggers emit INFO+.
logging.basicConfig(level=logging.INFO)
legacy
adxtable2df
adxtable2df (table)
Return a pandas dataframe from an adx table
Exported source
def adx_query(kql):
    """
    Run a kusto query against the configured Azure Data Explorer cluster.

    Args:
        kql (str or list): kusto query or list of queries. A list is joined
            into a single ``.execute script`` batch that continues on errors.

    Returns:
        json: query results (the first primary result table).
    """
    if isinstance(kql, list):
        # Batch multiple statements into one management script.
        kql = [".execute script with (ContinueOnErrors=true) <|"] + kql
        kql = "\n".join(kql)
    config = api.cache["config"]
    # azure_dataexplorer is "<cluster-url>/<database>"; split off the db name.
    cluster, dx_db = config.azure_dataexplorer.rsplit("/", 1)
    dx_client = KustoClient(KustoConnectionStringBuilder.with_az_cli_authentication(cluster))
    # Escape backslashes so they survive KQL string parsing.
    return dx_client.execute(dx_db, kql.replace("\\", "\\\\")).primary_results[0]
def adxtable2df(table):
    """
    Return a pandas dataframe from an adx table.

    Args:
        table: a Kusto response table exposing ``columns`` (objects with a
            ``column_name`` attribute) and ``raw_rows`` (list of row lists).

    Returns:
        pandas.DataFrame: the table contents with named columns.
    """
    columns = [col.column_name for col in table.columns]
    frame = pandas.DataFrame(table.raw_rows, columns=columns)
    return frame
adx_query
adx_query (kql)
*Run a kusto query
Args: kql (str or list): kusto query or list of queries
Returns: json: query results*
adxtable2df(adx_query("SecurityAlert | take 10"))
export_jira_issues
export_jira_issues ()
Exports all JIRA issues to the data lake.
Exported source
def export_jira_issues():
    """
    Exports all JIRA issues to the data lake.

    Walks day-by-day from 7 days ago through tomorrow, dumping each day's
    updated issues to ``jira_outputs/issues/<date>/issues.parquet``.
    Previously exported days (other than the most recent) are skipped.
    """
    jira_issues_path = api.datalake_path() / "jira_outputs" / "issues"

    def getissues(start_at, jql):
        # Fetch one page (100 issues) of a JQL search from offset start_at.
        response = api.clients.jira.jql(jql, start=start_at, limit=100)
        next_start = response["startAt"] + response["maxResults"]
        total_rows = response["total"]
        if next_start > total_rows:
            # Clamp so the caller's `start_at != total_rows` loop terminates.
            next_start = total_rows
        issues = response["issues"]
        return next_start, total_rows, issues

    def save_date_issues(after_date: pandas.Timestamp, path=jira_issues_path):
        # Export all issues updated within the single day [after_date, after_date + 1d).
        fromdate = after_date
        todate = after_date + pandas.to_timedelta("1d")
        jql = (
            f"updated >= {fromdate.date().isoformat()} and "
            f"updated < {todate.date().isoformat()} order by key"
        )
        output = path / f"{fromdate.date().isoformat()}" / "issues.parquet"
        if output.exists() and fromdate < pandas.Timestamp.now() - pandas.to_timedelta("1d"):
            # skip previously dumped days except for last day
            return None
        start_at, total_rows = 0, -1  # sentinel -1 forces at least one fetch
        dataframes = []
        while start_at != total_rows:
            start_at, total_rows, issues = getissues(start_at, jql)
            dataframes.append(pandas.DataFrame(issues))
            if start_at == 100:
                # Log expected row count once, after the first page.
                logger.info(f"{total_rows} to load")
        if total_rows > 1:
            df = pandas.concat(dataframes)
            # Serialise the nested "fields" dict so it survives parquet.
            df["fields"] = df["fields"].apply(json.dumps)
            logger.info(f"saving {output}")
            try:
                df.to_parquet(output.open("wb"))
            except Exception as exc:
                # Best-effort export: log-and-continue on write failure.
                print(exc)
            return df
        else:
            return None

    after = pandas.Timestamp.now() - pandas.to_timedelta("7d")
    until = pandas.Timestamp.now() + pandas.to_timedelta("1d")
    while after < until:
        save_date_issues(after)
        after += pandas.to_timedelta("1d")
export_jira_issues()
sentinel_beautify_local
sentinel_beautify_local (data:dict, outputformat:str='jira', default_status:str='Onboard: MOU (T0)', default_orgid:int=2)
Takes a SecurityIncident including alerts as json and returns markdown, html and detailed json representation.
Exported source
def flatten(nested_dict, parent_key="", sep="_"):
    """
    Flatten a nested dictionary.

    Args:
        nested_dict (dict): The nested dictionary to flatten.
        parent_key (str, optional): The parent key for the current level of nesting.
        sep (str, optional): The separator to use for flattened keys.

    Returns:
        dict: The flattened dictionary, with nested keys joined by `sep`.
    """
    flat_dict = {}

    for key, value in nested_dict.items():
        new_key = f"{parent_key}{sep}{key}" if parent_key else key

        if isinstance(value, dict):
            # Recurse into nested dicts, carrying the accumulated key prefix.
            flat_dict.update(flatten(value, new_key, sep))
        else:
            flat_dict[new_key] = value

    return flat_dict
def sentinel_beautify_local(
    data: dict,
    outputformat: str = "jira",
    default_status: str = "Onboard: MOU (T0)",
    default_orgid: int = 2,
):
    """
    Takes a SecurityIncident including alerts as json and returns
    markdown, html and detailed json representation.

    Args:
        data: SecurityIncident row as a dict; "Labels", "Owner",
            "AdditionalData" and "Comments" may be JSON-encoded strings,
            and "AlertData" holds the related alert records (newest first).
        outputformat: output flavour (currently only "jira" is produced;
            kept for interface compatibility).
        default_status: SecOps status used when the customer record has none.
        default_orgid: JIRA organisation id used when the customer record has none.

    Returns:
        dict: subject, labels, observables, sentinel_data, customer info
        and truncated JIRA wikimarkup.
    """
    # Decode any JSON-string fields into native structures.
    for jsonfield in ["Labels", "Owner", "AdditionalData", "Comments"]:
        if data.get(jsonfield):
            data[jsonfield] = json.loads(data[jsonfield])
    labels = [
        f"SIEM_Severity:{data['Severity']}",
        f"SIEM_Status:{data['Status']}",
        f"SIEM_Title:{data['Title']}",
    ]
    labels += [l["labelName"] for l in data["Labels"]]  # copy over labels from incident
    incident_details = [data["Description"], ""]

    if data.get("Owner"):
        owner = None
        if data["Owner"].get("email"):
            owner = data["Owner"]["email"]
        elif data["Owner"].get("userPrincipalName"):
            owner = data["Owner"]["userPrincipalName"]
        if owner:
            labels.append(f"SIEM_Owner:{owner}")
            incident_details.append(f"- **Sentinel Incident Owner:** {owner}")

    if data.get("Classification"):
        labels.append(f"SIEM_Classification:{data['Classification']}")
        incident_details.append(f"- **Alert Classification:** {data['Classification']}")

    if data.get("ClassificationReason"):
        labels.append(f"SIEM_ClassificationReason:{data['ClassificationReason']}")
        incident_details.append(
            f"- **Alert Classification Reason:** {data['ClassificationReason']}"
        )

    if data.get("ProviderName"):
        labels.append(f"SIEM_ProviderName:{data['ProviderName']}")
        incident_details.append(f"- **Provider Name:** {data['ProviderName']}")

    if data.get("AdditionalData"):
        if data["AdditionalData"].get("alertProductNames"):
            product_names = ",".join(data["AdditionalData"]["alertProductNames"])
            labels.append(f"SIEM_alertProductNames:{product_names}")
            incident_details.append(f"- **Product Names:** {product_names}")
        if data["AdditionalData"].get("tactics"):
            tactics = ",".join(data["AdditionalData"]["tactics"])
            labels.append(f"SIEM_tactics:{tactics}")
            incident_details.append(
                f"- **[MITRE ATT&CK Tactics](https://attack.mitre.org/tactics/):** {tactics}"
            )
        if data["AdditionalData"].get("techniques"):
            techniques = ",".join(data["AdditionalData"]["techniques"])
            labels.append(f"SIEM_techniques:{techniques}")
            incident_details.append(
                "- **[MITRE ATT&CK Techniques](https://attack.mitre.org/techniques/):**"
                f" {techniques}"
            )

    comments = []
    if data.get("Comments"):
        if len(data["Comments"]) > 0:
            comments += ["", "## Comments"]
            for comment in data["Comments"]:
                comments += comment["message"].split("\n")
            comments += [""]

    alert_details = []
    observables = []
    # Per entity type, the format template used to extract a display value.
    entity_type_value_mappings = {
        "host": "{HostName}",
        "account": "{Name}",
        "process": "{CommandLine}",
        "file": "{Name}",
        "ip": "{Address}",
        "url": "{Url}",
        "dns": "{DomainName}",
        "registry-key": "{Hive}{Key}",
        "filehash": "{Algorithm}{Value}",
    }

    class Default(dict):
        """
        Default dict that returns the key if the key is not found

        Args:
            dict
        """

        def __missing__(self, key):
            return key

    for alert in data["AlertData"][:10]:  # Assumes alertdata is newest to oldest
        if not alert_details:
            # Emit the section header only once, before the first alert.
            alert_details += [
                "",
                "## Alert Details",
                (
                    "The last day of activity (up to 10 alerts) is summarised below from"
                    " newest to oldest."
                ),
            ]
        alert_details.append(
            f"### [{alert['AlertName']} (Severity:{alert['AlertSeverity']}) - "
            + f"TimeGenerated {alert['TimeGenerated']}]({alert['AlertLink']})"
        )
        alert_details.append(alert["Description"])
        for key in [
            "RemediationSteps",
            "ExtendedProperties",
            "Entities",  # entities last as may get truncated
        ]:
            if alert.get(key):
                if isinstance(alert[key], str) and alert[key][0] in ["{", "["]:
                    # Field arrived JSON-encoded; decode before rendering.
                    alert[key] = json.loads(alert[key])
                if key == "Entities":  # add the entity to our list of observables
                    for entity in alert[key]:
                        observable = {"value": None}
                        if "Type" in entity:
                            observable = {
                                "type": entity["Type"],
                                "value": entity_type_value_mappings.get(
                                    entity["Type"], ""
                                ).format_map(Default(entity)),
                            }
                        if not observable["value"]:
                            # dump whole dict as string if no mapping found
                            observable["value"] = repr(entity)
                        observables.append(observable)
                if alert[key] and isinstance(alert[key], list) and isinstance(alert[key][0], dict):
                    # if list of dicts, make a table
                    for index, entry in enumerate(
                        [flatten(item) for item in alert[key] if len(item.keys()) > 1]
                    ):
                        alert_details += ["", f"#### {key}.{index}"]
                        for entrykey, value in entry.items():
                            if value:
                                alert_details.append(f"- **{entrykey}:** {value}")
                elif isinstance(alert[key], dict):  # if dict display as list
                    alert_details += ["", f"#### {key}"]
                    for entrykey, value in alert[key].items():
                        if value and len(value) < 200:
                            alert_details.append(f"- **{entrykey}:** {value}")
                        elif value:  # break out long blocks
                            alert_details += [f"- **{entrykey}:**", "", "```", value, "```", ""]
                else:  # otherwise just add as separate lines
                    alert_details += ["", f"#### {key}"] + [item for item in alert[key]]

    title = (
        f"SIEM Detection #{data['IncidentNumber']} Sev:{data['Severity']} -"
        f" {data['Title']} (Status:{data['Status']})"
    )
    mdtext = (
        [
            f"# {title}",
            "",
            f"## [SecurityIncident #{data['IncidentNumber']} Details]({data['IncidentUrl']})",
            "",
        ]
        + incident_details
        + comments
        + alert_details
    )
    mdtext = "\n".join([str(line) for line in mdtext])
    # NOTE(review): html rendering is computed but not returned — presumably
    # kept for parity with the docstring; confirm before removing.
    content = markdown(mdtext, extensions=["tables"])
    # remove special chars and deduplicate labels
    labels = set("".join(c for c in label if c.isalnum() or c in ".:_") for label in labels)

    response = {
        "subject": title,
        "labels": list(labels),
        # Deduplicate observables by converting each dict to a hashable tuple.
        "observables": [dict(ts) for ts in set(tuple(i.items()) for i in observables)],
        "sentinel_data": data,
    }
    workspaces_df = api.list_workspaces()
    # Match the incident's tenant to a customer workspace record, if any.
    customer = workspaces_df[workspaces_df["customerId"] == data["TenantId"]].to_dict("records")
    if len(customer) > 0:
        customer = customer[0]
    else:
        customer = {}
    # Grab wiki format for jira and truncate to 32767 chars
    response.update(
        {
            "secops_status": customer.get("SecOps Status") or default_status,
            "jira_orgid": customer.get("JiraOrgId") or default_orgid,
            "customer": customer,
            "wikimarkup": api.atlaskit_transformer(mdtext)[:32760],
        }
    )
    return response
flatten
flatten (nested_dict, parent_key='', sep='_')
*Flatten a nested dictionary.
Args: nested_dict (dict): The nested dictionary to flatten. parent_key (str, optional): The parent key for the current level of nesting. sep (str, optional): The separator to use for flattened keys.
Returns: dict: The flattened dictionary.*
# grab latest incident with alerts
incident = api.security_incidents().dropna(subset=["AlertIds"]).iloc[0]
df = api.security_alerts()
df = df[df["TenantId"] == incident["TenantId"]]
alertids = json.loads(incident["AlertIds"])
# extend incident with alert info
incident["AlertData"] = (
    df[df["SystemAlertId"].isin(alertids)].copy(deep=True).to_dict(orient="records")
)
# convert to a jira friendly format
sentinel_beautify_local(incident.to_dict())