
Commit

Update main.py
Cv-securityIQ committed Nov 18, 2024
1 parent 80b1373 · commit ee4f4c3
Showing 1 changed file with 71 additions and 76 deletions.
@@ -21,12 +21,12 @@

cs = os.environ.get('ConnectionString')

-customer_id = os.environ.get('AzureSentinelWorkspaceId', '')
+customer_id = os.environ.get('AzureSentinelWorkspaceId','')
shared_key = os.environ.get('AzureSentinelSharedKey')
verify = False
logAnalyticsUri = 'https://' + customer_id + '.ods.opinsights.azure.com'

-key_vault_name = os.environ.get("KeyVaultName", "Commvault-Integration-KV")
+key_vault_name = os.environ.get("KeyVaultName","Commvault-Integration-KV")
uri = None
url = None
qsdk_token = None
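
The Key Vault named above presumably holds the Commvault QSDK access token that ends up in qsdk_token; its retrieval is outside this diff. A minimal sketch using azure-identity and azure-keyvault-secrets, assuming a hypothetical secret name of "qsdk-token":

# Hypothetical sketch; the secret name and helper are assumptions, not code from this commit.
from azure.identity import DefaultAzureCredential
from azure.keyvault.secrets import SecretClient

def fetch_qsdk_token(vault_name: str, secret_name: str = "qsdk-token") -> str:
    # The Function App's managed identity (or a local az login) supplies the credential.
    credential = DefaultAzureCredential()
    client = SecretClient(vault_url=f"https://{vault_name}.vault.azure.net", credential=credential)
    return client.get_secret(secret_name).value
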
@@ -36,61 +36,61 @@
}

job_details_body = {
"opType": 1,
"entity": {"_type_": 0},
"options": {"restoreIndex": True},
"queries": [
{
"type": 0,
"queryId": "MimeFileList",
"whereClause": [
{
"criteria": {
"field": 38,
"dataOperator": 9,
"values": ["file"],
}
"opType": 1,
"entity": {"_type_": 0},
"options": {"restoreIndex": True},
"queries": [
{
"type": 0,
"queryId": "MimeFileList",
"whereClause": [
{
"criteria": {
"field": 38,
"dataOperator": 9,
"values": ["file"],
}
},
{
"criteria": {
"field": 147,
"dataOperator": 0,
"values": ["2"],
}
},
],
"dataParam": {
"sortParam": {"ascending": True, "sortBy": [0]},
"paging": {"firstNode": 0, "pageSize": -1, "skipNode": 0},
},
{
"criteria": {
"field": 147,
"dataOperator": 0,
"values": ["2"],
}
},
],
"dataParam": {
"sortParam": {"ascending": True, "sortBy": [0]},
"paging": {"firstNode": 0, "pageSize": -1, "skipNode": 0},
},
},
{
"type": 1,
"queryId": "MimeFileCount",
"whereClause": [
{
"criteria": {
"field": 38,
"dataOperator": 9,
"values": ["file"],
}
},
{
"criteria": {
"field": 147,
"dataOperator": 0,
"values": ["2"],
}
{
"type": 1,
"queryId": "MimeFileCount",
"whereClause": [
{
"criteria": {
"field": 38,
"dataOperator": 9,
"values": ["file"],
}
},
{
"criteria": {
"field": 147,
"dataOperator": 0,
"values": ["2"],
}
},
],
"dataParam": {
"sortParam": {"ascending": True, "sortBy": [0]},
"paging": {"firstNode": 0, "pageSize": -1, "skipNode": 0},
},
],
"dataParam": {
"sortParam": {"ascending": True, "sortBy": [0]},
"paging": {"firstNode": 0, "pageSize": -1, "skipNode": 0},
},
},
],
"paths": [{"path": "/**/*"}],
}
],
"paths": [{"path": "/**/*"}],
}


def main(mytimer: func.TimerRequest) -> None:
@@ -153,13 +153,13 @@ def main(mytimer: func.TimerRequest) -> None:
gen_chunks(post_data)
logging.info("Job Succeeded")
print("***Job Succeeded*****")
-upload_timestamp_blob(cs, container_name, blob_name, to_time + 1)
+upload_timestamp_blob(cs, container_name, blob_name, to_time+1)
logging.info("Function App Executed")
else:
print("No new events found.")

else:
logging.error("Failed to get events with status code : " + str(response.status_code))
logging.error("Failed to get events with status code : "+str(response.status_code))
except Exception as e:
logging.info("HTTP request error: %s", str(e))

@@ -232,7 +232,7 @@ def if_zero_set_none(value: str | None | int) -> str | None | int:


def extract_from_regex(
    message: str, default_value: str | None, *regex_string_args: str
) -> str | None:
"""
From the message, extract the strings matching the given patterns
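
Only the signature and the opening of the docstring are visible in this hunk. A minimal sketch of how such a helper is commonly written, assuming it returns the first capture group of the first matching pattern:

# Hypothetical sketch consistent with the signature above; not necessarily the repository's implementation.
import re

def extract_from_regex(message: str, default_value: str | None, *regex_string_args: str) -> str | None:
    # Return capture group 1 of the first pattern that matches, otherwise the default.
    for pattern in regex_string_args:
        match = re.search(pattern, message, re.IGNORECASE)
        if match and match.groups():
            return match.group(1).strip()
    return default_value

Callers later in this file pass patterns such as r"{}:\[(.*?)\]".format(Constants.originating_client), i.e. they pull the bracketed value out of a "field:[value]" segment of the alert text.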
@@ -288,7 +288,7 @@ def get_files_list(job_id) -> list:
job_details_body["advOptions"] = {
"advConfig": {"browseAdvancedConfigBrowseByJob": {"jobId": int(job_id)}}
}
-f_url = url + "/DoBrowse"
+f_url = url+"/DoBrowse"
response = requests.post(f_url, headers=headers, json=job_details_body, verify=verify)
resp = response.json()
browse_responses = resp.get("browseResponses", [])
@@ -388,12 +388,8 @@ def get_user_details(client_name):

f_url = f"{url}/Client/byName(clientName='{client_name}')"
response = requests.get(f_url, headers=headers, verify=False).json()
-user_id = \
-response['clientProperties'][0]['clientProps']['securityAssociations']['associations'][0]['userOrGroup'][0][
-'userId']
-user_name = \
-response['clientProperties'][0]['clientProps']['securityAssociations']['associations'][0]['userOrGroup'][0][
-'userName']
+user_id = response['clientProperties'][0]['clientProps']['securityAssociations']['associations'][0]['userOrGroup'][0]['userId']
+user_name = response['clientProperties'][0]['clientProps']['securityAssociations']['associations'][0]['userOrGroup'][0]['userName']
return user_id, user_name
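
The two assignments above index five levels deep and will raise KeyError or IndexError if any level is missing from the API response. A defensive variant is sketched below purely for illustration; the commit itself keeps the direct indexing shown above:

# Illustrative hardening only; not part of this commit.
def _first_user_or_group(client_response: dict) -> dict:
    # Walk the nested response and fall back to {} if any level is absent.
    try:
        associations = client_response['clientProperties'][0]['clientProps']['securityAssociations']['associations']
        return associations[0]['userOrGroup'][0]
    except (KeyError, IndexError, TypeError):
        return {}

# user = _first_user_or_group(response)
# user_id, user_name = user.get('userId'), user.get('userName')
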


@@ -423,7 +419,7 @@ def get_incident_details(message: str) -> dict | None:

description = format_alert_description(message)

-job_details = get_job_details(job_id, url, headers)
+job_details = get_job_details(job_id,url,headers)
if job_details is None:
print(f"Invalid job [{job_id}]")
return None
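
get_job_details is not included in this diff. Judging only from how its result is consumed below (a dict whose "jobs" list carries a "jobSummary"), a plausible sketch looks like the following; the /Job/{job_id} endpoint and the validity checks are assumptions:

# Hypothetical sketch inferred from the call sites in this file; endpoint and checks are assumptions.
def get_job_details(job_id, url, headers):
    f_url = f"{url}/Job/{job_id}"
    response = requests.get(f_url, headers=headers, verify=verify)
    data = response.json()
    if response.status_code != 200 or not data.get("jobs"):
        return None
    return data
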
@@ -435,9 +431,9 @@
)
subclient_id = (
job_details.get("jobs", [{}])[0]
.get("jobSummary", {})
.get("subclient", {})
.get("subclientId")
.get("jobSummary", {})
.get("subclient", {})
.get("subclientId")
)
files_list, scanned_folder_list = fetch_file_details(job_id, subclient_id)
originating_client = extract_from_regex(message, "", r"{}:\[(.*?)\]".format(Constants.originating_client))
@@ -456,7 +452,7 @@
message,
None,
r"{}:\[(.*?)\]".format(
Constants.affected_files_count
),
)
),
@@ -465,7 +461,7 @@
message,
None,
r"{}:\[(.*?)\]".format(
Constants.modified_files_count
),
)
),
@@ -474,7 +470,7 @@
message,
None,
r"{}:\[(.*?)\]".format(
Constants.deleted_files_count
),
)
),
@@ -483,7 +479,7 @@
message,
None,
r"{}:\[(.*?)\]".format(
Constants.renamed_files_count
),
)
),
@@ -492,7 +488,7 @@
message,
None,
r"{}:\[(.*?)\]".format(
Constants.created_files_count
),
)
),
@@ -556,7 +552,7 @@ def post_data(body, chunk_count):
logging.info(f"Date :- {rfc1123date}")
content_length = len(body)
signature = build_signature(rfc1123date, content_length, method, content_type,
resource)
uri = logAnalyticsUri + resource + '?api-version=2016-04-01'
logging.info(f"URL - {uri}")
headers = {
@@ -571,16 +567,15 @@
if (response.status_code >= 200 and response.status_code <= 299):
logging.info("Chunk was processed{} events".format(chunk_count))
else:
-logging.error(
-"Error during sending events to Microsoft Sentinel. Response code:{}".format(response.status_code))
+logging.error("Error during sending events to Microsoft Sentinel. Response code:{}".format(response.status_code))


def gen_chunks(data):
"""This method is used to get the chunks and post the data to log analytics work space
Args:
data (_type_): _description_
"""
"""
for chunk in gen_chunks_to_object(data, chunksize=10000):
obj_array = []
for row in chunk:
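
The body of gen_chunks is truncated here, and gen_chunks_to_object is not shown at all. A typical implementation of the latter simply yields fixed-size slices of the event list (sketch, assuming data is a plain list):

# Hypothetical sketch of the chunking generator used by gen_chunks above.
def gen_chunks_to_object(data, chunksize=100):
    # Yield successive chunksize-sized slices of the event list.
    for i in range(0, len(data), chunksize):
        yield data[i:i + chunksize]
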
