diff --git a/Packs/SplunkPy/Classifiers/classifier-Splunk_-_Notable_Generic_Incoming_Mapper.json b/Packs/SplunkPy/Classifiers/classifier-Splunk_-_Notable_Generic_Incoming_Mapper.json index cafef6e54632..f2a72c0c9511 100644 --- a/Packs/SplunkPy/Classifiers/classifier-Splunk_-_Notable_Generic_Incoming_Mapper.json +++ b/Packs/SplunkPy/Classifiers/classifier-Splunk_-_Notable_Generic_Incoming_Mapper.json @@ -105,9 +105,15 @@ "dbotMirrorInstance": { "simple": "mirror_instance" }, + "dbotMirrorTags": { + "simple": "mirror_tags" + }, "details": { "simple": "rule_description" }, + "Splunk Comments": { + "simple": "SplunkComments" + }, "name": { "complex": { "filters": [], @@ -164,6 +170,9 @@ }, "dbotMirrorInstance": { "simple": "mirror_instance" + }, + "dbotMirrorTags": { + "simple": "mirror_tags" } } } diff --git a/Packs/SplunkPy/IncidentFields/incidentfiels-Notable_Comments.json b/Packs/SplunkPy/IncidentFields/incidentfiels-Notable_Comments.json new file mode 100644 index 000000000000..88d3a1831fb9 --- /dev/null +++ b/Packs/SplunkPy/IncidentFields/incidentfiels-Notable_Comments.json @@ -0,0 +1,33 @@ +{ + "associatedToAll": false, + "associatedTypes": [ + "Splunk Notable Generic" + ], + "caseInsensitive": true, + "cliName": "splunkcomments", + "closeForm": false, + "content": true, + "description": "the notable comments", + "editForm": true, + "group": 0, + "hidden": false, + "id": "incident_splunkComments", + "isReadOnly": false, + "locked": false, + "name": "Splunk Comments", + "neverSetAsRequired": false, + "ownerOnly": false, + "propagationLabels": [ + "all" + ], + "required": false, + "sla": 0, + "system": false, + "threshold": 72, + "type": "multiSelect", + "unmapped": false, + "unsearchable": true, + "useAsKpi": false, + "version": -1, + "fromVersion": "6.0.0" +} \ No newline at end of file diff --git a/Packs/SplunkPy/Integrations/SplunkPy/SplunkPy.py b/Packs/SplunkPy/Integrations/SplunkPy/SplunkPy.py index 227155a71a4e..aa1b60d419e1 100644 --- a/Packs/SplunkPy/Integrations/SplunkPy/SplunkPy.py +++ b/Packs/SplunkPy/Integrations/SplunkPy/SplunkPy.py @@ -81,6 +81,7 @@ ASSET_ENRICHMENT: 'successful_asset_enrichment', IDENTITY_ENRICHMENT: 'successful_identity_enrichment' } +COMMENT_MIRRORED_FROM_XSOAR = 'Mirrored from Cortex XSOAR' # =========== Not Missing Events Mechanism Globals =========== CUSTOM_ID = 'custom_id' @@ -316,7 +317,8 @@ def build_fetch_query(params): return fetch_query -def fetch_notables(service: client.Service, mapper: UserMappingObject, cache_object: "Cache" = None, enrich_notables=False): +def fetch_notables(service: client.Service, mapper: UserMappingObject, comment_tag_to_splunk: str, comment_tag_from_splunk: str, + cache_object: "Cache" = None, enrich_notables=False): last_run_data = demisto.getLastRun() params = demisto.params() if not last_run_data: @@ -354,7 +356,7 @@ def fetch_notables(service: client.Service, mapper: UserMappingObject, cache_obj continue extensive_log(f'[SplunkPy] Incident data before parsing to notable: {item}') notable_incident = Notable(data=item) - inc = notable_incident.to_incident(mapper) + inc = notable_incident.to_incident(mapper, comment_tag_to_splunk, comment_tag_from_splunk) extensive_log(f'[SplunkPy] Incident data after parsing to notable: {inc}') incident_id = create_incident_custom_id(inc) @@ -417,7 +419,7 @@ def fetch_notables(service: client.Service, mapper: UserMappingObject, cache_obj demisto.setLastRun(last_run_data) -def fetch_incidents(service: client.Service, mapper: UserMappingObject): +def fetch_incidents(service: 
client.Service, mapper: UserMappingObject, comment_tag_to_splunk: str, comment_tag_from_splunk: str): if ENABLED_ENRICHMENTS: integration_context = get_integration_context() if not demisto.getLastRun() and integration_context: @@ -426,9 +428,10 @@ def fetch_incidents(service: client.Service, mapper: UserMappingObject): # in the last run object to avoid entering this case fetch_incidents_for_mapping(integration_context) else: - run_enrichment_mechanism(service, integration_context, mapper) + run_enrichment_mechanism(service, integration_context, mapper, comment_tag_to_splunk, comment_tag_from_splunk) else: - fetch_notables(service=service, enrich_notables=False, mapper=mapper) + fetch_notables(service=service, enrich_notables=False, mapper=mapper, comment_tag_to_splunk=comment_tag_to_splunk, + comment_tag_from_splunk=comment_tag_from_splunk) # =========== Regular Fetch Mechanism =========== @@ -532,7 +535,8 @@ def get_id(self): return None @staticmethod - def create_incident(notable_data, occurred, mapper: UserMappingObject): + def create_incident(notable_data, occurred, mapper: UserMappingObject, comment_tag_to_splunk: str, + comment_tag_from_splunk: str): rule_title, rule_name = '', '' params = demisto.params() if demisto.get(notable_data, 'rule_title'): @@ -555,10 +559,10 @@ def create_incident(notable_data, occurred, mapper: UserMappingObject): notable_data = parse_notable(notable_data) notable_data.update({ 'mirror_instance': demisto.integrationInstance(), - 'mirror_direction': MIRROR_DIRECTION.get(params.get('mirror_direction')) + 'mirror_direction': MIRROR_DIRECTION.get(params.get('mirror_direction')), + 'mirror_tags': [comment_tag_from_splunk, comment_tag_to_splunk] }) - incident["rawJSON"] = json.dumps(notable_data) - + comment_entries = [] labels = [] if params.get('parseNotableEventsRaw'): for key, value in rawToDict(notable_data['_raw']).items(): @@ -567,12 +571,23 @@ def create_incident(notable_data, occurred, mapper: UserMappingObject): labels.append({'type': key, 'value': value}) if demisto.get(notable_data, 'security_domain'): labels.append({'type': 'security_domain', 'value': notable_data["security_domain"]}) + if demisto.get(notable_data, 'comment'): + comments = argToList(notable_data.get('comment', [])) + demisto.debug(f"data to update comment= {comments}") + for comment in comments: + # Creating a comment + comment_entries.append({ + 'Comment': comment}) + labels.append({'type': 'SplunkComments', 'value': str(comment_entries)}) incident['labels'] = labels incident['dbotMirrorId'] = notable_data.get(EVENT_ID) + notable_data['SplunkComments'] = comment_entries + incident["rawJSON"] = json.dumps(notable_data) + incident['SplunkComments'] = comment_entries return incident - def to_incident(self, mapper: UserMappingObject): + def to_incident(self, mapper: UserMappingObject, comment_tag_to_splunk: str, comment_tag_from_splunk: str): """ Gathers all data from all notable's enrichments and return an incident """ self.incident_created = True @@ -580,7 +595,8 @@ def to_incident(self, mapper: UserMappingObject): self.data[e.type] = e.data self.data[ENRICHMENT_TYPE_TO_ENRICHMENT_STATUS[e.type]] = e.status == Enrichment.SUCCESSFUL - return self.create_incident(self.data, self.occurred, mapper=mapper) + return self.create_incident(self.data, self.occurred, mapper=mapper, comment_tag_to_splunk=comment_tag_to_splunk, + comment_tag_from_splunk=comment_tag_from_splunk) def submitted(self) -> bool: """ Returns an indicator on whether any of the notable's enrichments was submitted or not 
""" @@ -983,7 +999,9 @@ def asset_enrichment(service: client.Service, notable_data, num_enrichment_event return job -def handle_submitted_notables(service: client.Service, incidents, cache_object: Cache, mapper: UserMappingObject): +def handle_submitted_notables(service: client.Service, incidents, cache_object: Cache, mapper: UserMappingObject, + comment_tag_to_splunk: str, + comment_tag_from_splunk: str): """ Handles submitted notables. For each submitted notable, tries to retrieve its results, if results aren't ready, it moves to the next submitted notable. @@ -1003,7 +1021,7 @@ def handle_submitted_notables(service: client.Service, incidents, cache_object: if handle_submitted_notable( service, notable, enrichment_timeout ): - incidents.append(notable.to_incident(mapper)) + incidents.append(notable.to_incident(mapper, comment_tag_to_splunk, comment_tag_from_splunk)) handled_notables.append(notable) cache_object.submitted_notables = [n for n in notables if n not in handled_notables] @@ -1061,7 +1079,8 @@ def handle_submitted_notable(service: client.Service, notable: Notable, enrichme return task_status -def submit_notables(service: client.Service, incidents: list, cache_object: Cache, mapper: UserMappingObject): +def submit_notables(service: client.Service, incidents: list, cache_object: Cache, mapper: UserMappingObject, + comment_tag_to_splunk: str, comment_tag_from_splunk: str): """ Submits fetched notables to Splunk for an enrichment. Args: @@ -1084,7 +1103,7 @@ def submit_notables(service: client.Service, incidents: list, cache_object: Cach submitted_notables.append(notable) demisto.debug(f'Submitted enrichment request to Splunk for notable {notable.id}') else: - incidents.append(notable.to_incident(mapper)) + incidents.append(notable.to_incident(mapper, comment_tag_to_splunk, comment_tag_from_splunk)) failed_notables.append(notable) demisto.debug(f'Created incident from notable {notable.id} as each enrichment submission failed') @@ -1129,7 +1148,8 @@ def submit_notable(service: client.Service, notable: Notable, num_enrichment_eve return notable.submitted() -def run_enrichment_mechanism(service: client.Service, integration_context, mapper: UserMappingObject): +def run_enrichment_mechanism(service: client.Service, integration_context, mapper: UserMappingObject, + comment_tag_to_splunk, comment_tag_from_splunk): """ Execute the enriching fetch mechanism 1. We first handle submitted notables that have not been handled in the last fetch run 2. 
If we finished handling and submitting all fetched notables, we fetch new notables
@@ -1145,10 +1165,12 @@ def run_enrichment_mechanism(service: client.Service, integration_context, mappe
     cache_object = Cache.load_from_integration_context(integration_context)
 
     try:
-        handle_submitted_notables(service, incidents, cache_object, mapper)
+        handle_submitted_notables(service, incidents, cache_object, mapper, comment_tag_to_splunk, comment_tag_from_splunk)
 
         if cache_object.done_submitting() and cache_object.done_handling():
-            fetch_notables(service=service, cache_object=cache_object, enrich_notables=True, mapper=mapper)
-            submit_notables(service, incidents, cache_object, mapper)
+            fetch_notables(service=service, cache_object=cache_object, enrich_notables=True, mapper=mapper,
+                           comment_tag_to_splunk=comment_tag_to_splunk,
+                           comment_tag_from_splunk=comment_tag_from_splunk)
+            submit_notables(service, incidents, cache_object, mapper, comment_tag_to_splunk, comment_tag_from_splunk)
 
     except Exception as e:
         err = f'Caught an exception while executing the enriching fetch mechanism. Additional Info: {str(e)}'
@@ -1161,7 +1183,8 @@ def run_enrichment_mechanism(service: client.Service, integration_context, mappe
             store_incidents_for_mapping(incidents, integration_context)
         handled_but_not_created_incidents = cache_object.organize()
         cache_object.dump_to_integration_context(integration_context)
-        incidents += [notable.to_incident(mapper) for notable in handled_but_not_created_incidents]
+        incidents += [notable.to_incident(mapper, comment_tag_to_splunk, comment_tag_from_splunk)
+                      for notable in handled_but_not_created_incidents]
         demisto.incidents(incidents)
 
 
@@ -1231,8 +1254,44 @@ def get_last_update_in_splunk_time(last_update):
     return (dt - datetime(1970, 1, 1, tzinfo=pytz.utc)).total_seconds()
 
 
+def get_comments_data(service: client.Service, notable_id: str, comment_tag_from_splunk: str, last_update_splunk_timestamp):
+    """Get the comments of a notable event from Splunk and build note entries for any new comments.
+    Args:
+        comment_tag_from_splunk (str): The tag to apply to note entries that are mirrored as comments from Splunk.
+    """
+    notes = []
+    search = '|`incident_review` ' \
+             '| eval last_modified_timestamp=_time ' \
+             f'| where rule_id="{notable_id}" ' \
+             f'| where last_modified_timestamp>{last_update_splunk_timestamp} ' \
+             '| fields - time ' \
+
+    demisto.debug(f'Performing get-comments-data command with query: {search}')
+
+    for item in results.JSONResultsReader(service.jobs.oneshot(search, output_mode=OUTPUT_MODE_JSON)):
+        demisto.debug(f'item: {item}')
+        if handle_message(item):
+            continue
+        updated_notable = parse_notable(item, to_dict=True)
+        demisto.debug(f'updated_notable: {updated_notable}')
+        comment = updated_notable.get('comment', '')
+        if comment and COMMENT_MIRRORED_FROM_XSOAR not in comment:
+            # Creating a note
+            notes.append({
+                'Type': EntryType.NOTE,
+                'Contents': comment,
+                'ContentsFormat': EntryFormat.TEXT,
+                'Tags': [comment_tag_from_splunk],  # The list of tags to add to the entry
+                'Note': True,
+            })
+            demisto.debug(f'Update new comment-{comment}')
+    demisto.debug(f'notes={notes}')
+    return notes
+
+
 def get_remote_data_command(service: client.Service, args: dict,
-                            close_incident: bool, close_end_statuses: bool, close_extra_labels: list[str], mapper):
+                            close_incident: bool, close_end_statuses: bool, close_extra_labels: list[str], mapper,
+                            comment_tag_from_splunk: str):
     """ get-remote-data command: Returns an updated notable and error entry (if needed)
 
     Args:
@@ -1264,32 +1323,40 @@ def get_remote_data_command(service: client.Service, args: dict,
         if handle_message(item):
continue updated_notable = parse_notable(item, to_dict=True) - if updated_notable.get('owner'): demisto.debug("owner field was found, changing according to mapping.") updated_notable["owner"] = mapper.get_xsoar_user_by_splunk( updated_notable.get("owner")) if mapper.should_map else updated_notable.get("owner") - - demisto.debug(f'notable {notable_id} data: {updated_notable}') if close_incident and updated_notable.get('status_label'): status_label = updated_notable['status_label'] if status_label == "Closed" or (status_label in close_extra_labels) \ or (close_end_statuses and argToBoolean(updated_notable.get('status_end', 'false'))): demisto.info(f'Closing incident related to notable {notable_id} with status_label: {status_label}') - entries = [{ + entries.append({ 'Type': EntryType.NOTE, 'Contents': { 'dbotIncidentClose': True, 'closeReason': f'Notable event was closed on Splunk with status \"{status_label}\".' }, 'ContentsFormat': EntryFormat.JSON - }] + }) else: demisto.debug('"status_label" key could not be found on the returned data, ' f'skipping closure mirror for notable {notable_id}.') - + if updated_notable.get('comment'): + comment_entries = [] + comments = argToList(updated_notable.get('comment')) + for comment in comments: + comment_entries.append({ + 'Comment': comment, + }) + new_notes = get_comments_data(service, notable_id, comment_tag_from_splunk, last_update_splunk_timestamp) + demisto.debug(f"new_notes: {new_notes}") + entries.extend(new_notes) + if comment_entries: + updated_notable['SplunkComments'] = comment_entries demisto.debug(f'Updated notable {notable_id}') return_results(GetRemoteDataResponse(mirrored_object=updated_notable, entries=entries)) @@ -1328,7 +1395,7 @@ def get_modified_remote_data_command(service: client.Service, args): return_results(GetModifiedRemoteDataResponse(modified_incident_ids=modified_notable_ids)) -def update_remote_system_command(args, params, service: client.Service, auth_token, mapper): +def update_remote_system_command(args, params, service: client.Service, auth_token, mapper, comment_tag_to_splunk): """ Pushes changes in XSOAR incident into the corresponding notable event in Splunk Server. 
Args:
@@ -1336,6 +1403,7 @@ def update_remote_system_command(args, params, service: client.Service, auth_tok
         params (dict): Demisto params
         service (splunklib.client.Service): Splunk service object
         auth_token (str) - The authentication token to use
+        comment_tag_to_splunk (str) - The tag for comments from XSOAR that should be mirrored to Splunk
 
     Returns:
         notable_id (str): The notable id
@@ -1343,7 +1411,9 @@ def update_remote_system_command(args, params, service: client.Service, auth_tok
     parsed_args = UpdateRemoteSystemArgs(args)
     delta = parsed_args.delta
     notable_id = parsed_args.remote_incident_id
-
+    entries = parsed_args.entries
+    base_url = 'https://' + params['host'] + ':' + params['port'] + '/'
+    demisto.debug(f"mirroring args: entries:{parsed_args.entries} delta:{parsed_args.delta}")
     if parsed_args.incident_changed and delta:
         demisto.debug(
             f'Got the following delta keys {list(delta.keys())} to update incident corresponding to notable {notable_id}'
@@ -1365,7 +1435,6 @@ def update_remote_system_command(args, params, service: client.Service, auth_tok
 
         if any(changed_data.values()):
             demisto.debug(f'Sending update request to Splunk for notable {notable_id}, data: {changed_data}')
-            base_url = 'https://' + params['host'] + ':' + params['port'] + '/'
             try:
                 session_key = None if auth_token else get_auth_session_key(service)
                 response_info = update_notable_events(
@@ -1390,6 +1459,26 @@ def update_remote_system_command(args, params, service: client.Service, auth_tok
     else:
         demisto.debug(f'Incident corresponding to notable {notable_id} was not changed.')
 
+    if entries:
+        for entry in entries:
+            entry_tags = entry.get('tags', [])
+            demisto.debug(f'Got the entry tags: {entry_tags}')
+            if comment_tag_to_splunk in entry_tags:
+                demisto.debug('Add new comment')
+                comment_body = f'{entry.get("contents", "")}\n {COMMENT_MIRRORED_FROM_XSOAR}'
+                try:
+                    session_key = get_auth_session_key(service) if not auth_token else None
+                    response_info = update_notable_events(
+                        baseurl=base_url, comment=comment_body, auth_token=auth_token, sessionKey=session_key,
+                        eventIDs=[notable_id])
+                    if 'success' not in response_info or not response_info['success']:
+                        demisto.error(f'Failed updating notable {notable_id}: {str(response_info)}')
+                    else:
+                        demisto.debug('update-remote-system for notable {}: {}'
+                                      .format(notable_id, response_info.get('message')))
+                except Exception as e:
+                    demisto.error('Error in Splunk outgoing mirror for incident corresponding to notable {}. 
' + 'Error message: {}'.format(notable_id, str(e))) return notable_id @@ -1414,7 +1503,8 @@ def create_mapping_dict(total_parsed_results, type_field): return types_map -def get_mapping_fields_command(service: client.Service, mapper, params: dict): +def get_mapping_fields_command(service: client.Service, mapper, params: dict, comment_tag_to_splunk: str, + comment_tag_from_splunk: str): # Create the query to get unique objects # The logic is identical to the 'fetch_incidents' command type_field = params.get('type_field', 'source') @@ -1459,7 +1549,7 @@ def get_mapping_fields_command(service: client.Service, mapper, params: dict): for item in reader: if isinstance(item, dict): notable = Notable(data=item) - total_parsed_results.append(notable.to_incident(mapper)) + total_parsed_results.append(notable.to_incident(mapper, comment_tag_to_splunk, comment_tag_from_splunk)) elif handle_message(item): continue @@ -2708,6 +2798,12 @@ def main(): # pragma: no cover if proxy: handle_proxy() + comment_tag_to_splunk = params.get('comment_tag_to_splunk', 'FROM XSOAR') + comment_tag_from_splunk = params.get('comment_tag_from_splunk', 'FROM SPLUNK') + if comment_tag_to_splunk == comment_tag_from_splunk: + raise DemistoException('Comment Tag to Splunk and Comment Tag ' + 'from Splunk cannot have the same value.') + connection_args['handler'] = requests_handler if (service := client.connect(**connection_args)) is None: @@ -2732,7 +2828,7 @@ def main(): # pragma: no cover splunk_get_indexes_command(service) elif command == 'fetch-incidents': demisto.info('########### FETCH #############') - fetch_incidents(service, mapper) + fetch_incidents(service, mapper, comment_tag_to_splunk, comment_tag_from_splunk) elif command == 'splunk-submit-event': splunk_submit_event_command(service, args) elif command == 'splunk-notable-event-edit': @@ -2771,19 +2867,20 @@ def main(): # pragma: no cover if argToBoolean(params.get('use_cim', False)): return_results(get_cim_mapping_field_command()) else: - return_results(get_mapping_fields_command(service, mapper, params)) + return_results(get_mapping_fields_command(service, mapper, params, comment_tag_to_splunk, comment_tag_from_splunk)) elif command == 'get-remote-data': demisto.info('########### MIRROR IN #############') get_remote_data_command(service=service, args=args, close_incident=params.get('close_incident'), close_end_statuses=params.get('close_end_status_statuses'), close_extra_labels=argToList(params.get('close_extra_labels', '')), - mapper=mapper) + mapper=mapper, + comment_tag_from_splunk=comment_tag_from_splunk) elif command == 'get-modified-remote-data': get_modified_remote_data_command(service, args) elif command == 'update-remote-system': demisto.info('########### MIRROR OUT #############') - update_remote_system_command(args, params, service, auth_token, mapper) + return_results(update_remote_system_command(args, params, service, auth_token, mapper, comment_tag_to_splunk)) elif command == 'splunk-get-username-by-xsoar-user': return_results(mapper.get_splunk_user_by_xsoar_command(args)) else: diff --git a/Packs/SplunkPy/Integrations/SplunkPy/SplunkPy.yml b/Packs/SplunkPy/Integrations/SplunkPy/SplunkPy.yml index db7dd958d3cc..2c0689e96a33 100644 --- a/Packs/SplunkPy/Integrations/SplunkPy/SplunkPy.yml +++ b/Packs/SplunkPy/Integrations/SplunkPy/SplunkPy.yml @@ -272,6 +272,18 @@ configuration: section: Collect advanced: true required: false +- display: Comment tag from Splunk + name: comment_tag_from_splunk + defaultvalue: FROM SPLUNK + type: 0 + required: false + 
additionalinfo: Add this tag to an entry to mirror it as a comment from Splunk. +- display: Comment tag to Splunk + name: comment_tag_to_splunk + defaultvalue: FROM XSOAR + type: 0 + required: false + additionalinfo: Add this tag to an entry to mirror it as a comment to Splunk. description: Runs queries on Splunk servers. display: SplunkPy name: SplunkPy diff --git a/Packs/SplunkPy/Integrations/SplunkPy/SplunkPy_test.py b/Packs/SplunkPy/Integrations/SplunkPy/SplunkPy_test.py index ec129493f8c2..2bba00e106ff 100644 --- a/Packs/SplunkPy/Integrations/SplunkPy/SplunkPy_test.py +++ b/Packs/SplunkPy/Integrations/SplunkPy/SplunkPy_test.py @@ -584,7 +584,7 @@ def test_fetch_incidents(mocker): service = mocker.patch('splunklib.client.connect', return_value=None) mocker.patch('splunklib.results.JSONResultsReader', return_value=SAMPLE_RESPONSE) mapper = UserMappingObject(service, False) - splunk.fetch_incidents(service, mapper) + splunk.fetch_incidents(service, mapper, 'from_xsoar', 'from_splunk') incidents = demisto.incidents.call_args[0][0] assert demisto.incidents.call_count == 1 assert len(incidents) == 1 @@ -648,11 +648,12 @@ def test_fetch_notables(mocker): service = Service('DONE') mocker.patch('splunklib.results.JSONResultsReader', return_value=SAMPLE_RESPONSE) mapper = splunk.UserMappingObject(service, False) - splunk.fetch_incidents(service, mapper=mapper) + splunk.fetch_incidents(service, mapper=mapper, comment_tag_to_splunk='comment_tag_to_splunk', + comment_tag_from_splunk='comment_tag_from_splunk') cache_object = splunk.Cache.load_from_integration_context(get_integration_context()) assert cache_object.submitted_notables notable = cache_object.submitted_notables[0] - incident_from_cache = notable.to_incident(mapper) + incident_from_cache = notable.to_incident(mapper, 'comment_tag_to_splunk', 'comment_tag_from_splunk') incidents = demisto.incidents.call_args[0][0] assert demisto.incidents.call_count == 1 assert len(incidents) == 0 @@ -661,7 +662,8 @@ def test_fetch_notables(mocker): assert not incident_from_cache.get('owner') # now call second time to make sure that the incident fetched - splunk.fetch_incidents(service, mapper=mapper) + splunk.fetch_incidents(service, mapper=mapper, comment_tag_to_splunk='comment_tag_to_splunk', + comment_tag_from_splunk='comment_tag_from_splunk') incidents = demisto.incidents.call_args[0][0] assert len(incidents) == 1 assert incidents[0]["name"] == "Endpoint - Recurring Malware Infection - Rule : Endpoint - " \ @@ -1072,7 +1074,8 @@ def __init__(self): mocker.patch('SplunkPy.results.JSONResultsReader', return_value=notable_data) mocker.patch.object(demisto, 'results') service = Service() - splunk.get_remote_data_command(service, args, mapper=splunk.UserMappingObject(service, False), **func_call_kwargs) + splunk.get_remote_data_command(service, args, mapper=splunk.UserMappingObject(service, False), + comment_tag_from_splunk='comment_tag_from_splunk', **func_call_kwargs) results = demisto.results.call_args[0][0] expected_results = [notable_data[1]] @@ -1120,11 +1123,63 @@ def __init__(self): ) mocker.patch("SplunkPy.isinstance", return_value=True) - splunk.get_remote_data_command(Service(), **func_call_kwargs) + splunk.get_remote_data_command(Service(), comment_tag_from_splunk='from_splunk', **func_call_kwargs) (info_message,) = info_mock.call_args_list[0][0] assert info_message == "Splunk-SDK message: test message" +@pytest.mark.parametrize("notable_data, func_call_kwargs, expected_closure_data", + [({'status_label': 'New', 'event_id': 'id', 'status_end': 
'false', + 'comment': 'new comment from splunk', 'reviewer': 'admin', + 'review_time': '1612881691.589575'}, + {'close_incident': True, 'close_end_statuses': False, 'close_extra_labels': []}, + None, + )]) +def test_get_remote_data_command_add_comment(mocker, notable_data: dict, + func_call_kwargs: dict, expected_closure_data: dict): + """ + Test case for get_remote_data_command with comment addition. + Given: + - notable data with new comment + When: + new comment added in splunk + Then: + - ensure the comment added as a new note + - ensure the event was updated + + """ + class Jobs: + def oneshot(self, _, output_mode: str): + assert output_mode == splunk.OUTPUT_MODE_JSON + return notable_data + + class Service: + def __init__(self): + self.jobs = Jobs() + + args = {'lastUpdate': '2021-02-09T16:41:30.589575+02:00', 'id': 'id'} + mocker.patch.object(demisto, 'params', return_value={'timezone': '0'}) + mocker.patch.object(demisto, 'debug') + mocker.patch.object(demisto, 'info') + mocker.patch('SplunkPy.results.JSONResultsReader', return_value=[notable_data]) + mocker.patch.object(demisto, 'results') + service = Service() + + expected_comment_note = {'Type': 1, 'Contents': 'new comment from splunk', + 'ContentsFormat': 'text', 'Tags': ['from_splunk'], 'Note': True} + splunk.get_remote_data_command(service, args, mapper=splunk.UserMappingObject(service, False), + comment_tag_from_splunk='from_splunk', **func_call_kwargs) + results = demisto.results.call_args[0][0][0] + notable_data.update({'SplunkComments': [{'Comment': 'new comment from splunk'}]}) + note_results = demisto.results.call_args[0][0][1] + + expected_results = [notable_data][0] + + assert demisto.results.call_count == 1 + assert results == expected_results + assert note_results == expected_comment_note + + def test_get_modified_remote_data_command(mocker): updated_incidet_review = {'rule_id': 'id'} @@ -1183,12 +1238,12 @@ def test_edit_notable_event__failed_to_update(mocker, requests_mock): @pytest.mark.parametrize('args, params, call_count, success', [ ({'delta': {'status': '2'}, 'remoteId': '12345', 'status': 2, 'incidentChanged': True}, - {'host': 'ec.com', 'port': '8089', 'authentication': {'identifier': 'i', 'password': 'p'}}, 3, True), + {'host': 'ec.com', 'port': '8089', 'authentication': {'identifier': 'i', 'password': 'p'}}, 4, True), ({'delta': {'status': '2'}, 'remoteId': '12345', 'status': 2, 'incidentChanged': True}, - {'host': 'ec.com', 'port': '8089', 'authentication': {'identifier': 'i', 'password': 'p'}}, 2, False), + {'host': 'ec.com', 'port': '8089', 'authentication': {'identifier': 'i', 'password': 'p'}}, 3, False), ({'delta': {'status': '2'}, 'remoteId': '12345', 'status': 2, 'incidentChanged': True}, {'host': 'ec.com', 'port': '8089', 'authentication': {'identifier': 'i', 'password': 'p'}, 'close_notable': True}, - 4, True) + 5, True) ]) def test_update_remote_system(args, params, call_count, success, mocker, requests_mock): @@ -1212,7 +1267,8 @@ def __init__(self): mocker.patch.object(demisto, 'error') service = Service() mapper = splunk.UserMappingObject(service, False) - assert splunk.update_remote_system_command(args, params, service, None, mapper=mapper) == args['remoteId'] + assert splunk.update_remote_system_command(args, params, service, None, mapper=mapper, + comment_tag_to_splunk='comment_tag_to_splunk') == args['remoteId'] assert demisto.debug.call_count == call_count if not success: assert demisto.error.call_count == 1 @@ -1648,7 +1704,8 @@ def test_labels_with_non_str_values(mocker): # run 
service = mocker.patch('splunklib.client.connect', return_value=None)
     mapper = UserMappingObject(service, False)
-    splunk.fetch_incidents(service, mapper)
+    splunk.fetch_incidents(service, mapper, comment_tag_to_splunk='comment_tag_to_splunk',
+                           comment_tag_from_splunk='comment_tag_from_splunk')
     incidents = demisto.incidents.call_args[0][0]
 
     # validate
diff --git a/Packs/SplunkPy/Layouts/layoutscontainer-Splunk_Notable_Generic.json b/Packs/SplunkPy/Layouts/layoutscontainer-Splunk_Notable_Generic.json
index 47f6e7618820..8fd9ec99ab63 100644
--- a/Packs/SplunkPy/Layouts/layoutscontainer-Splunk_Notable_Generic.json
+++ b/Packs/SplunkPy/Layouts/layoutscontainer-Splunk_Notable_Generic.json
@@ -308,6 +308,60 @@
           "w": 1,
           "x": 1,
           "y": 2
+        },
+        {
+          "displayType": "ROW",
+          "h": 2,
+          "hideName": false,
+          "i": "splunk-comments-field",
+          "items": [],
+          "maxH": null,
+          "maxW": 3,
+          "minH": 1,
+          "moved": false,
+          "name": "Splunk Comments",
+          "query": "SplunkConvertCommentsToTable",
+          "queryType": "script",
+          "static": false,
+          "type": "dynamic",
+          "w": 2,
+          "x": 0,
+          "y": 8
+        },
+        {
+          "displayType": "ROW",
+          "h": 1,
+          "hideName": false,
+          "i": "splunk-add-comment-botton",
+          "items": [
+            {
+              "args": {
+                "tags": {
+                  "simple": "",
+                  "userMarkedRequired": true
+                }
+              },
+              "buttonClass": "warning",
+              "endCol": 2,
+              "fieldId": "",
+              "height": 22,
+              "id": "button",
+              "index": 0,
+              "name": "Press to add comment to Splunk",
+              "scriptId": "SplunkAddComment",
+              "sectionItemType": "button",
+              "startCol": 0
+            }
+          ],
+          "maxH": null,
+          "maxW": 3,
+          "minH": 1,
+          "moved": false,
+          "name": "Add Comment",
+          "static": false,
+          "w": 1,
+          "x": 2,
+          "y": 8
         }
       ],
       "type": "custom"
diff --git a/Packs/SplunkPy/ReleaseNotes/3_1_4.md b/Packs/SplunkPy/ReleaseNotes/3_1_4.md
new file mode 100644
index 000000000000..4b44f4f259f6
--- /dev/null
+++ b/Packs/SplunkPy/ReleaseNotes/3_1_4.md
@@ -0,0 +1,34 @@
+
+#### Incident Fields
+
+- New: **Splunk Comments**
+
+#### Integrations
+
+##### SplunkPy
+
+- Added *comments* to the mirror in/out functionality.
+
+#### Layouts
+
+##### Splunk Notable Generic
+
+- Added a table to show the comments of the Splunk notable.
+- Added a button that enables the user to create a note and tag it, without needing to go to the War Room.
+
+#### Mappers
+
+##### Splunk - Notable Generic Incoming Mapper
+
+- Added the following new fields to the integration incoming mapper:
+  - **dbotMirrorTags**
+  - **Splunk Comments**
+
+#### Scripts
+
+##### New: SplunkAddComment
+
+- New: Use this script to add a comment with a tag (the "Comment tag to Splunk" defined in the instance configuration) as an entry in Cortex XSOAR, which will then be mirrored as a comment to a Splunk issue. This script should be run within an incident. (Available from Cortex XSOAR 6.0.0).
+##### New: SplunkConvertCommentsToTable
+
+- New: This script is used to convert Splunk comments to a table. (Available from Cortex XSOAR 6.0.0).
diff --git a/Packs/SplunkPy/Scripts/SplunkAddComment/README.md b/Packs/SplunkPy/Scripts/SplunkAddComment/README.md
new file mode 100644
index 000000000000..0a0c5ecb740a
--- /dev/null
+++ b/Packs/SplunkPy/Scripts/SplunkAddComment/README.md
@@ -0,0 +1,24 @@
+Use this script to add a comment with a tag (the "Comment tag to Splunk" defined in the instance configuration) as an entry in Cortex XSOAR, which will then be mirrored as a comment to a Splunk issue. This script should be run within an incident.
+ +## Script Data + +--- + +| **Name** | **Description** | +| --- | --- | +| Script Type | python3 | +| Cortex XSOAR Version | 6.0.0 | + +## Inputs + +--- + +| **Argument Name** | **Description** | +| --- | --- | +| comment | Comment to be added to the Splunk issue. | +| tag | The comment tag. Use the comment entry tag \(defined in your instance configuration\) to mirror the comment to splunk. | + +## Outputs + +--- +There are no outputs for this script. diff --git a/Packs/SplunkPy/Scripts/SplunkAddComment/SplunkAddComment.py b/Packs/SplunkPy/Scripts/SplunkAddComment/SplunkAddComment.py new file mode 100644 index 000000000000..c8edef5b92dc --- /dev/null +++ b/Packs/SplunkPy/Scripts/SplunkAddComment/SplunkAddComment.py @@ -0,0 +1,26 @@ +import demistomock as demisto # noqa: F401 +from CommonServerPython import * # noqa: F401 + + +def add_comment(args: Dict[str, Any]) -> CommandResults: + demisto.debug("adding comment") + tags = argToList(args.get('tags', 'FROM XSOAR')) + comment_body = args.get('comment', '') + + return CommandResults( + readable_output=comment_body, mark_as_note=True, tags=tags + ) + + +def main(): # pragma: no cover + try: + demisto.debug('SplunkAddComment is being called') + res = add_comment(demisto.args()) + return_results(res) + + except Exception as ex: + return_error(f'Failed to execute SplunkAddComment. Error: {str(ex)}') + + +if __name__ in ["__builtin__", "builtins", '__main__']: + main() diff --git a/Packs/SplunkPy/Scripts/SplunkAddComment/SplunkAddComment.yml b/Packs/SplunkPy/Scripts/SplunkAddComment/SplunkAddComment.yml new file mode 100644 index 000000000000..ef7b5a32c8b9 --- /dev/null +++ b/Packs/SplunkPy/Scripts/SplunkAddComment/SplunkAddComment.yml @@ -0,0 +1,28 @@ +args: +- description: Comment to be added to the Splunk notable. + name: comment + required: true +- auto: PREDEFINED + defaultValue: FROM XSOAR + description: The comment tag. Use the comment entry tag (defined in your instance configuration) to mirror the comment to Splunk. + name: tags + predefined: + - FROM XSOAR + required: false + isArray: true +comment: Use this script to add a comment with a tag (the "Comment tag to Splunk" defined in the instance configuration) as an entry in Cortex XSOAR, which will then be mirrored as a comment to a Splunk issue. This script should be run within an incident. +commonfields: + id: SplunkAddComment + version: -1 +name: SplunkAddComment +script: "" +type: python +tags: [] +enabled: true +scripttarget: 0 +subtype: python3 +runonce: false +dockerimage: demisto/python3:3.10.12.68714 +fromversion: 6.0.0 +tests: +- No tests (auto formatted) diff --git a/Packs/SplunkPy/Scripts/SplunkAddComment/SplunkAddComment_test.py b/Packs/SplunkPy/Scripts/SplunkAddComment/SplunkAddComment_test.py new file mode 100644 index 000000000000..c25221d23c30 --- /dev/null +++ b/Packs/SplunkPy/Scripts/SplunkAddComment/SplunkAddComment_test.py @@ -0,0 +1,10 @@ +def test_add_comment_as_note(): + """Test if the correct arguments are given to the CommandResults object when + adding a comment as a note. 
+ """ + from SplunkAddComment import add_comment + result = add_comment({'comment': 'New comment', 'tags': 'comment tag to splunk'}) + + assert result.readable_output == 'New comment' + assert result.tags == ['comment tag to splunk'] + assert result.mark_as_note diff --git a/Packs/SplunkPy/Scripts/SplunkConvertCommentsToTable/README.md b/Packs/SplunkPy/Scripts/SplunkConvertCommentsToTable/README.md new file mode 100644 index 000000000000..1f92e7ebdd17 --- /dev/null +++ b/Packs/SplunkPy/Scripts/SplunkConvertCommentsToTable/README.md @@ -0,0 +1,21 @@ +This script is used to convert Splunk comments to a table. + +## Script Data + +--- + +| **Name** | **Description** | +| --- | --- | +| Script Type | python3 | +| Tags | dynamic-section | +| Cortex XSOAR Version | 6.0.0 | + +## Inputs + +--- +There are no inputs for this script. + +## Outputs + +--- +There are no outputs for this script. diff --git a/Packs/SplunkPy/Scripts/SplunkConvertCommentsToTable/SplunkConvertCommentsToTable.py b/Packs/SplunkPy/Scripts/SplunkConvertCommentsToTable/SplunkConvertCommentsToTable.py new file mode 100644 index 000000000000..62cf416238a6 --- /dev/null +++ b/Packs/SplunkPy/Scripts/SplunkConvertCommentsToTable/SplunkConvertCommentsToTable.py @@ -0,0 +1,30 @@ +import demistomock as demisto # noqa: F401 +from CommonServerPython import * # noqa: F401 + + +def main(): + incident = demisto.incident() + splunkComments = [] + if not incident: + raise ValueError("Error - demisto.incident() expected to return current incident " + "from context but returned None") + fields = incident.get('CustomFields', []) + if fields: + splunkComments_str = fields.get('splunkcomments', []) + for data in splunkComments_str: + parsed_data = json.loads(data) + splunkComments.append(parsed_data) + if not splunkComments: + return CommandResults(readable_output='No comments were found in the notable') + + markdown = tableToMarkdown("", splunkComments, headers=['Comment']) + return CommandResults( + readable_output=markdown + ) + + +if __name__ in ('__main__', '__builtin__', 'builtins'): + try: + return_results(main()) + except Exception as e: + return_error(f'Got an error while parsing Splunk events: {e}', error=e) diff --git a/Packs/SplunkPy/Scripts/SplunkConvertCommentsToTable/SplunkConvertCommentsToTable.yml b/Packs/SplunkPy/Scripts/SplunkConvertCommentsToTable/SplunkConvertCommentsToTable.yml new file mode 100644 index 000000000000..0dbc3850e74c --- /dev/null +++ b/Packs/SplunkPy/Scripts/SplunkConvertCommentsToTable/SplunkConvertCommentsToTable.yml @@ -0,0 +1,18 @@ +commonfields: + id: SplunkConvertCommentsToTable + version: -1 +comment: This script is used to convert Splunk comments to a table. 
+name: SplunkConvertCommentsToTable
+outputs: []
+script: '-'
+system: false
+tags:
+- dynamic-section
+timeout: '0'
+type: python
+subtype: python3
+runas: DBotWeakRole
+dockerimage: demisto/python3:3.10.12.68714
+fromversion: 6.0.0
+tests:
+- No tests (auto formatted)
diff --git a/Packs/SplunkPy/Scripts/SplunkConvertCommentsToTable/SplunkConvertCommentsToTable_test.py b/Packs/SplunkPy/Scripts/SplunkConvertCommentsToTable/SplunkConvertCommentsToTable_test.py
new file mode 100644
index 000000000000..7277941907be
--- /dev/null
+++ b/Packs/SplunkPy/Scripts/SplunkConvertCommentsToTable/SplunkConvertCommentsToTable_test.py
@@ -0,0 +1,22 @@
+import SplunkConvertCommentsToTable
+
+EXPECTED_TABLE = ('|Comment|\n'
+                  '|---|\n'
+                  '| new comment |\n')
+
+
+def test_convert_to_table(mocker):
+    """
+    Given:
+        - A list of comments of a Splunk notable in string format
+    When:
+        - Calling the script's main function
+    Then:
+        - Validate the table is created correctly
+    """
+    incident = {'CustomFields': {'splunkcomments': [
+        '{"Comment":"new comment"}']}}
+    mocker.patch('demistomock.incident', return_value=incident)
+    result = SplunkConvertCommentsToTable.main()
+
+    assert result.readable_output == EXPECTED_TABLE
diff --git a/Packs/SplunkPy/pack_metadata.json b/Packs/SplunkPy/pack_metadata.json
index 289cf84ff9ab..1f23e3bef7d3 100644
--- a/Packs/SplunkPy/pack_metadata.json
+++ b/Packs/SplunkPy/pack_metadata.json
@@ -2,7 +2,7 @@
     "name": "Splunk",
     "description": "Run queries on Splunk servers.",
     "support": "xsoar",
-    "currentVersion": "3.1.3",
+    "currentVersion": "3.1.4",
     "author": "Cortex XSOAR",
     "url": "https://www.paloaltonetworks.com/cortex",
     "email": "",