From ffc68b92ae6a9639c893b8f5d541d2611e4bb76c Mon Sep 17 00:00:00 2001 From: Andrew Shamah <42912128+amshamah419@users.noreply.github.com> Date: Thu, 7 Sep 2023 14:48:54 +0300 Subject: [PATCH] Workday Sign on Event Collector (#28832) --- Packs/Workday/.pack-ignore | 8 + .../WorkdaySignOnEventCollector/README.md | 49 ++ .../WorkdaySignOnEventCollector.py | 571 ++++++++++++++++ .../WorkdaySignOnEventCollector.yml | 110 +++ ...WorkdaySignOnEventCollector_description.md | 7 + .../WorkdaySignOnEventCollector_image.png | Bin 0 -> 4818 bytes .../WorkdaySignOnEventCollector_test.py | 629 ++++++++++++++++++ .../command_examples | 1 + .../test_data/example_event.json | 39 ++ .../WorkdaySignonEventGenerator/README.md | 15 + .../WorkdaySignonEventGenerator.py | 189 ++++++ .../WorkdaySignonEventGenerator.yml | 31 + ...WorkdaySignonEventGenerator_description.md | 5 + .../WorkdaySignonEventGenerator_image.png | Bin 0 -> 4818 bytes .../WorkdaySignonEventGenerator_test.py | 133 ++++ .../WorkdayEventCollector.xif | 62 +- .../WorkdayEventCollector.yml | 4 +- .../WorkdayEventCollector_schema.json | 136 +++- Packs/Workday/README.md | 14 +- Packs/Workday/ReleaseNotes/1_4_0.md | 17 + Packs/Workday/pack_metadata.json | 2 +- 21 files changed, 2015 insertions(+), 7 deletions(-) create mode 100644 Packs/Workday/Integrations/WorkdaySignOnEventCollector/README.md create mode 100644 Packs/Workday/Integrations/WorkdaySignOnEventCollector/WorkdaySignOnEventCollector.py create mode 100644 Packs/Workday/Integrations/WorkdaySignOnEventCollector/WorkdaySignOnEventCollector.yml create mode 100644 Packs/Workday/Integrations/WorkdaySignOnEventCollector/WorkdaySignOnEventCollector_description.md create mode 100644 Packs/Workday/Integrations/WorkdaySignOnEventCollector/WorkdaySignOnEventCollector_image.png create mode 100644 Packs/Workday/Integrations/WorkdaySignOnEventCollector/WorkdaySignOnEventCollector_test.py create mode 100644 Packs/Workday/Integrations/WorkdaySignOnEventCollector/command_examples 
create mode 100644 Packs/Workday/Integrations/WorkdaySignOnEventCollector/test_data/example_event.json create mode 100644 Packs/Workday/Integrations/WorkdaySignonEventGenerator/README.md create mode 100644 Packs/Workday/Integrations/WorkdaySignonEventGenerator/WorkdaySignonEventGenerator.py create mode 100644 Packs/Workday/Integrations/WorkdaySignonEventGenerator/WorkdaySignonEventGenerator.yml create mode 100644 Packs/Workday/Integrations/WorkdaySignonEventGenerator/WorkdaySignonEventGenerator_description.md create mode 100644 Packs/Workday/Integrations/WorkdaySignonEventGenerator/WorkdaySignonEventGenerator_image.png create mode 100644 Packs/Workday/Integrations/WorkdaySignonEventGenerator/WorkdaySignonEventGenerator_test.py create mode 100644 Packs/Workday/ReleaseNotes/1_4_0.md diff --git a/Packs/Workday/.pack-ignore b/Packs/Workday/.pack-ignore index 0534618aee2f..963b5054f29c 100644 --- a/Packs/Workday/.pack-ignore +++ b/Packs/Workday/.pack-ignore @@ -7,6 +7,12 @@ ignore=IM111 [file:WorkdayEventCollector_image.png] ignore=IM111 +[file:WorkdaySignOnEventCollector_image.png] +ignore=IM111 + +[file:WorkdaySignonEventGenerator_image.png] +ignore=IM111 + [file:WorkdayIAMEventsGenerator_image.png] ignore=IM111 @@ -19,3 +25,5 @@ ignore=BA124 [file:WorkdayEventCollector.yml] ignore=MR108 +[known_words] +signon \ No newline at end of file diff --git a/Packs/Workday/Integrations/WorkdaySignOnEventCollector/README.md b/Packs/Workday/Integrations/WorkdaySignOnEventCollector/README.md new file mode 100644 index 000000000000..1d23d169cb0b --- /dev/null +++ b/Packs/Workday/Integrations/WorkdaySignOnEventCollector/README.md @@ -0,0 +1,49 @@ +Use the Workday Sign On Event Collector integration to get sign on logs from Workday. +This integration was integrated and tested with version v37.0 of Workday Sign On Event Collector. + +## Configure Workday Sign On Event Collector on Cortex XSOAR + +1. Navigate to **Settings** > **Integrations** > **Servers & Services**. +2. 
Search for Workday Sign On Event Collector. +3. Click **Add instance** to create and configure a new integration instance. + + | **Parameter** | **Description** | **Required** | +---------------------------------------------------| --- | --- | --- | + | Server URL (e.g., https://services1.myworkday.com) | API Endpoint of Workday server. Can be obtained from View API Clients report in Workday application. | True | + | Tenant Name | The name of the Workday Tenant. Can be obtained from View API Clients report in Workday application. | True | + | Username | | True | + | Password | | True | + | Trust any certificate (not secure) | | False | + | Use system proxy settings | | False | + | Max events per fetch | The maximum number of sign on events to retrieve. Large amount of events may cause performance issues. | False | + | Events Fetch Interval | | False | + +4. Click **Test** to validate the URLs, token, and connection. + +## Commands + +You can execute these commands from the Cortex XSIAM CLI, as part of an automation, or in a playbook. +After you successfully execute a command, a DBot message appears in the War Room with the command details. + +### workday-get-sign-on-events + +*** +Returns sign on events extracted from Workday. This command is used for developing/debugging and is to be used with caution, as it can create events, leading to events duplication and exceeding the API request limitation. + +#### Base Command + +`workday-get-sign-on-events` + +#### Input + +| **Argument Name** | **Description** | **Required** | +|--------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------| +| should_push_events | Set this argument to True in order to create events, otherwise the command will only display them. Possible values are: True, False. Default is False. 
| Required | +| limit | The maximum number of events to return. Default is 1000. | Optional | +| from_date | The date and time of the earliest event. The default timezone is UTC/GMT. The time format is "{yyyy}-{mm}-{dd}T{hh}:{mm}:{ss}Z". Example: "2021-05-18T13:45:14Z" indicates May 18, 2021, 1:45PM UTC. | Optional | +| to_date | The time format is "{yyyy}-{mm}-{dd}T{hh}:{mm}:{ss}Z". Example: "2021-05-18T13:45:14Z" indicates May 18, 2021, 1:45PM UTC. | Optional | +| relative_from_date | The query from date, for example, "5 minutes". Be advised, it is strongly suggested to keep this parameter limited in time. | Optional | + +#### Context Output + +There is no context output for this command. diff --git a/Packs/Workday/Integrations/WorkdaySignOnEventCollector/WorkdaySignOnEventCollector.py b/Packs/Workday/Integrations/WorkdaySignOnEventCollector/WorkdaySignOnEventCollector.py new file mode 100644 index 000000000000..ed7b00b7f19c --- /dev/null +++ b/Packs/Workday/Integrations/WorkdaySignOnEventCollector/WorkdaySignOnEventCollector.py @@ -0,0 +1,571 @@ + +import demistomock as demisto +from CommonServerPython import * # noqa # pylint: disable=unused-wildcard-import + +import urllib3 + +# Disable insecure warnings +urllib3.disable_warnings() + +VENDOR = "workday" +PRODUCT = "signon" +API_VERSION = "v40.0" +REQUEST_DATE_FORMAT = "%Y-%m-%dT%H:%M:%SZ" # Old format for making requests +EVENT_DATE_FORMAT = "%Y-%m-%dT%H:%M:%S.%f%z" # New format for processing events +TIMEDELTA = 1 + + +def get_from_time(seconds_ago: int) -> str: + current_time = datetime.now(tz=timezone.utc) + from_time = current_time - timedelta(seconds=seconds_ago) + return from_time.strftime(REQUEST_DATE_FORMAT) + + +def fletcher16(data: bytes) -> int: + """ + Compute the Fletcher-16 checksum for the given data. + + The Fletcher-16 checksum is a simple and fast checksum algorithm that provides + a checksum value based on the input data. 
It's not as collision-resistant as + cryptographic hashes but is faster and can be suitable for non-security-critical + applications. + + Parameters: + - data (bytes): The input data for which the checksum is to be computed. + + Returns: + - int: The computed Fletcher-16 checksum value. + """ + sum1, sum2 = 0, 0 + for byte in data: + sum1 = (sum1 + byte) % 256 + sum2 = (sum2 + sum1) % 256 + return (sum2 << 8) | sum1 + + +def generate_pseudo_id(event: dict) -> str: + """ + Compute a checksum for the given event using the Fletcher-16 algorithm. + + This function takes the entire event, serializes it to a JSON string, + converts that string to bytes, and then computes a Fletcher-16 checksum + for the byte data. + + Parameters: + - event (dict): The entire event dictionary. + + Returns: + - str: The unique ID, which is the computed Fletcher-16 checksum value concatenated with the event's Signon_DateTime. + """ + # Serialize the entire event to a JSON string and encode that to bytes + event_str = json.dumps(event, sort_keys=True) + data = event_str.encode() + + # Calculate the checksum + checksum = fletcher16(data) + + # Create a unique ID by concatenating the checksum with the Signon_DateTime + try: + unique_id = f"{checksum}_{event['Signon_DateTime']}" + except KeyError as e: + raise DemistoException(f"While calculating the pseudo ID for an event, an event without a Signon_DateTime was " + f"found.\nError: {e}") + + return unique_id + + +""" CLIENT CLASS """ + + +class Client(BaseClient): + """ + Client will implement the service API, and should not contain any Demisto logic. + Should only do requests and return data. 
+ """ + + def __init__( + self, + base_url: str, + verify_certificate: bool, + proxy: bool, + tenant_name: str, + username: str, + password: str, + ): + headers = {"content-type": "text/xml;charset=UTF-8"} + + super().__init__( + base_url=base_url, verify=verify_certificate, proxy=proxy, headers=headers + ) + self.tenant_name = tenant_name + self.username = username + self.password = password + + def generate_workday_account_signons_body( + self, + page: int, + count: int, + to_time: Optional[str] = None, + from_time: Optional[str] = None, + ) -> str: + """ + Generates XML body for Workday Account Signons Request. + + :type page: ``int`` + :param page: Page number. + + :type count: ``int`` + :param count: Number of results per page. + + :type to_time: ``Optional[str]`` + :param to_time: End time for fetching events. + + :type from_time: ``Optional[str]`` + :param from_time: Start time for fetching events. + + :return: XML body as string. + :rtype: ``str`` + """ + + return f""" + + + + + {self.username} + {self.password} + + + + + + + + + {from_time} + + {to_time} + + + + {page} + + {count} + {from_time} + + + + + + """ # noqa:E501 + + def generate_test_payload(self, from_time: str, to_time: str) -> str: + return f""" + + + + + {self.username} + {self.password} + + + + + + + + + {from_time} + + {to_time} + + + + 1 + + 1 + + + + + """ # noqa:E501 + + def retrieve_events( + self, + page: int, + count: int, + to_time: Optional[str] = None, + from_time: Optional[str] = None, + ) -> tuple: + """ + Retrieves events from Workday. + + :type page: ``int`` + :param page: Page number. + + :type count: ``int`` + :param count: Number of results per page. + + :type to_time: ``Optional[str]`` + :param to_time: End time for fetching events. + + :type from_time: ``Optional[str]`` + :param from_time: Start time for fetching events. + + :return: Tuple containing raw JSON response and account sign-on data. + :rtype: ``Tuple`` + """ + + # Make the HTTP request. 
+ raw_response = self._http_request( + method="POST", + url_suffix="", + data=self.generate_workday_account_signons_body(page, count, to_time, from_time), + resp_type="text", + timeout=120 + ) + + raw_json_response, account_signon_data = convert_to_json(raw_response) + + total_pages = int(demisto.get( + obj=raw_json_response, field="Envelope.Body.Get_Workday_Account_Signons_Response.Response_Results", + defaultParam={} + ).get("Total_Pages", "1")) + + return account_signon_data, total_pages + + def test_connectivity(self) -> str: + """ + Tests API connectivity and authentication. + + :return: 'ok' if test passed, else exception. + :rtype: ``str`` + """ + seconds_ago = 5 + from_time = get_from_time(seconds_ago) + to_time = datetime.now(tz=timezone.utc).strftime(REQUEST_DATE_FORMAT) + + payload = self.generate_test_payload(from_time=from_time, to_time=to_time) + + self._http_request( + method="POST", url_suffix="", data=payload, resp_type="text", timeout=120 + ) + + return "ok" + + +""" HELPER FUNCTIONS """ + + +def convert_to_json(response: str | dict) -> tuple[Dict[str, Any], Dict[str, Any]]: + """ + Convert an XML response to a JSON object and extract the 'Workday_Account_Signons' data. + + :param response: XML response to be converted + :return: Tuple containing the full converted response and the extracted 'Workday_Account_Signons' data. + :raises ValueError: If the expected data cannot be found in the response. 
+ """ + if type(response) == dict: + raw_json_response = response + else: + try: + raw_json_response = json.loads(xml2json(response)) + except Exception as e: + raise ValueError(f"Error parsing XML to JSON: {e}") + + # Get the 'Get_Workday_Account_Signons_Response' dictionary safely + response_data = demisto.get(raw_json_response, "Envelope.Body.Get_Workday_Account_Signons_Response") + + if not response_data: + response_data = raw_json_response.get( + "Get_Workday_Account_Signons_Response", {} + ) + + account_signon_data = response_data.get("Response_Data", {}) + + # Ensure 'Workday_Account_Signon' is a list + workday_account_signons = account_signon_data.get("Workday_Account_Signon") + if isinstance(workday_account_signons, dict): + account_signon_data["Workday_Account_Signon"] = [workday_account_signons] + + return raw_json_response, account_signon_data + + +def process_and_filter_events(events: list, from_time: str, previous_run_pseudo_ids: set) -> tuple: + non_duplicates = [] + duplicates = [] + pseudo_ids_for_next_iteration = set() + + try: + from_datetime = datetime.strptime(from_time, EVENT_DATE_FORMAT).replace(tzinfo=timezone.utc) + except ValueError: + # On first run, the from_time is in UTC since that is what's sent in the request, this covers this scenario + from_datetime = datetime.strptime(from_time, REQUEST_DATE_FORMAT).replace(tzinfo=timezone.utc) + most_recent_event_time = datetime.min.replace(tzinfo=timezone.utc) + + for event in events: + event_datetime = datetime.strptime(event["Signon_DateTime"], EVENT_DATE_FORMAT).replace(tzinfo=timezone.utc) + + # Add '_time' key to each event + event["_time"] = event.get("Signon_DateTime") + + # Update the most recent event time + if event_datetime > most_recent_event_time: + most_recent_event_time = event_datetime + + # Check for duplicates within ±1 second of from_time + if abs((event_datetime - from_datetime).total_seconds()) <= 1: + event_pseudo_id = generate_pseudo_id(event) + if event_pseudo_id not in 
previous_run_pseudo_ids: + non_duplicates.append(event) + else: + duplicates.append(event_pseudo_id) + else: + non_duplicates.append(event) + # Generate pseudo IDs for events within the last second of the most recent event + last_second_start_time = most_recent_event_time - timedelta(seconds=TIMEDELTA) + + if duplicates: + demisto.debug(f"Found {len(duplicates)} duplicate events: {duplicates}") + + for event in non_duplicates: + event_datetime = datetime.strptime(event["_time"], EVENT_DATE_FORMAT).replace(tzinfo=timezone.utc) + + if event_datetime >= last_second_start_time: + event_pseudo_id = generate_pseudo_id(event) + pseudo_ids_for_next_iteration.add(event_pseudo_id) + + return non_duplicates, pseudo_ids_for_next_iteration + + +def fetch_sign_on_logs( + client: Client, limit_to_fetch: int, from_date: str, to_date: str +): + """ + Fetches Sign On logs from workday. + Args: + client: Client object. + limit_to_fetch: limit of logs to fetch from Workday. + from_date: Events from time. + to_date: Events to time. + + Returns: + Sign On Events fetched from Workday. 
+ """ + sign_on_logs: list = [] + page = 1 # We assume that we will need to make one call at least + total_fetched = 0 # Keep track of the total number of events fetched + res, total_pages = client.retrieve_events( + from_time=from_date, to_time=to_date, page=1, count=999 + ) + sign_on_events_from_api = res.get("Workday_Account_Signon", []) + sign_on_logs.extend(sign_on_events_from_api) + demisto.debug(f"Request indicates a total of {total_pages} pages to paginate.") + pages_remaining = total_pages - 1 + + while (page <= total_pages and pages_remaining != 0) and res: + page += 1 + # Calculate the remaining number of events to fetch + remaining_to_fetch = limit_to_fetch - total_fetched + if remaining_to_fetch <= 0: + break + res, _ = client.retrieve_events( + from_time=from_date, to_time=to_date, page=page, count=limit_to_fetch + ) + pages_remaining -= 1 + fetched_count = len(sign_on_events_from_api) + total_fetched += fetched_count + + demisto.debug(f"Fetched {len(sign_on_events_from_api)} sign on logs.") + sign_on_logs.extend(sign_on_events_from_api) + demisto.debug(f"{pages_remaining} pages left to fetch.") + return sign_on_logs + + +""" COMMAND FUNCTIONS """ + + +def get_sign_on_events_command( + client: Client, from_date: str, to_date: str, limit: int +) -> tuple[list, CommandResults]: + """ + + Args: + limit: The maximum number of logs to return. + to_date: date to fetch events from. + from_date: date to fetch events to. + client: Client object. + + Returns: + Sign on logs from Workday. 
+ """ + + sign_on_events = fetch_sign_on_logs( + client=client, limit_to_fetch=limit, from_date=from_date, to_date=to_date + ) + + [_event.update({"_time": _event.get("Signon_DateTime")}) for _event in sign_on_events] + + demisto.info( + f"Got a total of {len(sign_on_events)} events between the time {from_date} to {to_date}" + ) + readable_output = tableToMarkdown( + "Sign On Events List:", + sign_on_events, + removeNull=True, + headerTransform=lambda x: string_to_table_header(camel_case_to_underscore(x)), + ) + + return sign_on_events, CommandResults(readable_output=readable_output) + + +def fetch_sign_on_events_command(client: Client, max_fetch: int, last_run: dict): + """ + Fetches sign on logs from Workday. + Args: + client: Client object. + max_fetch: max logs to fetch set by customer. + last_run: last run object. + + Returns: + Sign on logs from Workday. + + """ + current_time = datetime.utcnow() + if "last_fetch_time" not in last_run: + first_fetch_time = current_time - timedelta(minutes=1) + first_fetch_str = first_fetch_time.strftime(REQUEST_DATE_FORMAT) + from_date = last_run.get("last_fetch_time", first_fetch_str) + else: + from_date = last_run.get("last_fetch_time") + # Checksums in this context is used as an ID since none is provided directly from Workday. + # This is to prevent duplicates. + previous_run_pseudo_ids = last_run.get("previous_run_pseudo_ids", {}) + to_date = datetime.now(tz=timezone.utc).strftime(REQUEST_DATE_FORMAT) + demisto.debug(f"Getting Sign On Events {from_date=}, {to_date=}.") + sign_on_events = fetch_sign_on_logs( + client=client, limit_to_fetch=max_fetch, from_date=from_date, to_date=to_date + ) + + if sign_on_events: + demisto.debug(f"Got {len(sign_on_events)} sign_on_events. 
Begin processing.") + non_duplicates, pseudo_ids_for_next_iteration = process_and_filter_events( + events=sign_on_events, + previous_run_pseudo_ids=previous_run_pseudo_ids, + from_time=from_date + ) + + demisto.debug(f"Done processing {len(non_duplicates)} sign_on_events.") + last_event = non_duplicates[-1] + last_run = { + "last_fetch_time": last_event.get('Signon_DateTime'), + "previous_run_pseudo_ids": pseudo_ids_for_next_iteration, + } + demisto.debug(f"Saving last run as {last_run}") + else: + # Handle the case where no events were retrieved + last_run["last_fetch_time"] = current_time + non_duplicates = [] + + return non_duplicates, last_run + + +def module_of_testing(client: Client) -> str: # pragma: no cover + """Tests API connectivity and authentication + + Returning 'ok' indicates that the integration works like it is supposed to. + Connection to the service is successful. + Raises exceptions if something goes wrong. + + :type client: ``Client`` + :param Client: client to use + + :return: 'ok' if test passed, anything else will fail the test. + :rtype: ``str`` + """ + return client.test_connectivity() + + +""" MAIN FUNCTION """ + + +def main() -> None: # pragma: no cover + """main function, parses params and runs command functions""" + command = demisto.command() + args = demisto.args() + params = demisto.params() + + tenant_name = params.get("tenant_name") + base_url = params.get("base_url") + + if not base_url.startswith("https://"): + raise ValueError("Invalid base URL. 
Should begin with https://") + url = f"{base_url}/ccx/service/{tenant_name}/Identity_Management/{API_VERSION}" + + username = params.get("credentials", {}).get("identifier") + password = params.get("credentials", {}).get("password") + + verify_certificate = not params.get("insecure", False) + proxy = params.get("proxy", False) + max_fetch = arg_to_number(params.get("max_fetch")) or 10000 + + demisto.debug(f"Command being called is {command}") + try: + client = Client( + base_url=url, + tenant_name=tenant_name, + username=username, + password=password, + verify_certificate=verify_certificate, + proxy=proxy, + ) + + if command == "test-module": + return_results(module_of_testing(client)) + elif command == "workday-get-sign-on-events": + if args.get("relative_from_date", None): + from_time = arg_to_datetime( # type:ignore + arg=args.get('relative_from_date'), + arg_name='Relative datetime', + required=False + ).strftime(REQUEST_DATE_FORMAT) + to_time = datetime.utcnow().strftime(REQUEST_DATE_FORMAT) + else: + from_time = args.get("from_date") + to_time = args.get("to_date") + + sign_on_events, results = get_sign_on_events_command( + client=client, + from_date=from_time, + to_date=to_time, + limit=arg_to_number(args.get("limit", "100"), required=True), # type: ignore + ) + return_results(results) + if argToBoolean(args.get("should_push_events", "true")): + send_events_to_xsiam(sign_on_events, vendor=VENDOR, product=PRODUCT) + elif command == "fetch-events": + last_run = demisto.getLastRun() + demisto.debug(f"Starting new fetch with last_run as {last_run}") + sign_on_events, new_last_run = fetch_sign_on_events_command( + client=client, max_fetch=max_fetch, last_run=last_run + ) + demisto.debug(f"Done fetching events, sending to XSIAM. 
- {sign_on_events}") + send_events_to_xsiam(sign_on_events, vendor=VENDOR, product=PRODUCT) + if new_last_run: + # saves next_run for the time fetch-events is invoked + demisto.info(f"Setting new last_run to {new_last_run}") + demisto.setLastRun(new_last_run) + else: + raise NotImplementedError(f"command {command} is not implemented.") + + # Log exceptions and return errors + except Exception as e: + return_error( + f"Failed to execute {demisto.command()} command.\nError:\n{str(e)}" + ) + + +""" ENTRY POINT """ + +if __name__ in ("__main__", "__builtin__", "builtins"): + main() diff --git a/Packs/Workday/Integrations/WorkdaySignOnEventCollector/WorkdaySignOnEventCollector.yml b/Packs/Workday/Integrations/WorkdaySignOnEventCollector/WorkdaySignOnEventCollector.yml new file mode 100644 index 000000000000..da3720c8cc36 --- /dev/null +++ b/Packs/Workday/Integrations/WorkdaySignOnEventCollector/WorkdaySignOnEventCollector.yml @@ -0,0 +1,110 @@ +category: Analytics & SIEM +sectionOrder: +- Connect +- Collect +commonfields: + id: Workday Sign On Event Collector + version: -1 +configuration: +- name: base_url + display: Server URL (e.g., https://services1.myworkday.com) + required: true + defaultvalue: 'https://services1.myworkday.com' + type: 0 + additionalinfo: 'API endpoint of Workday server. Can be obtained from the View API Clients report in the Workday application.' + section: Connect +- name: tenant_name + display: Tenant Name + required: true + defaultvalue: + type: 0 + additionalinfo: 'The name of the Workday Tenant. Can be obtained from View API Clients report in Workday application.' 
+ section: Connect +- name: credentials + display: Username + required: true + defaultvalue: + type: 9 + displaypassword: Password + section: Connect + hiddenusername: false +- name: insecure + display: Trust any certificate (not secure) + required: false + type: 8 + additionalinfo: + section: Connect + advanced: true +- name: proxy + display: Use system proxy settings + required: false + type: 8 + additionalinfo: + section: Connect + advanced: true +- additionalinfo: The maximum number of sign on events to retrieve. Large amount of events may cause performance issues. + defaultvalue: '10000' + display: Max events per fetch + name: max_fetch + required: false + type: 0 + section: Collect + hidden: false +- defaultvalue: 1 + display: Events Fetch Interval + hidden: false + name: eventFetchInterval + required: false + type: 19 + section: Collect + advanced: true +description: Use the Workday Sign On Event Collector integration to get sign on logs from Workday. +display: Workday Sign On Event Collector +name: Workday Sign On Event Collector +script: + commands: + - name: workday-get-sign-on-events + description: Returns sign on events extracted from Workday. This command is used for developing/debugging and is to be used with caution, as it can create events, leading to events duplication and exceeding the API request limitation. + deprecated: false + arguments: + - auto: PREDEFINED + defaultValue: "False" + description: Set this argument to True in order to create events, otherwise the command will only display them. + name: should_push_events + predefined: + - "True" + - "False" + required: true + - name: limit + description: The maximum number of events to return. + required: false + isArray: false + defaultValue: 1000 + - name: from_date + description: 'The date and time of the earliest event. The default timezone is UTC/GMT. The time format is "{yyyy}-{mm}-{dd}T{hh}:{mm}:{ss}Z". Example: "2021-05-18T13:45:14Z" indicates May 18, 2021, 1:45PM UTC.' 
+ required: false + isArray: false + defaultValue: "" + - name: to_date + description: 'The time format is "{yyyy}-{mm}-{dd}T{hh}:{mm}:{ss}Z". Example: "2021-05-18T13:45:14Z" indicates May 18, 2021, 1:45PM UTC.' + required: false + isArray: false + defaultValue: "" + - name: relative_from_date + description: 'The query from date, for example, "5 minutes". Note: We strongly suggest to limit the value of this parameter.' + required: false + isArray: false + defaultValue: "" + outputs: [] + runonce: false + script: "-" + type: python + subtype: python3 + isfetchevents: true + dockerimage: demisto/python3:3.10.13.72123 + feed: false +fromversion: 8.2.0 +tests: +- No tests (auto formatted) +marketplaces: +- marketplacev2 diff --git a/Packs/Workday/Integrations/WorkdaySignOnEventCollector/WorkdaySignOnEventCollector_description.md b/Packs/Workday/Integrations/WorkdaySignOnEventCollector/WorkdaySignOnEventCollector_description.md new file mode 100644 index 000000000000..86f2da68252e --- /dev/null +++ b/Packs/Workday/Integrations/WorkdaySignOnEventCollector/WorkdaySignOnEventCollector_description.md @@ -0,0 +1,7 @@ +## Workday Event Collector + +Use this integration to collect Signon events automatically from Workday. + +In order to use this integration, you need to enter your Workday credentials in the relevant integration instance parameters. + +The API Endpoint of Workday server can be obtained from View API Clients report in Workday application. 
\ No newline at end of file diff --git a/Packs/Workday/Integrations/WorkdaySignOnEventCollector/WorkdaySignOnEventCollector_image.png b/Packs/Workday/Integrations/WorkdaySignOnEventCollector/WorkdaySignOnEventCollector_image.png new file mode 100644 index 0000000000000000000000000000000000000000..1426dc48413218e3c859ace23ebd9f8ce3605796 GIT binary patch literal 4818 zcmV;@5-shCP)mX?Vy%;3NPr}zE-|3CNKv0(B9UBlz6_0;WruX%`f0LHSBrdR?KC;2yG&weS`E8(}M&yA^0_C%WgewaumQjud7vV!QUqU-lo2DC4v$} z(FGX~+Sw1#Hv{k|F>eJGVBIu1dhHB(y<406a6k8bkX}N9Rn_B93@iXS7{FYDzmNCM zC>q zw*bIx3Mv7eSq5|C%(9j8Gur#`JhP5g-72d9=)djvzU0`xnPt$q9@pjiXuBPAz7z8T zi%#+~Nc|uRb9usQ)=ZN_0KlCDWRr2PTWEos&odz5`$hnA7vkz{D8LxTSAewNy0c`D z=t9{knjypZm*3a29sP*UqcQL2n0F_h4Fs0!ab0}h%zdI+`uXlPEpk}Qn=f^P@wp#F z0s5n)j-R_`$?kNS1d+lAAiZ?83Tm5d!S|cHTjYebE%MIvIxmEJTV!8&hjCc%CMplt z=RVwTvjrNTFJE`Q?7{KsS$xCeAiV&J=ExBD8S(N?Tjk@h;S+I8OZxUn`g6qUFwtI%WAHZJj zk~s3gcuiLhK7N{*`v z|32~o9=h>)qvh}Nc;28TSEpn$1p!`_aLIkq&sC`NR-Yrgu;04a9m}&{I^sUf)}o)9 zCOgAz|B5afd~G7oi=CCQUe%1YRVKN9T9!+`UM?bk+}C{v zxZ{~!uWwU6)w>YnrDFc|Q$)zilsF^kK%&giOcT^KF;p zG@LIAx>{tVEjymI2~2DS>H2h_5czT-ceTkW_7bF*WgY&mLfad3-1cb6OhQ~D0#Ik$ zxZ6!3F@uiB3^2=0Q^h&jWTCy%?4he>ww)fc-H@vnc^Aw~nXb+LeY)!`*i(LG%bv17I$D^8X;5X7~%#$Bt|fo*aO+mqSqtAQMVe9L8Bb(cNQ z(s;}ONfKXjatFoX57&6r0+&Xiw8#zVu`imMoAc7V9F`Srdl%2O3wI2WQGNwruGGUh zuCpMoXI|lM-j=^gJW{_X8B-CfuGWMK0P7nfxN9BLe@#z;x-CJjzLw*gFF!P^rsA5H z6iebUuk~2$4%jta_U|??=v4?^+xix$v2*vKjh*|;M1PoRs1D2vrg7K2uD&5SXlP-< zzKPa~iw#r-i!Hc4np>PB2e5scoNXYJJ0O4a1PxymT`UpZG+znY{(iVI3u0l;kX$ji zLnbbb@?O}YZR)a{`zKsGKVJ-*<)jr+&hzizso42O?HlE5435K3tBlkiAzZAkn!g~? 
zaj$uRNiJ6LbssGEQeG6ezW$X(2H-u9oj)Sy1rJNp?<0VrrHaxv0=V7_TIIw)MBHOo z!xi4D z9KUnae8ARABCeeTNL?<3s*h_o`|e$sGaT0SfCB7=bL@;tfSV(89{#OG%*4l?KU4wK ztQ-J)!8r=B=RcGMup`#`e{MQ<#GK~l;mW73QDZ+R<62`#l!fB36**^8mX@d6vOrFc z$s%{iG+DsY%`uhNHV4WSRQa&Rf@=V5fT}GV1@@ZC-$Ij5P`z3(U33kABk+@+Zzw0n z&E@@c?ftW4OsdvigKBR?Mzbi{o;{|rEhzvVyup9d@IuF4Z);v&zKl-`Nh!5<@_?f| z_L1YS+&lhm0nD~wu7k%~VI`&ZNR9!o!^bY#d)EmIcXds9&!28AILro^%9l|1*Zt6c z{qPALdkOx=W6~Zei_-nsa;`yEBcIgpJjgG=l@82d!zAz$!I5c*n{DRRQKk(_!D+JY-}ygg(lz)o4l`Fmjg zN@mzn>Wu1&!B@3iW1kbEf_sOaXTo*h&0mf6{vzXlM?P2AdH!$N2SGoQ0yq}3?o0m6 z*}8K^srx7&o#Fcjr11l56|9NuKEd~oOG(X1A4h2$aEnaxF zGNcx9>0a6vTX66bIbjJ76yAgWjd=b8;D#sHz{Z!!c%8qRTCRN-Ambr|E}3)#`)`+$ z|1NDOl{!8l%y=hUuYyo(Rf=4!0>HuNA@Dwo+oFmFFH&D>A6W+r!M?Mk(4l3B^ zm$PW_gK$6(WQ4w6O2KEe-FdRul7iXFYq0~5e|9{dB@uxdyEu}D-#{MzzlM*sAwu&4 zs~}!@g6>gT$bC+aL$W51;={A=elDPsjaakifw@q1DT@ zEtv#S>r^X)?x79-esQ~I+@LY-BZV#-Le0}%lPD~!nF_EMY*PNb;PLY9RDz6~&9LC$ z*KP;^x!x+-hWes^ur@M7F3%SsVDx4bVllkU&5f<3VZnPGFW6%h<8wv-@r!mgJT7Cu zzV=i~#&ybymYnQ&R_p~As^=&KTi{uZ0#GqPb*11-2BWKZt_!tKm9tiCc2?qb9tw~* z>g^A2Y8=r8(W(im7aW4mw-QLiuG7k67Q7F0pCZ%P_{ZAjtXJnFwwp@_U)!9fJIf(4 zG^jg-h8{bAgtm<`K-L#JzF-t!8P{*sK3b#3%;ja*T2Us!D>U82@w3>b9tN2&*6FJ+ zb&S;EN!MlpP?yaR0zu{(gLRx$0;{w+#^T90n2QeQqOH;)J|C`Lrg0JX9;bcw9Emck ziAfq{_Lz+I->!YJyuuHI$H-dgJUd@4SiO;v(PnW*=+>@E&iorYch|T$Ni{Ei#ieo4 zEcztBBi>y^uyRbwEFmU+OWhF#Q}REjjn4XL>2!w#hpyg`u{K(bbMeJ+zQ$uO*s=o^ zhGc=RFSM5dF0?mlUuL;r09Fg;S&YKXLOeW8-J%z)5wBf;N)vj-Om~m4hF(=R1WJ`xio#vn3SLT1b{#o!udc6pXkjx50HvglHv)hawvT+fwDF->fA&CX~D6ED(p z0A7{=xOr;;|I_3R;e6#QP9UxFl!aq>i2-h(7z14NkJG+1zzjpx0A_eIS#Ylcsq-Dc zW)p)tT-r>Pv63pvGan$KkFF3qT!H;_<5S>vM&%o*sODTlJbR3UrVAtnDK>IxZ$ z2`J-=YFP*3uW$ds3qsV*HAy&2!J;dn5_myFJjS{H!h!?U+D%3~&GcL1Tnn~(4-1lrrfGf(RWF5?X z7~71e$Sz?hELuC=A#ddpSCmRrdb)2Yv?-Hp9h~$A3~R<)-0i|lOniUL97kml@Vv%F zr*EuC#KQ2gLB~=?jn;@@ICUkzP+tm82L=<%*PzuvwpxHm-=zR@#qcq>pgXBG>UI)i zWb_q#3a{S0phq)q27IDmS%}023SnN1X&rLx$ zD8P?b++?}=tRx7a#?0{d0P)ZrJpa8?Xg^Ar32Qwc_L)R31u)5F_*_Thbm(}wD<0F} 
z?}xBC2|RQF;Hm1yNmcKwofh-9h6kVk$e1~E0c`J?hZRGc;^DCxl9-+ zT-JcJdRjH-8euQhQjL_psw}mSRDqp*E616vvPKqdUGx)j%I%E7M)4|c?Sk7CKq?h4 zcwB$ddWyj%$``!`$C6*;%~s@#Nc4CYjiik)Omuu)wm6W3QNpWPw-3+FD@G`|-XS}s zX2L~E0-Tf~Wrl3p-fo>7w>S828d3@-AWt|Y@3l7#jc*650jy+`2rR^l0X$cz3wFXu zz9kvse+}+@0W9PaEpgmIv+C1|TO7D(FSwAIH)6}x{4A^s9VO?-SEU_$F|on;KuX~U zOkb^}LQA))Io=t&kf3qjuGiS{R;+h1FJAIY6rHC@gIoxY;>rnIK0#lL6dj<&f}OfR_Egtp+pTl;K}gWA6|1c6(Oeb zn0h8yd~;>Wsa0`)l2(X8dex8t7XUK%(WEl{=Ii-+uR$816k9KoS94JkTqcd7{$Sp^ s=I7xB25BIZwPM6nS6}P*7Y^?K0sibwW-e79uK)l507*qoM6N<$g0wkMMF0Q* literal 0 HcmV?d00001 diff --git a/Packs/Workday/Integrations/WorkdaySignOnEventCollector/WorkdaySignOnEventCollector_test.py b/Packs/Workday/Integrations/WorkdaySignOnEventCollector/WorkdaySignOnEventCollector_test.py new file mode 100644 index 000000000000..95821d56184d --- /dev/null +++ b/Packs/Workday/Integrations/WorkdaySignOnEventCollector/WorkdaySignOnEventCollector_test.py @@ -0,0 +1,629 @@ +import json +import unittest +from typing import Any +from unittest.mock import patch +from freezegun import freeze_time + +from CommonServerPython import DemistoException +from WorkdaySignOnEventCollector import ( + get_from_time, + fletcher16, + generate_pseudo_id, + convert_to_json, + Client, + fetch_sign_on_logs, + get_sign_on_events_command, + fetch_sign_on_events_command, + process_and_filter_events, + main, + VENDOR, + PRODUCT, +) + + +def test_get_from_time() -> None: + """ + Given: + - A time duration in seconds (3600 seconds or 1 hour ago). + + When: + - The function `get_from_time` is called to convert this duration to a UTC datetime string. + + Then: + - Ensure that the returned value is a string. + - Validate that the string ends with 'Z', indicating it's in UTC format. + """ + # Given: A time duration of 3600 seconds (or 1 hour) ago. + seconds_ago = 3600 # 1 hour ago + + # When: Calling the function to convert this to a UTC datetime string. 
+ result: Any = get_from_time(seconds_ago) + + # Then: Validate the type and format of the returned value. + assert isinstance(result, str) + assert result.endswith("Z") # Check if it's in the right format + + +def test_fletcher16() -> None: + """ + Given: + - Two types of byte strings, one containing the word 'test' and another being empty. + + When: + - The function `fletcher16` is called to calculate the checksum for these byte strings. + + Then: + - Ensure that the checksum calculated for the byte string 'test' matches the expected value of 22976. + - Validate that the checksum for an empty byte string is 0. + """ + # Given: A byte string containing the word 'test'. + data = b"test" + + # When: Calling `fletcher16` to calculate the checksum. + result: Any = fletcher16(data) + + # Then: Validate that the checksum matches the expected value. + expected = 22976 + assert result == expected + + # Given: An empty byte string. + data = b"" + + # When: Calling `fletcher16` to calculate the checksum. + result = fletcher16(data) + + # Then: Validate that the checksum for an empty byte string is 0. + expected = 0 + assert result == expected + + +def test_generate_pseudo_id() -> None: + """ + Given: + - Four different event dictionaries: + 1. A valid event dictionary with known values. + 2. An empty event dictionary. + 3. An event dictionary missing the "Signon_DateTime" key. + 4. A large event dictionary. + + When: + - Calling `generate_pseudo_id` to calculate a unique ID based on the event dictionary. + + Then: + - For the first case, ensure that the unique ID matches the expected value. + - For the second and third cases, ensure that an exception is raised. + - For the fourth case, ensure the function can handle large dictionaries without errors. + """ + + # Given: A valid event dictionary with known values. 
+ event1 = { + "Short_Session_ID": "12345", + "User_Name": "ABC123", + "Successful": 1, + "Signon_DateTime": "2023-09-04T07:47:57.460-07:00", + } + # When: Calling `generate_pseudo_id` to calculate the unique ID. + event1_str: str = json.dumps(event1, sort_keys=True) + expected_checksum1: Any = fletcher16(event1_str.encode()) + expected_unique_id1: str = f"{expected_checksum1}_{event1['Signon_DateTime']}" + result1: str = generate_pseudo_id(event1) + # Then: Validate that the unique ID matches the expected value. + assert result1 == expected_unique_id1 + + # Given: An empty event dictionary. + event2 = {} + # When & Then: Calling `generate_pseudo_id` and expecting an exception. + try: + generate_pseudo_id(event2) + except DemistoException as e: + assert ( + str(e) + == "While calculating the pseudo ID for an event, an event without a Signon_DateTime was " + "found.\nError: 'Signon_DateTime'" + ) + else: + raise AssertionError("Expected DemistoException but did not get one") + + # Given: An event dictionary missing the "Signon_DateTime" key. + event3 = { + "Short_Session_ID": "12345", + "User_Name": "ABC123", + "Successful": 1, + } + # When & Then: Calling `generate_pseudo_id` and expecting an exception. + try: + generate_pseudo_id(event3) + except DemistoException: + pass + else: + raise AssertionError("Expected DemistoException but did not get one") + + # Given: A large event dictionary. + event4 = {str(i): i for i in range(10000)} # Create a large dictionary + event4["Signon_DateTime"] = "2023-09-04T07:47:57.460-07:00" # Add a Signon_DateTime key + # When & Then: Calling `generate_pseudo_id` to check if the function can handle it. + assert generate_pseudo_id(event4) + + +def test_process_and_filter_events() -> None: + """ + Given: + - A list of two valid sign-on events that differ by 1 second in their "Signon_DateTime". + - An initial time ("from_time") that matches the "Signon_DateTime" of one of the events. + - An empty set of pseudo_ids from the previous run. 
+ + When: + - Calling the `process_and_filter_events` function to filter out duplicates and process events for the next + iteration. + + Then: + - The list of non-duplicate events should match the original list of events. + - The set of pseudo_ids for the next iteration should contain two elements. + - Each event in the list of non-duplicates should have an additional "_time" key that matches its + "Signon_DateTime". + """ + + # Given: A list of two valid sign-on events and other initial conditions + events = [ + { + "Short_Session_ID": "12345", + "User_Name": "ABC6789", + "Successful": 1, + "Signon_DateTime": "2023-09-04T07:47:57.460-07:00", + }, + { + "Short_Session_ID": "12346", + "User_Name": "ABC6790", + "Successful": 1, + "Signon_DateTime": "2023-09-04T07:47:57.460-07:00", + }, + ] + from_time: str = "2021-09-01T12:00:00Z" + previous_run_pseudo_ids: set[ + Any + ] = set() # Assume no previous checksums for simplicity + + # When: Calling the function to test + non_duplicates, pseudo_ids_for_next_iteration = process_and_filter_events( + events, from_time, previous_run_pseudo_ids + ) + + # Then: Validate the function's output + assert ( + non_duplicates == events + ) # Check if the list of non-duplicates is as expected + assert ( + len(pseudo_ids_for_next_iteration) == 2 + ) # Check if the set of pseudo_ids for next iteration is updated + + # Check if '_time' key is added to each event + for event in non_duplicates: + assert "_time" in event + assert event["_time"] == event["Signon_DateTime"] + + +def test_convert_to_json() -> None: + """ + Given: + - A sample XML response string containing a single 'Workday_Account_Signon' entry with a 'Signon_DateTime'. + + When: + - Calling the 'convert_to_json' function to convert the XML data to a Python dictionary. + + Then: + - The function should return two Python dictionaries. + - The first dictionary should represent the entire XML structure. 
+ - The second dictionary should contain just the 'Workday_Account_Signon' entries. + - Both dictionaries should correctly reflect the 'Signon_DateTime' from the original XML. + """ + + # Given: Test with XML data (this is a simplified version for the sake of the test) + xml_response = """ + + + + + + 2023-09-04T07:47:57.460-07:00 + + + + + + """ + + # When: Calling the function to test + raw_json_response, account_signon_data = convert_to_json(xml_response) + + # Then: Check if the converted data matches the expected structure + assert ( + raw_json_response["Envelope"]["Body"]["Get_Workday_Account_Signons_Response"][ + "Response_Data" + ]["Workday_Account_Signon"][0]["Signon_DateTime"] + == "2023-09-04T07:47:57.460-07:00" + ) + + assert ( + account_signon_data["Workday_Account_Signon"][0]["Signon_DateTime"] + == "2023-09-04T07:47:57.460-07:00" + ) + + +def test_generate_workday_account_signons_body() -> None: + """ + Given: + - A Client object initialized with a base URL, verification settings, a tenant name, and login credentials. + - Parameters specifying the page, count, and time range for fetching Workday sign-on events. + + When: + - Calling the 'generate_workday_account_signons_body' method on the Client object to generate the SOAP request body. + + Then: + - The returned SOAP request body should contain all the specified parameters. + - The body should also contain the username and password for authentication. 
+ """ + + # Given: Initialize a Client object with sample data + client = Client( + base_url="", + verify_certificate=True, + proxy=False, + tenant_name="test_tenant", + username="test_user", + password="test_pass", + ) + + # When: Generate the SOAP request body + body = client.generate_workday_account_signons_body( + page=1, + count=10, + to_time="2021-09-01T12:00:00Z", + from_time="2021-09-01T11:00:00Z", + ) + + # Then: Verify that the SOAP request body contains all the specified parameters + assert "1" in body + assert "10" in body + assert "2021-09-01T11:00:00Z" in body + assert "2021-09-01T12:00:00Z" in body + assert "test_user" in body + assert ( + 'test_pass' # noqa:E501 + in body + ) + + +def test_generate_test_payload() -> None: + """ + Given: + - A Client object initialized with a base URL, verification settings, a tenant name, and login credentials. + - Parameters specifying the time range for fetching Workday sign-on events for the test payload. + + When: + - Calling the 'generate_test_payload' method on the Client object to generate a SOAP request payload for testing. + + Then: + - The returned SOAP request payload should contain all the specified parameters. + - The payload should also contain the username and password for authentication. 
+ """ + + # Given: Initialize a Client object with sample data + client = Client( + base_url="", + verify_certificate=True, + proxy=False, + tenant_name="test_tenant", + username="test_user", + password="test_pass", + ) + + # When: Generate the SOAP request payload for testing + payload = client.generate_test_payload( + from_time="2021-09-01T11:00:00Z", to_time="2021-09-01T12:00:00Z" + ) + + # Then: Verify that the SOAP request payload contains all the specified parameters + assert "1" in payload + assert "1" in payload + assert "2021-09-01T11:00:00Z" in payload + assert "2021-09-01T12:00:00Z" in payload + assert "test_user" in payload + assert ( + 'test_pass' # noqa:E501 + in payload + ) + + +def test_convert_to_json_valid_input() -> None: + """ + Given: + - An XML-formatted response string from the Workday API, containing sign-on event data. + + When: + - Calling the 'convert_to_json' function to convert the XML response to JSON format. + + Then: + - The function should return two JSON objects: one containing the full JSON-converted data, + and another containing only the sign-on event data. + - Both JSON objects should be properly formatted and contain the expected data fields. 
+ """ + + # Given: An XML-formatted response string from the Workday API + response = """ + + + + + + 2021-09-01T11:00:00Z + + + + + + """ + + # When: Converting the XML to JSON + full_json, account_signon_data = convert_to_json(response) + + # Then: Validate the full_json data structure + envelope = full_json.get("Envelope", {}) + body = envelope.get("Body", {}) + response = body.get("Get_Workday_Account_Signons_Response", {}) + response_data = response.get("Response_Data", {}) + workday_account_signons = response_data.get("Workday_Account_Signon", []) + + # Assertions for full_json + assert isinstance( + workday_account_signons, list + ), "workday_account_signons is not a list" + assert workday_account_signons, "workday_account_signons is empty" + assert workday_account_signons[0].get("Signon_DateTime") == "2021-09-01T11:00:00Z" + + # Then: Validate the account_signon_data structure + workday_account_signons_data = account_signon_data.get("Workday_Account_Signon", []) + + # Assertions for account_signon_data + assert workday_account_signons_data + assert ( + workday_account_signons_data[0].get("Signon_DateTime") == "2021-09-01T11:00:00Z" + ) + + +class TestFetchSignOnLogs(unittest.TestCase): + def setUp(self) -> None: + """ + Given: + - A Client object with mock URL, tenant, username, and password. + + When: + - Setting up each unit test case. + + Then: + - The Client object should be initialized and ready for testing. + """ + self.client = Client( + "mock_url", + False, + False, + "mock_tenant", + "mock_user", + "mock_pass", + ) + + @patch.object(Client, "retrieve_events") + def test_fetch_sign_on_logs_single_page(self, mock_retrieve_events) -> None: + """ + Given: + - A mock Client object with a retrieve_events method that returns a sample response. + - The sample response contains a single Workday sign-on event. + + When: + - Calling the fetch_sign_on_logs function to fetch sign-on logs. + + Then: + - The function should return a list of events. 
+ - The length of the list should be 1. + - The event in the list should have the User_Name "John". + """ + + # Given: Sample data to be returned by the mock + mock_response = ( + { + "Workday_Account_Signon": [ + { + "Signon_DateTime": "2021-09-01T11:00:00Z", + "User_Name": "John", + "Short_Session_ID": "123456", + "Successful": 1, + } + ] + }, + 1, + ) + + # Setup: Configure the mock to return the sample data + mock_retrieve_events.return_value = mock_response + + # When: Fetching sign-on logs + events = fetch_sign_on_logs( + self.client, 10, "2021-09-01T00:00:00Z", "2021-09-02T00:00:00Z" + ) + + # Then: Validate the function's return value + assert len(events) == 1 + assert events[0]["User_Name"] == "John" + + +class TestGetSignOnEventsCommand(unittest.TestCase): + def test_get_sign_on_events_command(self) -> None: + """ + Given: + - A Client object with mock settings. + - A patch for the fetch_sign_on_logs function to return a mock event. + - The mock event has details such as Signon_DateTime, User_Name, Short_Session_ID, and Successful status. + + When: + - Calling the get_sign_on_events_command function to get sign-on events between two date-time ranges. + + Then: + - The function should return a list of events and results. + - The length of the list should be 1. + - The event in the list should have the User_Name "John" and _time "2021-09-01T11:00:00Z". + - The readable_output of the results should start with "### Sign On Events List:". 
+ """ + + # Given: Sample data to be returned by the mock + mock_events = [ + { + "Signon_DateTime": "2023-09-04T07:47:57.460-07:00", + "User_Name": "John", + "Short_Session_ID": "123456", + "Successful": 1, + "_time": "2021-09-01T11:00:00Z", # This is added by the process_events function + } + ] + + # Setup: Use patch to mock the fetch_sign_on_logs function + with patch( + "WorkdaySignOnEventCollector.fetch_sign_on_logs", return_value=mock_events + ): + client = Client( + "mock_url", + False, + False, + "mock_tenant", + "mock_user", + "mock_pass", + ) + + # When: Calling the get_sign_on_events_command + events, results = get_sign_on_events_command( + client, "2021-09-01T00:00:00Z", "2021-09-02T00:00:00Z", 10 + ) + + # Then: Validate the function's return value + assert len(events) == 1 + assert events[0]["User_Name"] == "John" + assert events[0]["_time"] == "2023-09-04T07:47:57.460-07:00" + assert results.readable_output.startswith("### Sign On Events List:") + + +@freeze_time("2023-09-04T00:00:00.000-07:00") +def test_fetch_sign_on_events_command_single_page() -> None: + """ + Given: + - A Client object with mock settings. + - A patch for the Client's retrieve_events method to return a mock event. + - A patch for demisto.getLastRun function to return a mock last_run dictionary. + - The mock event has details such as Signon_DateTime, User_Name, Short_Session_ID, and Successful status. + - The mock last_run dictionary contains last_fetch_time and previous_run_pseudo_ids. + + When: + - Calling the fetch_sign_on_events_command function to fetch sign-on events. + + Then: + - The function should return a list of events and a new_last_run dictionary. + - The length of the list should be 1. + - The event in the list should have the User_Name "John" and _time "2021-09-01T11:00:00Z". + - The new_last_run dictionary should have last_fetch_time updated to "2021-09-01T11:00:00Z". 
+ """ + + # Given: Sample data to be returned by the mock + mock_events = [ + { + "Signon_DateTime": "2023-09-04T07:47:57.460-07:00", + "User_Name": "John", + "Short_Session_ID": "123456", + "Successful": 1, + "_time": "2023-09-04T07:47:57.460-07:00", # This is added by the process_events function + } + ] + + # Setup: Mock the client's retrieve_events method and demisto.getLastRun function + mock_retrieve_response = ({"Workday_Account_Signon": mock_events}, 1) + mock_last_run = { + "last_fetch_time": "2023-09-04T07:47:57.460-07:00", + "previous_run_pseudo_ids": set(), + } + + # When: Calling the fetch_sign_on_events_command + with patch.object( + Client, "retrieve_events", return_value=mock_retrieve_response + ), patch("demistomock.getLastRun", return_value=mock_last_run): + client = Client( + "mock_url", + False, + False, + "mock_tenant", + "mock_user", + "mock_pass", + ) + events, new_last_run = fetch_sign_on_events_command(client, 10, mock_last_run) + + # Then: Validate the function's return value + assert len(events) == 1 + assert events[0]["User_Name"] == "John" + assert events[0]["_time"] == "2023-09-04T07:47:57.460-07:00" + assert new_last_run["last_fetch_time"] == "2023-09-04T07:47:57.460-07:00" + + +def test_main_fetch_events() -> None: + """ + Given: + - A set of mock parameters for the client. + - Mock functions for demisto's getLastRun, setLastRun, and params. + - Mock for the fetch_sign_on_events_command function to return mock events and new last_run data. + - Mock for the send_events_to_xsiam function. + + When: + - The main function is called and the command is 'fetch-events'. + + Then: + - Ensure that fetch_sign_on_events_command is called with the correct arguments. + - Ensure that send_events_to_xsiam is called with the mock events. + - Ensure that setLastRun is called to update the last_run data. 
+ """ + # Given: Mock parameters and last run data + mock_params = { + "tenant_name": "TestTenant", + "max_fetch": "10000", + "base_url": "https://testurl.com", + "credentials": {"identifier": "TestUser", "password": "testpass"}, + "insecure": True, + } + + # Mocking demisto.command to return 'fetch-events' + with patch("demistomock.command", return_value="fetch-events"), patch( + "demistomock.getLastRun", return_value={"some": "data"} + ), patch("demistomock.setLastRun") as mock_set_last_run, patch( + "demistomock.params", return_value=mock_params + ), patch( + "WorkdaySignOnEventCollector.Client" + ) as mock_client, patch( + "WorkdaySignOnEventCollector.fetch_sign_on_events_command" + ) as mock_fetch_sign_on_events_command, patch( + "WorkdaySignOnEventCollector.send_events_to_xsiam" + ) as mock_send_events_to_xsiam: + # Mocking the output of fetch_sign_on_events_command + mock_events = [{"event": "data"}] + mock_new_last_run = {"new": "data"} + mock_fetch_sign_on_events_command.return_value = ( + mock_events, + mock_new_last_run, + ) + + # When: Calling the main function + main() + + # Then: Validate the function calls and arguments + mock_fetch_sign_on_events_command.assert_called_with( + client=mock_client.return_value, + max_fetch=10000, + last_run={"some": "data"}, + ) + + mock_send_events_to_xsiam.assert_called_with( + mock_events, vendor=VENDOR, product=PRODUCT + ) + mock_set_last_run.assert_called_with(mock_new_last_run) diff --git a/Packs/Workday/Integrations/WorkdaySignOnEventCollector/command_examples b/Packs/Workday/Integrations/WorkdaySignOnEventCollector/command_examples new file mode 100644 index 000000000000..1b69d3470859 --- /dev/null +++ b/Packs/Workday/Integrations/WorkdaySignOnEventCollector/command_examples @@ -0,0 +1 @@ +workday-get-sign-on-events should_push_events=false limit=1 from_date="2023-08-23T18:20:03Z" to_date="2023-08-23T18:20:08Z" \ No newline at end of file diff --git 
a/Packs/Workday/Integrations/WorkdaySignOnEventCollector/test_data/example_event.json b/Packs/Workday/Integrations/WorkdaySignOnEventCollector/test_data/example_event.json new file mode 100644 index 000000000000..000fa21d00ae --- /dev/null +++ b/Packs/Workday/Integrations/WorkdaySignOnEventCollector/test_data/example_event.json @@ -0,0 +1,39 @@ +{ + "Workday_Account_Signon": { + "Signon_DateTime": "2023-08-08T23:04:01.788-07:00", + "User_Name": 123456, + "Successful": 1, + "Failed_Signon": 0, + "Invalid_Credentials": 0, + "Password_Changed": 0, + "Forgotten_Password_Reset_Request": 0, + "Signon_IP_Address": "Workday Internal", + "Signoff_DateTime": "2023-08-08T23:10:17.310-07:00", + "Authentication_Channel": "Web Services", + "Authentication_Type": "Trusted", + "Workday_Account_Reference": { + "ID": { + "WID": "1234567890qwertyuiop", + "System_User_ID": 123456, + "WorkdayUserName": 123456 + } + }, + "System_Account_Signon_Reference": { + "ID": "1234567890" + }, + "Request_Originator_Reference": { + "ID": "1234567890qwertyuiop" + }, + "Invalid_for_Authentication_Channel": 0, + "Invalid_for_Authentication_Policy": 0, + "Required_Password_Change": 0, + "Account_Disabled_or_Expired": 0, + "MFA_Authentication_Exempt": 0, + "Has_Grace_Period_for_MFA": 0, + "MFA_Enrollment": 0, + "Short_Session_ID": "abc123", + "Device_is_Trusted": 0, + "Tenant_Access_Read_Only": 0 + } +} + diff --git a/Packs/Workday/Integrations/WorkdaySignonEventGenerator/README.md b/Packs/Workday/Integrations/WorkdaySignonEventGenerator/README.md new file mode 100644 index 000000000000..fe283ec6c71c --- /dev/null +++ b/Packs/Workday/Integrations/WorkdaySignonEventGenerator/README.md @@ -0,0 +1,15 @@ +Generates mock signon events for Workday Signon Event Collector. Use these for testing and development. +This integration was integrated and tested with version 37.0 of WorkdaySignonEventGenerator. + +## Configure Workday Signon Event Generator (Beta) on Cortex XSOAR + +1. 
import random
import string

from gevent.pywsgi import WSGIServer
from flask import Flask, request, Response
from CommonServerPython import *  # noqa: F401,F403 - provides demisto, re, datetime, timedelta, etc.

import urllib3

# Disable insecure warnings
urllib3.disable_warnings()

''' CONSTANTS '''
APP: Flask = Flask('xsoar-workday-signon')
DATE_FORMAT = '%Y-%m-%dT%H:%M:%SZ'  # ISO8601 format with UTC, default in XSOAR

# Template for one mock Workday_Account_Signon entry. Placeholders are filled
# per generated event by xml_generator().
# NOTE(review): the XML element tags appear to have been stripped from this
# copy of the file; restore the original Workday SOAP markup if the generated
# responses fail to parse downstream.
SIGNON_ITEM_TEMPLATE = """
            {signon_datetime}
            {user_name}
            1
            0
            0
            0
            0
            Workday Internal
            Web Services
            Trusted

                dc28d59c523f1010e415d814cbd50002
                12345678
                {user_name}


                4328$170406698


                02f60ab5ed5744c0afbc9cc5096d7a73

            0
            0
            0
            0
            0
            0
            0
            {short_session_id}
            0
            0
        """


def generate_xml_template(from_date: str, to_date: str, count: int, total_responses: int):
    """Return the outer mock SOAP response envelope as a string.

    The body contains the echoed request criteria/filter and paging metadata;
    the literal marker %%workday_account_signon_items%% is later replaced with
    the generated sign-on entries.

    :param from_date: requested range start, echoed back in the response.
    :param to_date: requested range end, echoed back in the response.
    :param count: page size echoed in the response filter.
    :param total_responses: total/page result counts echoed in the response.
    """
    # NOTE(review): XML tags appear stripped in this copy - see SIGNON_ITEM_TEMPLATE.
    return f"""
        {from_date}
        {to_date}

        {from_date}
        1
        {count}

        {total_responses}
        1
        {total_responses}
        1

        %%workday_account_signon_items%%
"""


def random_datetime_in_range(start_str: str, end_str: str):
    """Return a random DATE_FORMAT timestamp between start_str and end_str (inclusive)."""
    start_datetime = datetime.strptime(start_str, DATE_FORMAT)
    end_datetime = datetime.strptime(end_str, DATE_FORMAT)

    # Pick a whole-second offset uniformly inside the window.
    random_seconds = random.randint(0, int((end_datetime - start_datetime).total_seconds()))
    return (start_datetime + timedelta(seconds=random_seconds)).strftime(DATE_FORMAT)


def random_string(length: int = 10):
    """Return a random string of uppercase letters and digits (default length 10)."""
    return ''.join(random.choices(string.ascii_uppercase + string.digits, k=length))


def xml_generator(from_datetime: str, to_datetime: str, count: int):
    """Build a full mock SOAP response with 1..count random sign-on entries.

    :param from_datetime: range start in DATE_FORMAT.
    :param to_datetime: range end in DATE_FORMAT.
    :param count: upper bound on the number of generated entries.
    :return: the populated XML response string.
    """
    # Generate a randomized Signon_DateTime once; all entries in this response
    # share it (the generator varies user names and session IDs instead).
    random_signon_datetime = random_datetime_in_range(from_datetime, to_datetime)

    # Determine the number of Workday_Account_Signon items (at least one).
    num_signon_items = random.randint(1, count)

    template = generate_xml_template(from_date=from_datetime, to_date=to_datetime,
                                     total_responses=num_signon_items,
                                     count=num_signon_items)

    # Generate the individual Workday_Account_Signon items.
    signon_items = []
    for _ in range(num_signon_items):
        signon_item = SIGNON_ITEM_TEMPLATE.format(
            signon_datetime=random_signon_datetime,
            user_name=random_string(),
            short_session_id=random_string(length=6)
        )
        signon_items.append(signon_item)

    # Insert the generated items into the main template at the marker.
    populated_template = template.replace("%%workday_account_signon_items%%", "\n".join(signon_items))

    return populated_template


@APP.route('/', methods=['POST'])
def mock_workday_endpoint():
    """Flask handler: parse the incoming request and answer with mock sign-on XML."""
    request_text = request.get_data(as_text=True)
    demisto.info(request_text)  # log the raw request for debugging

    # Regex patterns extracting the request's time range and page size.
    # NOTE(review): the XML tags inside these patterns appear stripped in this
    # copy of the file; restore the original tagged patterns if extraction
    # stops matching real requests.
    from_datetime_pattern = r'(.*?)'
    to_datetime_pattern = r'(.*?)'
    count_pattern = r'(\d+)'

    # Extract values using regex, falling back to fixed defaults.
    from_datetime_match = re.search(from_datetime_pattern, request_text)
    from_datetime = from_datetime_match.group(1) if from_datetime_match else "2023-08-23T18:20:03Z"

    to_datetime_match = re.search(to_datetime_pattern, request_text)
    to_datetime = to_datetime_match.group(1) if to_datetime_match else "2023-08-23T18:20:08Z"

    count_match = re.search(count_pattern, request_text)
    count = int(count_match.group(1)) if count_match else 1

    # Use the extracted values to generate the response XML.
    response_xml = xml_generator(from_datetime, to_datetime, count)

    # Return the generated XML.
    return Response(response_xml, mimetype='text/xml')


def module_of_testing(is_longrunning: bool, longrunning_port: int):
    """test-module implementation: smoke-test XML generation when configured.

    :param is_longrunning: whether the 'Long running instance' checkbox is set.
    :param longrunning_port: the configured long-running port.
    :raises DemistoException: when the instance is not configured as long-running
        with a port, or when XML generation yields nothing.
    """
    if longrunning_port and is_longrunning:
        # Generating a sample response is the smoke test; any non-empty
        # result counts as success.
        xml_response = xml_generator('2023-08-21T11:46:02Z', '2023-08-21T11:47:02Z', 2)
        if xml_response:
            return_results('ok')
        else:
            raise DemistoException('Could not connect to the long running server. Please make sure everything is '
                                   'configured.')
    else:
        raise DemistoException('Please make sure the long running port is filled and the long running checkbox is '
                               'marked.')


''' MAIN FUNCTION '''


def main():
    """Route the XSOAR command to the test helper or the long-running server."""
    command = demisto.command()
    params = demisto.params()
    port = int(params.get('longRunningPort', '5000'))
    is_longrunning = params.get("longRunning")
    try:
        if command == 'test-module':
            module_of_testing(longrunning_port=port, is_longrunning=is_longrunning)
        elif command == 'long-running-execution':
            # Create the server once; serve_forever() blocks for the lifetime
            # of the container. (Previously the server was re-created inside a
            # `while True` loop, which would attempt to rebind the port if
            # serve_forever() ever returned.)
            server = WSGIServer(('0.0.0.0', port), APP)
            server.serve_forever()
        else:
            raise NotImplementedError(f"command {command} is not implemented.")

    # Log exceptions and return errors
    except Exception as e:
        return_error(
            f"Failed to execute {demisto.command()} command.\nError:\n{str(e)}"
        )


''' ENTRY POINT '''

if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
  display: Port mapping (<port> or <host port>:<docker port>)
  name: longRunningPort
  required: true
  type: 0
description: Generates mock sign on events for Workday Signon Event Collector. Use these for testing and development.
display: Workday Signon Event Generator (Beta)
name: WorkdaySignonEventGenerator
system: true
script:
  runonce: false
  script: '-'
  type: python
  subtype: python3
  dockerimage: demisto/teams:1.0.0.72377
  longRunning: true
  longRunningPort: true
fromversion: 6.8.0
toversion: 7.9.9
tests:
- No tests (auto formatted)
diff --git a/Packs/Workday/Integrations/WorkdaySignonEventGenerator/WorkdaySignonEventGenerator_description.md b/Packs/Workday/Integrations/WorkdaySignonEventGenerator/WorkdaySignonEventGenerator_description.md
new file mode 100644
index 000000000000..68892bac28ae
--- /dev/null
+++ b/Packs/Workday/Integrations/WorkdaySignonEventGenerator/WorkdaySignonEventGenerator_description.md
@@ -0,0 +1,5 @@
+## Event Generator Help
+
+Generates mock sign on events for the Workday Signon Event Collector. Use these for testing and development.
+
+Note: This is a beta Integration, which lets you implement and test pre-release software. Since the integration is beta, it might contain bugs. Updates to the integration during the beta phase might include non-backward compatible features. We appreciate your feedback on the quality and usability of the integration to help us identify issues, fix them, and continually improve.
\ No newline at end of file diff --git a/Packs/Workday/Integrations/WorkdaySignonEventGenerator/WorkdaySignonEventGenerator_image.png b/Packs/Workday/Integrations/WorkdaySignonEventGenerator/WorkdaySignonEventGenerator_image.png new file mode 100644 index 0000000000000000000000000000000000000000..1426dc48413218e3c859ace23ebd9f8ce3605796 GIT binary patch literal 4818 zcmV;@5-shCP)mX?Vy%;3NPr}zE-|3CNKv0(B9UBlz6_0;WruX%`f0LHSBrdR?KC;2yG&weS`E8(}M&yA^0_C%WgewaumQjud7vV!QUqU-lo2DC4v$} z(FGX~+Sw1#Hv{k|F>eJGVBIu1dhHB(y<406a6k8bkX}N9Rn_B93@iXS7{FYDzmNCM zC>q zw*bIx3Mv7eSq5|C%(9j8Gur#`JhP5g-72d9=)djvzU0`xnPt$q9@pjiXuBPAz7z8T zi%#+~Nc|uRb9usQ)=ZN_0KlCDWRr2PTWEos&odz5`$hnA7vkz{D8LxTSAewNy0c`D z=t9{knjypZm*3a29sP*UqcQL2n0F_h4Fs0!ab0}h%zdI+`uXlPEpk}Qn=f^P@wp#F z0s5n)j-R_`$?kNS1d+lAAiZ?83Tm5d!S|cHTjYebE%MIvIxmEJTV!8&hjCc%CMplt z=RVwTvjrNTFJE`Q?7{KsS$xCeAiV&J=ExBD8S(N?Tjk@h;S+I8OZxUn`g6qUFwtI%WAHZJj zk~s3gcuiLhK7N{*`v z|32~o9=h>)qvh}Nc;28TSEpn$1p!`_aLIkq&sC`NR-Yrgu;04a9m}&{I^sUf)}o)9 zCOgAz|B5afd~G7oi=CCQUe%1YRVKN9T9!+`UM?bk+}C{v zxZ{~!uWwU6)w>YnrDFc|Q$)zilsF^kK%&giOcT^KF;p zG@LIAx>{tVEjymI2~2DS>H2h_5czT-ceTkW_7bF*WgY&mLfad3-1cb6OhQ~D0#Ik$ zxZ6!3F@uiB3^2=0Q^h&jWTCy%?4he>ww)fc-H@vnc^Aw~nXb+LeY)!`*i(LG%bv17I$D^8X;5X7~%#$Bt|fo*aO+mqSqtAQMVe9L8Bb(cNQ z(s;}ONfKXjatFoX57&6r0+&Xiw8#zVu`imMoAc7V9F`Srdl%2O3wI2WQGNwruGGUh zuCpMoXI|lM-j=^gJW{_X8B-CfuGWMK0P7nfxN9BLe@#z;x-CJjzLw*gFF!P^rsA5H z6iebUuk~2$4%jta_U|??=v4?^+xix$v2*vKjh*|;M1PoRs1D2vrg7K2uD&5SXlP-< zzKPa~iw#r-i!Hc4np>PB2e5scoNXYJJ0O4a1PxymT`UpZG+znY{(iVI3u0l;kX$ji zLnbbb@?O}YZR)a{`zKsGKVJ-*<)jr+&hzizso42O?HlE5435K3tBlkiAzZAkn!g~? 
zaj$uRNiJ6LbssGEQeG6ezW$X(2H-u9oj)Sy1rJNp?<0VrrHaxv0=V7_TIIw)MBHOo z!xi4D z9KUnae8ARABCeeTNL?<3s*h_o`|e$sGaT0SfCB7=bL@;tfSV(89{#OG%*4l?KU4wK ztQ-J)!8r=B=RcGMup`#`e{MQ<#GK~l;mW73QDZ+R<62`#l!fB36**^8mX@d6vOrFc z$s%{iG+DsY%`uhNHV4WSRQa&Rf@=V5fT}GV1@@ZC-$Ij5P`z3(U33kABk+@+Zzw0n z&E@@c?ftW4OsdvigKBR?Mzbi{o;{|rEhzvVyup9d@IuF4Z);v&zKl-`Nh!5<@_?f| z_L1YS+&lhm0nD~wu7k%~VI`&ZNR9!o!^bY#d)EmIcXds9&!28AILro^%9l|1*Zt6c z{qPALdkOx=W6~Zei_-nsa;`yEBcIgpJjgG=l@82d!zAz$!I5c*n{DRRQKk(_!D+JY-}ygg(lz)o4l`Fmjg zN@mzn>Wu1&!B@3iW1kbEf_sOaXTo*h&0mf6{vzXlM?P2AdH!$N2SGoQ0yq}3?o0m6 z*}8K^srx7&o#Fcjr11l56|9NuKEd~oOG(X1A4h2$aEnaxF zGNcx9>0a6vTX66bIbjJ76yAgWjd=b8;D#sHz{Z!!c%8qRTCRN-Ambr|E}3)#`)`+$ z|1NDOl{!8l%y=hUuYyo(Rf=4!0>HuNA@Dwo+oFmFFH&D>A6W+r!M?Mk(4l3B^ zm$PW_gK$6(WQ4w6O2KEe-FdRul7iXFYq0~5e|9{dB@uxdyEu}D-#{MzzlM*sAwu&4 zs~}!@g6>gT$bC+aL$W51;={A=elDPsjaakifw@q1DT@ zEtv#S>r^X)?x79-esQ~I+@LY-BZV#-Le0}%lPD~!nF_EMY*PNb;PLY9RDz6~&9LC$ z*KP;^x!x+-hWes^ur@M7F3%SsVDx4bVllkU&5f<3VZnPGFW6%h<8wv-@r!mgJT7Cu zzV=i~#&ybymYnQ&R_p~As^=&KTi{uZ0#GqPb*11-2BWKZt_!tKm9tiCc2?qb9tw~* z>g^A2Y8=r8(W(im7aW4mw-QLiuG7k67Q7F0pCZ%P_{ZAjtXJnFwwp@_U)!9fJIf(4 zG^jg-h8{bAgtm<`K-L#JzF-t!8P{*sK3b#3%;ja*T2Us!D>U82@w3>b9tN2&*6FJ+ zb&S;EN!MlpP?yaR0zu{(gLRx$0;{w+#^T90n2QeQqOH;)J|C`Lrg0JX9;bcw9Emck ziAfq{_Lz+I->!YJyuuHI$H-dgJUd@4SiO;v(PnW*=+>@E&iorYch|T$Ni{Ei#ieo4 zEcztBBi>y^uyRbwEFmU+OWhF#Q}REjjn4XL>2!w#hpyg`u{K(bbMeJ+zQ$uO*s=o^ zhGc=RFSM5dF0?mlUuL;r09Fg;S&YKXLOeW8-J%z)5wBf;N)vj-Om~m4hF(=R1WJ`xio#vn3SLT1b{#o!udc6pXkjx50HvglHv)hawvT+fwDF->fA&CX~D6ED(p z0A7{=xOr;;|I_3R;e6#QP9UxFl!aq>i2-h(7z14NkJG+1zzjpx0A_eIS#Ylcsq-Dc zW)p)tT-r>Pv63pvGan$KkFF3qT!H;_<5S>vM&%o*sODTlJbR3UrVAtnDK>IxZ$ z2`J-=YFP*3uW$ds3qsV*HAy&2!J;dn5_myFJjS{H!h!?U+D%3~&GcL1Tnn~(4-1lrrfGf(RWF5?X z7~71e$Sz?hELuC=A#ddpSCmRrdb)2Yv?-Hp9h~$A3~R<)-0i|lOniUL97kml@Vv%F zr*EuC#KQ2gLB~=?jn;@@ICUkzP+tm82L=<%*PzuvwpxHm-=zR@#qcq>pgXBG>UI)i zWb_q#3a{S0phq)q27IDmS%}023SnN1X&rLx$ zD8P?b++?}=tRx7a#?0{d0P)ZrJpa8?Xg^Ar32Qwc_L)R31u)5F_*_Thbm(}wD<0F} 
import unittest
from unittest.mock import patch

import pytest

from CommonServerPython import DemistoException
from WorkdaySignonEventGenerator import (
    random_datetime_in_range,
    random_string,
    xml_generator,
    mock_workday_endpoint,
    module_of_testing,
    main,
)

from WorkdaySignonEventGenerator import APP as app


class TestWorkdaySignonEventGenerator(unittest.TestCase):
    def test_random_datetime_in_range(self) -> None:
        """A generated datetime falls inside the requested [start, end] range.

        Lexicographic comparison is valid here because DATE_FORMAT yields
        fixed-width ISO8601 strings.
        """
        random_date = random_datetime_in_range(
            "2023-08-21T11:46:02Z", "2023-08-21T11:47:02Z"
        )
        assert "2023-08-21T11:46:02Z" <= random_date <= "2023-08-21T11:47:02Z"

    def test_random_string(self) -> None:
        """The default generated string length is 10."""
        assert len(random_string()) == 10

    def test_random_guid(self) -> None:
        """A custom length (6, as used for Short_Session_ID) is honoured."""
        assert len(random_string(length=6)) == 6

    def test_xml_generator(self) -> None:
        """With count=1 the generated XML contains exactly one sign-on entry."""
        xml_response = xml_generator("2023-08-21T11:46:02Z", "2023-08-21T11:47:02Z", 1)
        # NOTE(review): the counted tag appears stripped in this copy of the
        # file; originally this counted the Workday_Account_Signon open tag.
        assert xml_response.count("") == 1


class TestMockWorkdayEndpoint(unittest.TestCase):
    def setUp(self):
        """Use Flask's test client so the endpoint can be exercised without a server."""
        self.app = app.test_client()
        self.app.testing = True

    @patch("WorkdaySignonEventGenerator.Response")
    def test_mock_workday_endpoint(self, MockResponse):
        """POSTing a request body makes the endpoint build a Response from generated XML."""
        # NOTE(review): the SOAP tags of this sample request appear stripped
        # in this copy of the file.
        mock_post_data = """2023-08-21T11:46:02Z
        2023-08-21T11:47:02Z
        2"""
        with self.app as c, c.post("/", data=mock_post_data):
            mock_workday_endpoint()

        MockResponse.assert_called()


class TestModuleOfTesting(unittest.TestCase):
    @patch("WorkdaySignonEventGenerator.demisto.results")
    @patch("WorkdaySignonEventGenerator.return_error")
    @patch("WorkdaySignonEventGenerator.xml_generator")
    def test_module_of_testing(self, MockXmlGenerator, MockReturnError, MockResults):
        """test-module returns 'ok' on a valid config and raises on a bad one."""
        MockXmlGenerator.return_value = "some response"

        # Valid configuration: long-running enabled with a port -> 'ok'.
        module_of_testing(True, 5000)
        MockResults.assert_called_with("ok")

        # Invalid configuration raises a descriptive DemistoException.
        with pytest.raises(DemistoException) as exc_info:
            module_of_testing(False, None)
        assert str(exc_info.value) == (
            "Please make sure the long running port is filled and the long running checkbox is marked."
        )


class TestMainTestingFunction(unittest.TestCase):
    @patch("WorkdaySignonEventGenerator.demisto")
    def test_main_function_test_module(self, MockDemisto):
        """main routes 'test-module' to module_of_testing with parsed params.

        The port arrives from params as the string '5000' and must be parsed
        to the int 5000 before being passed on.
        """
        MockDemisto.params.return_value = {
            "longRunningPort": "5000",
            "longRunning": True,
        }
        MockDemisto.command.return_value = "test-module"

        with patch(
            "WorkdaySignonEventGenerator.module_of_testing"
        ) as MockModuleTesting:
            main()
            MockModuleTesting.assert_called_with(
                longrunning_port=5000, is_longrunning=True
            )


if __name__ == "__main__":
    unittest.main()
password was supplied for the Signon attempt."), + invalid_credentials_label = if(to_integer(Invalid_Credentials) = 1, "User provided invalid credentials."), + invalid_auth_channel_label = if(to_integer(Invalid_for_Authentication_Channel) = 1, "Invalid for authentication channel."), + invalid_auth_policy_label = if(to_integer(Invalid_for_Authentication_Policy) = 1, "Invalid for authentication policy."), + mfa_required_label = if(to_integer(Requires_MFA) = 1, "MFA is required."), + mfa_has_grace_label = if(to_integer(Has_Grace_Period_for_MFA) = 1, "MFA has a grace period."), + mfa_auth_exempt_label = if(to_integer(MFA_Authentication_Exempt) = 1, "MFA authentication is exempted."), + mfa_enrollment_label = if(to_integer(MFA_Enrollment) = 1, "User is enrolled in MFA."), + password_change_required_label = if(to_integer(Required_Password_Change) = 1, "Password change required."), + password_reset_label = if(to_integer(Forgotten_Password_Reset_Request) = 1, "A request was made to reset the password in the Signon attempt."), + password_changed_label = if(to_integer(Password_Changed) = 1, "The password was changed after the signon."), + read_only_label = if(to_integer(Tenant_Access_Read_Only) = 1, "Read only Access is enabled for the signon.") +| alter + // init useful flags & extract nested json properties + device_type_reference_id = Device_Type_Reference -> ID, + is_account_disabled = if(to_integer(Account_Disabled_or_Expired) = 1, boolean_true, to_integer(Account_Disabled_or_Expired) = 0, boolean_false), + is_mfa_needed = if(to_integer(Requires_MFA) = 1, boolean_true, to_integer(Requires_MFA) = 0, boolean_false), + is_password_change_required = if(to_integer(Required_Password_Change) = 1, boolean_true, to_integer(Required_Password_Change) = 0, boolean_false), + is_sign_on_successful = if(to_integer(Successful) = 1, boolean_true, to_integer(Successful) = 0, boolean_false), + mfa_authentication_type_id = Multi_Factor_Authentication_Type_Reference -> ID, + os = 
lowercase(Operating_System), + saml_identity_provider_id = SAML_Identity_Provider_Reference -> ID, + src_ipv4 = if(Signon_IP_Address ~= "\.", Signon_IP_Address), + src_ipv6 = if(Signon_IP_Address ~= ":", Signon_IP_Address), + event_labels = arraycreate(sigon_successful_label, account_disabled_or_expired_label, device_trusted_label, failed_signon_label, invalid_credentials_label, invalid_auth_channel_label, invalid_auth_policy_label, mfa_required_label, mfa_has_grace_label, mfa_auth_exempt_label, mfa_enrollment_label, password_change_required_label, password_reset_label, password_changed_label, read_only_label) +| alter + // map fields + xdm.auth.auth_method = Authentication_Type, + xdm.auth.is_mfa_needed = is_mfa_needed, + xdm.auth.mfa.method = mfa_authentication_type_id, + xdm.auth.mfa.provider = if(to_integer(MFA_Enrollment) = 1, saml_identity_provider_id), + xdm.event.type = if(is_sign_on_successful, "Successful Signon", is_sign_on_successful = false, "Signon Failure", "Signon"), + xdm.event.description = arraystring(event_labels, " "), + xdm.event.outcome = if(is_sign_on_successful, XDM_CONST.OUTCOME_SUCCESS, is_sign_on_successful = boolean_false, XDM_CONST.OUTCOME_FAILED, XDM_CONST.OUTCOME_UNKNOWN), + xdm.event.outcome_reason = Authentication_Failure_Message, + xdm.logon.type = Authentication_Channel, + xdm.network.session_id = Short_Session_ID, + xdm.network.tls.protocol_version = TLS_Version, + xdm.observer.unique_identifier = API_Client_ID, + xdm.source.host.device_id = device_type_reference_id, + xdm.source.host.os = Operating_System, + xdm.source.host.os_family = if(os contains "windows", XDM_CONST.OS_FAMILY_WINDOWS, os contains "mac", XDM_CONST.OS_FAMILY_MACOS, os contains "linux", XDM_CONST.OS_FAMILY_LINUX, os contains "android", XDM_CONST.OS_FAMILY_ANDROID, os contains "ios", XDM_CONST.OS_FAMILY_IOS, os contains "ubuntu", XDM_CONST.OS_FAMILY_UBUNTU, os contains "debian", XDM_CONST.OS_FAMILY_DEBIAN, os contains "fedora", XDM_CONST.OS_FAMILY_FEDORA, os 
contains "centos", XDM_CONST.OS_FAMILY_CENTOS, os contains "chrome", XDM_CONST.OS_FAMILY_CHROMEOS, os contains "solaris", XDM_CONST.OS_FAMILY_SOLARIS, os contains "scada", XDM_CONST.OS_FAMILY_SCADA, Operating_System), + xdm.source.ipv4 = src_ipv4, + xdm.source.ipv6 = src_ipv6, + xdm.source.user_agent = Browser_Type, + xdm.source.user.is_disabled = is_account_disabled, + xdm.source.user.is_password_expired = is_password_change_required, + xdm.source.user.username = to_string(User_Name), + xdm.source.zone = Location; \ No newline at end of file diff --git a/Packs/Workday/ModelingRules/WorkdayEventCollector/WorkdayEventCollector.yml b/Packs/Workday/ModelingRules/WorkdayEventCollector/WorkdayEventCollector.yml index d1a92a50a454..7d4539826df3 100644 --- a/Packs/Workday/ModelingRules/WorkdayEventCollector/WorkdayEventCollector.yml +++ b/Packs/Workday/ModelingRules/WorkdayEventCollector/WorkdayEventCollector.yml @@ -1,5 +1,5 @@ -fromversion: 8.2.0 -id: workday_workday_modeling_rule +fromversion: 8.3.0 +id: Workday_Workday_ModelingRule name: Workday Modeling Rule rules: '' schema: '' diff --git a/Packs/Workday/ModelingRules/WorkdayEventCollector/WorkdayEventCollector_schema.json b/Packs/Workday/ModelingRules/WorkdayEventCollector/WorkdayEventCollector_schema.json index a62eec04cb26..80a8291efc11 100644 --- a/Packs/Workday/ModelingRules/WorkdayEventCollector/WorkdayEventCollector_schema.json +++ b/Packs/Workday/ModelingRules/WorkdayEventCollector/WorkdayEventCollector_schema.json @@ -36,5 +36,139 @@ "type": "string", "is_array": false } - } + }, + + "workday_signon_raw": { + "access_restriction_reference": { + "type": "string", + " is_array": false + }, + "account_disabled_or_expired": { + "type": "int", + " is_array": false + }, + "api_client_id": { + "type": "string", + " is_array": false + }, + "authentication_channel": { + "type": "string", + " is_array": false + }, + "authentication_failure_message": { + "type": "string", + " is_array": false + }, + 
"authentication_type": { + "type": "string", + " is_array": false + }, + "browser_type": { + "type": "string", + " is_array": false + }, + "device_is_trusted": { + "type": "int", + " is_array": false + }, + "device_type_reference": { + "type": "string", + " is_array": false + }, + "failed_signon": { + "type": "int", + " is_array": false + }, + "forgotten_password_reset_request": { + "type": "int", + " is_array": false + }, + "has_grace_period_for_mfa": { + "type": "int", + " is_array": false + }, + "invalid_for_authentication_channel": { + "type": "int", + " is_array": false + }, + "invalid_for_authentication_policy": { + "type": "int", + " is_array": false + }, + "invalid_credentials": { + "type": "int", + " is_array": false + }, + "location": { + "type": "string", + " is_array": false + }, + "mfa_enrollment": { + "type": "int", + " is_array": false + }, + "mfa_authentication_exempt": { + "type": "int", + " is_array": false + }, + + "multi_factor_authentication_type_reference": { + "type": "string", + " is_array": false + }, + "operating_system": { + "type": "string", + " is_array": false + }, + "password_changed": { + "type": "int", + " is_array": false + }, + "required_password_change": { + "type": "int", + " is_array": false + }, + "requires_mfa": { + "type": "int", + " is_array": false + }, + "saml_identity_provider_reference": { + "type": "string", + " is_array": false + }, + "short_session_id": { + "type": "string", + " is_array": false + }, + "signon_datetime": { + "type": "datetime", + " is_array": false + }, + "signoff_datetime": { + "type": "datetime", + " is_array": false + }, + "signon_ip_address": { + "type": "string", + " is_array": false + }, + "successful": { + "type": "int", + " is_array": false + }, + "tenant_access_read_only": { + "type": "int", + " is_array": false + }, + "tls_version": { + "type": "string", + " is_array": false + }, + "user_name": { + "type": "string", + " is_array": false + } + } + + } \ No newline at end of file diff --git 
a/Packs/Workday/README.md b/Packs/Workday/README.md index e9de4e400b56..1e9adbd722fb 100644 --- a/Packs/Workday/README.md +++ b/Packs/Workday/README.md @@ -1,2 +1,12 @@ -Note: In order to parse the timestamp correctly, make sure that the "requestTime" field is in UTC time zone (timestamp ends with "Z"). -The supported time format is YYYY-MM-DDTHH:MM:SS.E3Z%z (2023-07-15T07:00:00.000Z). \ No newline at end of file +<~XSIAM> + +This pack supports collection and modeling of the following event types: +- *User activity* audit log entries. +- *Sign-on* events. + +Note: Regarding the *user activity* audit log entries, +in order to parse the timestamp correctly, +make sure that the "requestTime" field is in UTC time zone (timestamp ends with "Z"). +The supported time format is *YYYY-MM-DDTHH:MM:SS.E3Z%z* (e.g, *2023-09-05T14:00:00.123Z*). + + \ No newline at end of file diff --git a/Packs/Workday/ReleaseNotes/1_4_0.md b/Packs/Workday/ReleaseNotes/1_4_0.md new file mode 100644 index 000000000000..af0c80de1a7d --- /dev/null +++ b/Packs/Workday/ReleaseNotes/1_4_0.md @@ -0,0 +1,17 @@ + +#### Integrations + +##### New: Workday Sign On Event Collector + +New: Use the Workday Sign On Event Collector integration to get sign on logs from Workday (Available from Cortex XSIAM 8.2.0). + +##### New: Workday Signon Event Generator (Beta) + +New: Generates mock sign on events for Workday Signon Event Collector. Use these for testing and development. (Available from Cortex XSIAM 8.3.0). + +#### Modeling Rules + +##### Workday Modeling Rule + +Added support for modeling sign on events (Available from Cortex XSIAM 8.3.0). 
+ diff --git a/Packs/Workday/pack_metadata.json b/Packs/Workday/pack_metadata.json index 21d564790b38..f42484049282 100644 --- a/Packs/Workday/pack_metadata.json +++ b/Packs/Workday/pack_metadata.json @@ -2,7 +2,7 @@ "name": "Workday", "description": "Workday offers enterprise-level software solutions for financial management, human resources, and planning.", "support": "xsoar", - "currentVersion": "1.3.9", + "currentVersion": "1.4.0", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "",