diff --git a/Packs/Workday/Integrations/WorkdaySignOnEventCollector/WorkdaySignOnEventCollector_test.py b/Packs/Workday/Integrations/WorkdaySignOnEventCollector/WorkdaySignOnEventCollector_test.py
new file mode 100644
index 000000000000..95821d56184d
--- /dev/null
+++ b/Packs/Workday/Integrations/WorkdaySignOnEventCollector/WorkdaySignOnEventCollector_test.py
@@ -0,0 +1,629 @@
+import json
+import unittest
+from typing import Any
+from unittest.mock import patch
+from freezegun import freeze_time
+
+from CommonServerPython import DemistoException
+from WorkdaySignOnEventCollector import (
+ get_from_time,
+ fletcher16,
+ generate_pseudo_id,
+ convert_to_json,
+ Client,
+ fetch_sign_on_logs,
+ get_sign_on_events_command,
+ fetch_sign_on_events_command,
+ process_and_filter_events,
+ main,
+ VENDOR,
+ PRODUCT,
+)
+
+
+def test_get_from_time() -> None:
+ """
+ Given:
+ - A time duration in seconds (3600 seconds or 1 hour ago).
+
+ When:
+ - The function `get_from_time` is called to convert this duration to a UTC datetime string.
+
+ Then:
+ - Ensure that the returned value is a string.
+ - Validate that the string ends with 'Z', indicating it's in UTC format.
+ """
+ # Given: A time duration of 3600 seconds (or 1 hour) ago.
+ seconds_ago = 3600 # 1 hour ago
+
+ # When: Calling the function to convert this to a UTC datetime string.
+ result: Any = get_from_time(seconds_ago)
+
+ # Then: Validate the type and format of the returned value.
+ assert isinstance(result, str)
+ assert result.endswith("Z") # Check if it's in the right format
+
+
+def test_fletcher16() -> None:
+ """
+ Given:
+ - Two types of byte strings, one containing the word 'test' and another being empty.
+
+ When:
+ - The function `fletcher16` is called to calculate the checksum for these byte strings.
+
+ Then:
+ - Ensure that the checksum calculated for the byte string 'test' matches the expected value of 22976.
+ - Validate that the checksum for an empty byte string is 0.
+ """
+ # Given: A byte string containing the word 'test'.
+ data = b"test"
+
+ # When: Calling `fletcher16` to calculate the checksum.
+ result: Any = fletcher16(data)
+
+ # Then: Validate that the checksum matches the expected value.
+ expected = 22976
+ assert result == expected
+
+ # Given: An empty byte string.
+ data = b""
+
+ # When: Calling `fletcher16` to calculate the checksum.
+ result = fletcher16(data)
+
+ # Then: Validate that the checksum for an empty byte string is 0.
+ expected = 0
+ assert result == expected
+
+
+def test_generate_pseudo_id() -> None:
+ """
+ Given:
+ - Four different event dictionaries:
+ 1. A valid event dictionary with known values.
+ 2. An empty event dictionary.
+ 3. An event dictionary missing the "Signon_DateTime" key.
+ 4. A large event dictionary.
+
+ When:
+ - Calling `generate_pseudo_id` to calculate a unique ID based on the event dictionary.
+
+ Then:
+ - For the first case, ensure that the unique ID matches the expected value.
+ - For the second and third cases, ensure that an exception is raised.
+ - For the fourth case, ensure the function can handle large dictionaries without errors.
+ """
+
+ # Given: A valid event dictionary with known values.
+ event1 = {
+ "Short_Session_ID": "12345",
+ "User_Name": "ABC123",
+ "Successful": 1,
+ "Signon_DateTime": "2023-09-04T07:47:57.460-07:00",
+ }
+ # When: Calling `generate_pseudo_id` to calculate the unique ID.
+ event1_str: str = json.dumps(event1, sort_keys=True)
+ expected_checksum1: Any = fletcher16(event1_str.encode())
+ expected_unique_id1: str = f"{expected_checksum1}_{event1['Signon_DateTime']}"
+ result1: str = generate_pseudo_id(event1)
+ # Then: Validate that the unique ID matches the expected value.
+ assert result1 == expected_unique_id1
+
+ # Given: An empty event dictionary.
+ event2 = {}
+ # When & Then: Calling `generate_pseudo_id` and expecting an exception.
+ try:
+ generate_pseudo_id(event2)
+ except DemistoException as e:
+ assert (
+ str(e)
+ == "While calculating the pseudo ID for an event, an event without a Signon_DateTime was "
+ "found.\nError: 'Signon_DateTime'"
+ )
+ else:
+ raise AssertionError("Expected DemistoException but did not get one")
+
+ # Given: An event dictionary missing the "Signon_DateTime" key.
+ event3 = {
+ "Short_Session_ID": "12345",
+ "User_Name": "ABC123",
+ "Successful": 1,
+ }
+ # When & Then: Calling `generate_pseudo_id` and expecting an exception.
+ try:
+ generate_pseudo_id(event3)
+ except DemistoException:
+ pass
+ else:
+ raise AssertionError("Expected DemistoException but did not get one")
+
+ # Given: A large event dictionary.
+ event4 = {str(i): i for i in range(10000)} # Create a large dictionary
+ event4["Signon_DateTime"] = "2023-09-04T07:47:57.460-07:00" # Add a Signon_DateTime key
+ # When & Then: Calling `generate_pseudo_id` to check if the function can handle it.
+ assert generate_pseudo_id(event4)
+
+
+def test_process_and_filter_events() -> None:
+ """
+ Given:
+ - A list of two valid sign-on events that differ by 1 second in their "Signon_DateTime".
+ - An initial time ("from_time") that matches the "Signon_DateTime" of one of the events.
+ - An empty set of pseudo_ids from the previous run.
+
+ When:
+ - Calling the `process_and_filter_events` function to filter out duplicates and process events for the next
+ iteration.
+
+ Then:
+ - The list of non-duplicate events should match the original list of events.
+ - The set of pseudo_ids for the next iteration should contain two elements.
+ - Each event in the list of non-duplicates should have an additional "_time" key that matches its
+ "Signon_DateTime".
+ """
+
+ # Given: A list of two valid sign-on events and other initial conditions
+ events = [
+ {
+ "Short_Session_ID": "12345",
+ "User_Name": "ABC6789",
+ "Successful": 1,
+ "Signon_DateTime": "2023-09-04T07:47:57.460-07:00",
+ },
+ {
+ "Short_Session_ID": "12346",
+ "User_Name": "ABC6790",
+ "Successful": 1,
+ "Signon_DateTime": "2023-09-04T07:47:57.460-07:00",
+ },
+ ]
+ from_time: str = "2021-09-01T12:00:00Z"
+ previous_run_pseudo_ids: set[
+ Any
+ ] = set() # Assume no previous checksums for simplicity
+
+ # When: Calling the function to test
+ non_duplicates, pseudo_ids_for_next_iteration = process_and_filter_events(
+ events, from_time, previous_run_pseudo_ids
+ )
+
+ # Then: Validate the function's output
+ assert (
+ non_duplicates == events
+ ) # Check if the list of non-duplicates is as expected
+ assert (
+ len(pseudo_ids_for_next_iteration) == 2
+ ) # Check if the set of pseudo_ids for next iteration is updated
+
+ # Check if '_time' key is added to each event
+ for event in non_duplicates:
+ assert "_time" in event
+ assert event["_time"] == event["Signon_DateTime"]
+
+
+def test_convert_to_json() -> None:
+ """
+ Given:
+ - A sample XML response string containing a single 'Workday_Account_Signon' entry with a 'Signon_DateTime'.
+
+ When:
+ - Calling the 'convert_to_json' function to convert the XML data to a Python dictionary.
+
+ Then:
+ - The function should return two Python dictionaries.
+ - The first dictionary should represent the entire XML structure.
+ - The second dictionary should contain just the 'Workday_Account_Signon' entries.
+ - Both dictionaries should correctly reflect the 'Signon_DateTime' from the original XML.
+ """
+
+ # Given: Test with XML data (this is a simplified version for the sake of the test)
+    xml_response = """
+    <Envelope>
+        <Body>
+            <Get_Workday_Account_Signons_Response>
+                <Response_Data>
+                    <Workday_Account_Signon>
+                        <Signon_DateTime>2023-09-04T07:47:57.460-07:00</Signon_DateTime>
+                    </Workday_Account_Signon>
+                </Response_Data>
+            </Get_Workday_Account_Signons_Response>
+        </Body>
+    </Envelope>
+    """
+
+ # When: Calling the function to test
+ raw_json_response, account_signon_data = convert_to_json(xml_response)
+
+ # Then: Check if the converted data matches the expected structure
+ assert (
+ raw_json_response["Envelope"]["Body"]["Get_Workday_Account_Signons_Response"][
+ "Response_Data"
+ ]["Workday_Account_Signon"][0]["Signon_DateTime"]
+ == "2023-09-04T07:47:57.460-07:00"
+ )
+
+ assert (
+ account_signon_data["Workday_Account_Signon"][0]["Signon_DateTime"]
+ == "2023-09-04T07:47:57.460-07:00"
+ )
+
+
+def test_generate_workday_account_signons_body() -> None:
+ """
+ Given:
+ - A Client object initialized with a base URL, verification settings, a tenant name, and login credentials.
+ - Parameters specifying the page, count, and time range for fetching Workday sign-on events.
+
+ When:
+ - Calling the 'generate_workday_account_signons_body' method on the Client object to generate the SOAP request body.
+
+ Then:
+ - The returned SOAP request body should contain all the specified parameters.
+ - The body should also contain the username and password for authentication.
+ """
+
+ # Given: Initialize a Client object with sample data
+ client = Client(
+ base_url="",
+ verify_certificate=True,
+ proxy=False,
+ tenant_name="test_tenant",
+ username="test_user",
+ password="test_pass",
+ )
+
+ # When: Generate the SOAP request body
+ body = client.generate_workday_account_signons_body(
+ page=1,
+ count=10,
+ to_time="2021-09-01T12:00:00Z",
+ from_time="2021-09-01T11:00:00Z",
+ )
+
+ # Then: Verify that the SOAP request body contains all the specified parameters
+ assert "1" in body
+ assert "10" in body
+ assert "2021-09-01T11:00:00Z" in body
+ assert "2021-09-01T12:00:00Z" in body
+ assert "test_user" in body
+    assert "test_pass" in body
+
+
+def test_generate_test_payload() -> None:
+ """
+ Given:
+ - A Client object initialized with a base URL, verification settings, a tenant name, and login credentials.
+ - Parameters specifying the time range for fetching Workday sign-on events for the test payload.
+
+ When:
+ - Calling the 'generate_test_payload' method on the Client object to generate a SOAP request payload for testing.
+
+ Then:
+ - The returned SOAP request payload should contain all the specified parameters.
+ - The payload should also contain the username and password for authentication.
+ """
+
+ # Given: Initialize a Client object with sample data
+ client = Client(
+ base_url="",
+ verify_certificate=True,
+ proxy=False,
+ tenant_name="test_tenant",
+ username="test_user",
+ password="test_pass",
+ )
+
+ # When: Generate the SOAP request payload for testing
+ payload = client.generate_test_payload(
+ from_time="2021-09-01T11:00:00Z", to_time="2021-09-01T12:00:00Z"
+ )
+
+ # Then: Verify that the SOAP request payload contains all the specified parameters
+    assert "1" in payload
+ assert "2021-09-01T11:00:00Z" in payload
+ assert "2021-09-01T12:00:00Z" in payload
+ assert "test_user" in payload
+    assert "test_pass" in payload
+
+
+def test_convert_to_json_valid_input() -> None:
+ """
+ Given:
+ - An XML-formatted response string from the Workday API, containing sign-on event data.
+
+ When:
+ - Calling the 'convert_to_json' function to convert the XML response to JSON format.
+
+ Then:
+ - The function should return two JSON objects: one containing the full JSON-converted data,
+ and another containing only the sign-on event data.
+ - Both JSON objects should be properly formatted and contain the expected data fields.
+ """
+
+    # Given: An XML-formatted response string from the Workday API
+    # (simplified: namespace prefixes omitted; element names mirror the keys asserted below)
+    response = """
+    <Envelope>
+        <Body>
+            <Get_Workday_Account_Signons_Response>
+                <Response_Data>
+                    <Workday_Account_Signon>
+                        <Signon_DateTime>2021-09-01T11:00:00Z</Signon_DateTime>
+                    </Workday_Account_Signon>
+                </Response_Data>
+            </Get_Workday_Account_Signons_Response>
+        </Body>
+    </Envelope>
+    """
+
+ # When: Converting the XML to JSON
+ full_json, account_signon_data = convert_to_json(response)
+
+ # Then: Validate the full_json data structure
+ envelope = full_json.get("Envelope", {})
+ body = envelope.get("Body", {})
+ response = body.get("Get_Workday_Account_Signons_Response", {})
+ response_data = response.get("Response_Data", {})
+ workday_account_signons = response_data.get("Workday_Account_Signon", [])
+
+ # Assertions for full_json
+ assert isinstance(
+ workday_account_signons, list
+ ), "workday_account_signons is not a list"
+ assert workday_account_signons, "workday_account_signons is empty"
+ assert workday_account_signons[0].get("Signon_DateTime") == "2021-09-01T11:00:00Z"
+
+ # Then: Validate the account_signon_data structure
+ workday_account_signons_data = account_signon_data.get("Workday_Account_Signon", [])
+
+ # Assertions for account_signon_data
+ assert workday_account_signons_data
+ assert (
+ workday_account_signons_data[0].get("Signon_DateTime") == "2021-09-01T11:00:00Z"
+ )
+
+
+class TestFetchSignOnLogs(unittest.TestCase):
+ def setUp(self) -> None:
+ """
+ Given:
+ - A Client object with mock URL, tenant, username, and password.
+
+ When:
+ - Setting up each unit test case.
+
+ Then:
+ - The Client object should be initialized and ready for testing.
+ """
+ self.client = Client(
+ "mock_url",
+ False,
+ False,
+ "mock_tenant",
+ "mock_user",
+ "mock_pass",
+ )
+
+ @patch.object(Client, "retrieve_events")
+ def test_fetch_sign_on_logs_single_page(self, mock_retrieve_events) -> None:
+ """
+ Given:
+ - A mock Client object with a retrieve_events method that returns a sample response.
+ - The sample response contains a single Workday sign-on event.
+
+ When:
+ - Calling the fetch_sign_on_logs function to fetch sign-on logs.
+
+ Then:
+ - The function should return a list of events.
+ - The length of the list should be 1.
+ - The event in the list should have the User_Name "John".
+ """
+
+ # Given: Sample data to be returned by the mock
+ mock_response = (
+ {
+ "Workday_Account_Signon": [
+ {
+ "Signon_DateTime": "2021-09-01T11:00:00Z",
+ "User_Name": "John",
+ "Short_Session_ID": "123456",
+ "Successful": 1,
+ }
+ ]
+ },
+ 1,
+ )
+
+ # Setup: Configure the mock to return the sample data
+ mock_retrieve_events.return_value = mock_response
+
+ # When: Fetching sign-on logs
+ events = fetch_sign_on_logs(
+ self.client, 10, "2021-09-01T00:00:00Z", "2021-09-02T00:00:00Z"
+ )
+
+ # Then: Validate the function's return value
+ assert len(events) == 1
+ assert events[0]["User_Name"] == "John"
+
+
+class TestGetSignOnEventsCommand(unittest.TestCase):
+ def test_get_sign_on_events_command(self) -> None:
+ """
+ Given:
+ - A Client object with mock settings.
+ - A patch for the fetch_sign_on_logs function to return a mock event.
+ - The mock event has details such as Signon_DateTime, User_Name, Short_Session_ID, and Successful status.
+
+ When:
+ - Calling the get_sign_on_events_command function to get sign-on events between two date-time ranges.
+
+ Then:
+ - The function should return a list of events and results.
+ - The length of the list should be 1.
+        - The event in the list should have the User_Name "John" and _time "2023-09-04T07:47:57.460-07:00" (taken from its Signon_DateTime).
+ - The readable_output of the results should start with "### Sign On Events List:".
+ """
+
+ # Given: Sample data to be returned by the mock
+ mock_events = [
+ {
+ "Signon_DateTime": "2023-09-04T07:47:57.460-07:00",
+ "User_Name": "John",
+ "Short_Session_ID": "123456",
+ "Successful": 1,
+ "_time": "2021-09-01T11:00:00Z", # This is added by the process_events function
+ }
+ ]
+
+ # Setup: Use patch to mock the fetch_sign_on_logs function
+ with patch(
+ "WorkdaySignOnEventCollector.fetch_sign_on_logs", return_value=mock_events
+ ):
+ client = Client(
+ "mock_url",
+ False,
+ False,
+ "mock_tenant",
+ "mock_user",
+ "mock_pass",
+ )
+
+ # When: Calling the get_sign_on_events_command
+ events, results = get_sign_on_events_command(
+ client, "2021-09-01T00:00:00Z", "2021-09-02T00:00:00Z", 10
+ )
+
+ # Then: Validate the function's return value
+ assert len(events) == 1
+ assert events[0]["User_Name"] == "John"
+ assert events[0]["_time"] == "2023-09-04T07:47:57.460-07:00"
+ assert results.readable_output.startswith("### Sign On Events List:")
+
+
+@freeze_time("2023-09-04T00:00:00.000-07:00")
+def test_fetch_sign_on_events_command_single_page() -> None:
+ """
+ Given:
+ - A Client object with mock settings.
+ - A patch for the Client's retrieve_events method to return a mock event.
+ - A patch for demisto.getLastRun function to return a mock last_run dictionary.
+ - The mock event has details such as Signon_DateTime, User_Name, Short_Session_ID, and Successful status.
+ - The mock last_run dictionary contains last_fetch_time and previous_run_pseudo_ids.
+
+ When:
+ - Calling the fetch_sign_on_events_command function to fetch sign-on events.
+
+ Then:
+ - The function should return a list of events and a new_last_run dictionary.
+ - The length of the list should be 1.
+    - The event in the list should have the User_Name "John" and _time "2023-09-04T07:47:57.460-07:00".
+    - The new_last_run dictionary should have last_fetch_time updated to "2023-09-04T07:47:57.460-07:00".
+ """
+
+ # Given: Sample data to be returned by the mock
+ mock_events = [
+ {
+ "Signon_DateTime": "2023-09-04T07:47:57.460-07:00",
+ "User_Name": "John",
+ "Short_Session_ID": "123456",
+ "Successful": 1,
+ "_time": "2023-09-04T07:47:57.460-07:00", # This is added by the process_events function
+ }
+ ]
+
+ # Setup: Mock the client's retrieve_events method and demisto.getLastRun function
+ mock_retrieve_response = ({"Workday_Account_Signon": mock_events}, 1)
+ mock_last_run = {
+ "last_fetch_time": "2023-09-04T07:47:57.460-07:00",
+ "previous_run_pseudo_ids": set(),
+ }
+
+ # When: Calling the fetch_sign_on_events_command
+ with patch.object(
+ Client, "retrieve_events", return_value=mock_retrieve_response
+ ), patch("demistomock.getLastRun", return_value=mock_last_run):
+ client = Client(
+ "mock_url",
+ False,
+ False,
+ "mock_tenant",
+ "mock_user",
+ "mock_pass",
+ )
+ events, new_last_run = fetch_sign_on_events_command(client, 10, mock_last_run)
+
+ # Then: Validate the function's return value
+ assert len(events) == 1
+ assert events[0]["User_Name"] == "John"
+ assert events[0]["_time"] == "2023-09-04T07:47:57.460-07:00"
+ assert new_last_run["last_fetch_time"] == "2023-09-04T07:47:57.460-07:00"
+
+
+def test_main_fetch_events() -> None:
+ """
+ Given:
+ - A set of mock parameters for the client.
+ - Mock functions for demisto's getLastRun, setLastRun, and params.
+ - Mock for the fetch_sign_on_events_command function to return mock events and new last_run data.
+ - Mock for the send_events_to_xsiam function.
+
+ When:
+ - The main function is called and the command is 'fetch-events'.
+
+ Then:
+ - Ensure that fetch_sign_on_events_command is called with the correct arguments.
+ - Ensure that send_events_to_xsiam is called with the mock events.
+ - Ensure that setLastRun is called to update the last_run data.
+ """
+ # Given: Mock parameters and last run data
+ mock_params = {
+ "tenant_name": "TestTenant",
+ "max_fetch": "10000",
+ "base_url": "https://testurl.com",
+ "credentials": {"identifier": "TestUser", "password": "testpass"},
+ "insecure": True,
+ }
+
+ # Mocking demisto.command to return 'fetch-events'
+ with patch("demistomock.command", return_value="fetch-events"), patch(
+ "demistomock.getLastRun", return_value={"some": "data"}
+ ), patch("demistomock.setLastRun") as mock_set_last_run, patch(
+ "demistomock.params", return_value=mock_params
+ ), patch(
+ "WorkdaySignOnEventCollector.Client"
+ ) as mock_client, patch(
+ "WorkdaySignOnEventCollector.fetch_sign_on_events_command"
+ ) as mock_fetch_sign_on_events_command, patch(
+ "WorkdaySignOnEventCollector.send_events_to_xsiam"
+ ) as mock_send_events_to_xsiam:
+ # Mocking the output of fetch_sign_on_events_command
+ mock_events = [{"event": "data"}]
+ mock_new_last_run = {"new": "data"}
+ mock_fetch_sign_on_events_command.return_value = (
+ mock_events,
+ mock_new_last_run,
+ )
+
+ # When: Calling the main function
+ main()
+
+ # Then: Validate the function calls and arguments
+ mock_fetch_sign_on_events_command.assert_called_with(
+ client=mock_client.return_value,
+ max_fetch=10000,
+ last_run={"some": "data"},
+ )
+
+ mock_send_events_to_xsiam.assert_called_with(
+ mock_events, vendor=VENDOR, product=PRODUCT
+ )
+ mock_set_last_run.assert_called_with(mock_new_last_run)
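Note on the helpers exercised above: the expectations encoded in these tests (a checksum of 22976 for `b"test"`, pseudo IDs of the form `<checksum>_<Signon_DateTime>`, and a `DemistoException` when `Signon_DateTime` is missing) pin down the dedup logic well enough to sketch it. The sketch below is for orientation only and is not the integration's actual source:

```python
import json

from CommonServerPython import DemistoException


def fletcher16(data: bytes) -> int:
    # Fletcher-16-style checksum; the modulo-256 variant is the one that yields
    # the expected value 22976 for b"test" asserted in test_fletcher16.
    sum1 = sum2 = 0
    for byte in data:
        sum1 = (sum1 + byte) % 256
        sum2 = (sum2 + sum1) % 256
    return (sum2 << 8) | sum1


def generate_pseudo_id(event: dict) -> str:
    # Checksum the canonical (sorted-key) JSON form of the event and suffix it with
    # the event's Signon_DateTime, mirroring what test_generate_pseudo_id computes.
    try:
        checksum = fletcher16(json.dumps(event, sort_keys=True).encode())
        return f"{checksum}_{event['Signon_DateTime']}"
    except KeyError as e:
        raise DemistoException(
            "While calculating the pseudo ID for an event, an event without a Signon_DateTime was "
            f"found.\nError: {e}"
        )
```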
diff --git a/Packs/Workday/Integrations/WorkdaySignOnEventCollector/command_examples b/Packs/Workday/Integrations/WorkdaySignOnEventCollector/command_examples
new file mode 100644
index 000000000000..1b69d3470859
--- /dev/null
+++ b/Packs/Workday/Integrations/WorkdaySignOnEventCollector/command_examples
@@ -0,0 +1 @@
+workday-get-sign-on-events should_push_events=false limit=1 from_date="2023-08-23T18:20:03Z" to_date="2023-08-23T18:20:08Z"
\ No newline at end of file
diff --git a/Packs/Workday/Integrations/WorkdaySignOnEventCollector/test_data/example_event.json b/Packs/Workday/Integrations/WorkdaySignOnEventCollector/test_data/example_event.json
new file mode 100644
index 000000000000..000fa21d00ae
--- /dev/null
+++ b/Packs/Workday/Integrations/WorkdaySignOnEventCollector/test_data/example_event.json
@@ -0,0 +1,39 @@
+{
+ "Workday_Account_Signon": {
+ "Signon_DateTime": "2023-08-08T23:04:01.788-07:00",
+ "User_Name": 123456,
+ "Successful": 1,
+ "Failed_Signon": 0,
+ "Invalid_Credentials": 0,
+ "Password_Changed": 0,
+ "Forgotten_Password_Reset_Request": 0,
+ "Signon_IP_Address": "Workday Internal",
+ "Signoff_DateTime": "2023-08-08T23:10:17.310-07:00",
+ "Authentication_Channel": "Web Services",
+ "Authentication_Type": "Trusted",
+ "Workday_Account_Reference": {
+ "ID": {
+ "WID": "1234567890qwertyuiop",
+ "System_User_ID": 123456,
+ "WorkdayUserName": 123456
+ }
+ },
+ "System_Account_Signon_Reference": {
+ "ID": "1234567890"
+ },
+ "Request_Originator_Reference": {
+ "ID": "1234567890qwertyuiop"
+ },
+ "Invalid_for_Authentication_Channel": 0,
+ "Invalid_for_Authentication_Policy": 0,
+ "Required_Password_Change": 0,
+ "Account_Disabled_or_Expired": 0,
+ "MFA_Authentication_Exempt": 0,
+ "Has_Grace_Period_for_MFA": 0,
+ "MFA_Enrollment": 0,
+ "Short_Session_ID": "abc123",
+ "Device_is_Trusted": 0,
+ "Tenant_Access_Read_Only": 0
+ }
+}
+
diff --git a/Packs/Workday/Integrations/WorkdaySignonEventGenerator/README.md b/Packs/Workday/Integrations/WorkdaySignonEventGenerator/README.md
new file mode 100644
index 000000000000..fe283ec6c71c
--- /dev/null
+++ b/Packs/Workday/Integrations/WorkdaySignonEventGenerator/README.md
@@ -0,0 +1,15 @@
+Generates mock signon events for Workday Signon Event Collector. Use these for testing and development.
+This integration was integrated and tested with version 37.0 of WorkdaySignonEventGenerator.
+
+## Configure Workday Signon Event Generator (Beta) on Cortex XSOAR
+
+1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
+2. Search for Workday Signon Event Generator (Beta).
+3. Click **Add instance** to create and configure a new integration instance.
+
+ | **Parameter** | **Required** |
+ | --- | --- |
+ | Long running instance | False |
+ | Port mapping (<port> or <host port>:<docker port>) | True |
+
+4. Click **Test** to validate the URLs, token, and connection.
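Once an instance with the long-running server is up, the mock endpoint can also be exercised directly over HTTP for ad-hoc testing. The snippet below is only a sketch: the host/port and the request element names are assumptions that mirror the request fields this generator parses, not a documented API.

```python
import requests

# Hypothetical address; use the port mapping configured for the instance.
MOCK_URL = "http://localhost:5000/"

# The endpoint looks for From_DateTime, To_DateTime and Count in the POSTed body
# (element names assumed) and falls back to defaults when they are absent.
body = """
<bsvc:From_DateTime>2023-08-23T18:20:03Z</bsvc:From_DateTime>
<bsvc:To_DateTime>2023-08-23T18:20:08Z</bsvc:To_DateTime>
<bsvc:Count>2</bsvc:Count>
"""

response = requests.post(MOCK_URL, data=body, headers={"Content-Type": "text/xml"})
print(response.text)  # randomized Workday-style sign-on XML
```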
diff --git a/Packs/Workday/Integrations/WorkdaySignonEventGenerator/WorkdaySignonEventGenerator.py b/Packs/Workday/Integrations/WorkdaySignonEventGenerator/WorkdaySignonEventGenerator.py
new file mode 100644
index 000000000000..eba9da1fe808
--- /dev/null
+++ b/Packs/Workday/Integrations/WorkdaySignonEventGenerator/WorkdaySignonEventGenerator.py
@@ -0,0 +1,189 @@
+import random
+import string
+
+from gevent.pywsgi import WSGIServer
+from flask import Flask, request, Response
+from CommonServerPython import *
+
+import urllib3
+
+# Disable insecure warnings
+urllib3.disable_warnings()
+
+''' CONSTANTS '''
+APP: Flask = Flask('xsoar-workday-signon')
+DATE_FORMAT = '%Y-%m-%dT%H:%M:%SZ' # ISO8601 format with UTC, default in XSOAR
+
+# XML template for a single Workday_Account_Signon entry. Element names follow the
+# Workday Get_Workday_Account_Signons response; the "wd" namespace prefix is assumed.
+SIGNON_ITEM_TEMPLATE = """
+        <wd:Workday_Account_Signon>
+            <wd:Signon_DateTime>{signon_datetime}</wd:Signon_DateTime>
+            <wd:User_Name>{user_name}</wd:User_Name>
+            <wd:Successful>1</wd:Successful>
+            <wd:Failed_Signon>0</wd:Failed_Signon>
+            <wd:Invalid_Credentials>0</wd:Invalid_Credentials>
+            <wd:Password_Changed>0</wd:Password_Changed>
+            <wd:Forgotten_Password_Reset_Request>0</wd:Forgotten_Password_Reset_Request>
+            <wd:Signon_IP_Address>Workday Internal</wd:Signon_IP_Address>
+            <wd:Authentication_Channel>Web Services</wd:Authentication_Channel>
+            <wd:Authentication_Type>Trusted</wd:Authentication_Type>
+            <wd:Workday_Account_Reference>
+                <wd:ID wd:type="WID">dc28d59c523f1010e415d814cbd50002</wd:ID>
+                <wd:ID wd:type="System_User_ID">12345678</wd:ID>
+                <wd:ID wd:type="WorkdayUserName">{user_name}</wd:ID>
+            </wd:Workday_Account_Reference>
+            <wd:System_Account_Signon_Reference>
+                <wd:ID>4328$170406698</wd:ID>
+            </wd:System_Account_Signon_Reference>
+            <wd:Request_Originator_Reference>
+                <wd:ID>02f60ab5ed5744c0afbc9cc5096d7a73</wd:ID>
+            </wd:Request_Originator_Reference>
+            <wd:Invalid_for_Authentication_Channel>0</wd:Invalid_for_Authentication_Channel>
+            <wd:Invalid_for_Authentication_Policy>0</wd:Invalid_for_Authentication_Policy>
+            <wd:Required_Password_Change>0</wd:Required_Password_Change>
+            <wd:Account_Disabled_or_Expired>0</wd:Account_Disabled_or_Expired>
+            <wd:MFA_Authentication_Exempt>0</wd:MFA_Authentication_Exempt>
+            <wd:Has_Grace_Period_for_MFA>0</wd:Has_Grace_Period_for_MFA>
+            <wd:MFA_Enrollment>0</wd:MFA_Enrollment>
+            <wd:Short_Session_ID>{short_session_id}</wd:Short_Session_ID>
+            <wd:Device_is_Trusted>0</wd:Device_is_Trusted>
+            <wd:Tenant_Access_Read_Only>0</wd:Tenant_Access_Read_Only>
+        </wd:Workday_Account_Signon>
+    """
+
+
+def generate_xml_template(from_date: str, to_date: str, count: int, total_responses: int):
+    # SOAP-style response envelope. Element and namespace names below are a best-effort
+    # reconstruction of the Workday Get_Workday_Account_Signons response structure.
+    return f"""<env:Envelope xmlns:env="http://schemas.xmlsoap.org/soap/envelope/">
+    <env:Body>
+        <wd:Get_Workday_Account_Signons_Response xmlns:wd="urn:com.workday/bsvc">
+            <wd:Request_Criteria>
+                <wd:From_DateTime>{from_date}</wd:From_DateTime>
+                <wd:To_DateTime>{to_date}</wd:To_DateTime>
+            </wd:Request_Criteria>
+            <wd:Response_Filter>
+                <wd:As_Of_Entry_DateTime>{from_date}</wd:As_Of_Entry_DateTime>
+                <wd:Page>1</wd:Page>
+                <wd:Count>{count}</wd:Count>
+            </wd:Response_Filter>
+            <wd:Response_Results>
+                <wd:Total_Results>{total_responses}</wd:Total_Results>
+                <wd:Total_Pages>1</wd:Total_Pages>
+                <wd:Page_Results>{total_responses}</wd:Page_Results>
+                <wd:Page>1</wd:Page>
+            </wd:Response_Results>
+            <wd:Response_Data>
+                %%workday_account_signon_items%%
+            </wd:Response_Data>
+        </wd:Get_Workday_Account_Signons_Response>
+    </env:Body>
+</env:Envelope>
+"""
+
+
+def random_datetime_in_range(start_str: str, end_str: str):
+ start_datetime = datetime.strptime(start_str, DATE_FORMAT)
+ end_datetime = datetime.strptime(end_str, DATE_FORMAT)
+
+ random_seconds = random.randint(0, int((end_datetime - start_datetime).total_seconds()))
+ return (start_datetime + timedelta(seconds=random_seconds)).strftime(DATE_FORMAT)
+
+
+def random_string(length: int = 10):
+ return ''.join(random.choices(string.ascii_uppercase + string.digits, k=length))
+
+
+def xml_generator(from_datetime: str, to_datetime: str, count: int):
+ # Generate randomized Signon_DateTime
+ random_signon_datetime = random_datetime_in_range(from_datetime, to_datetime)
+
+ # Determine the number of Workday_Account_Signon items
+ num_signon_items = random.randint(1, count)
+
+ template = generate_xml_template(from_date=from_datetime, to_date=to_datetime, total_responses=num_signon_items,
+ count=num_signon_items)
+
+ # Generate Workday_Account_Signon items
+ signon_items = []
+ for _ in range(num_signon_items):
+ signon_item = SIGNON_ITEM_TEMPLATE.format(
+ signon_datetime=random_signon_datetime,
+ user_name=random_string(),
+ short_session_id=random_string(length=6)
+ )
+ signon_items.append(signon_item)
+
+ # Insert the generated items into the main template
+ populated_template = template.replace("%%workday_account_signon_items%%", "\n".join(signon_items))
+
+ return populated_template
+
+
+@APP.route('/', methods=['POST'])
+def mock_workday_endpoint():
+ request_text = request.get_data(as_text=True)
+ demisto.info(f"{request_text}")
+
+    # Define regex patterns for the request fields (element names assumed per the
+    # Workday Get_Workday_Account_Signons request)
+    from_datetime_pattern = r'<bsvc:From_DateTime>(.*?)</bsvc:From_DateTime>'
+    to_datetime_pattern = r'<bsvc:To_DateTime>(.*?)</bsvc:To_DateTime>'
+    count_pattern = r'<bsvc:Count>(\d+)</bsvc:Count>'
+
+ # Extract values using regex
+ from_datetime_match = re.search(from_datetime_pattern, request_text)
+ from_datetime = from_datetime_match.group(1) if from_datetime_match else "2023-08-23T18:20:03Z"
+
+ to_datetime_match = re.search(to_datetime_pattern, request_text)
+ to_datetime = to_datetime_match.group(1) if to_datetime_match else "2023-08-23T18:20:08Z"
+
+ count_match = re.search(count_pattern, request_text)
+ count = int(count_match.group(1)) if count_match else 1
+
+ # Use the extracted values to generate the response XML
+ response_xml = xml_generator(from_datetime, to_datetime, count)
+
+ # Return the generated XML
+ return Response(response_xml, mimetype='text/xml')
+
+
+def module_of_testing(is_longrunning: bool, longrunning_port: int):
+ if longrunning_port and is_longrunning:
+ xml_response = xml_generator('2023-08-21T11:46:02Z', '2023-08-21T11:47:02Z', 2)
+ if xml_response:
+ return_results('ok')
+ else:
+ raise DemistoException('Could not connect to the long running server. Please make sure everything is '
+ 'configured.')
+ else:
+ raise DemistoException('Please make sure the long running port is filled and the long running checkbox is '
+ 'marked.')
+
+
+''' MAIN FUNCTION '''
+
+
+def main():
+ command = demisto.command()
+ params = demisto.params()
+ port = int(params.get('longRunningPort', '5000'))
+ is_longrunning = params.get("longRunning")
+ try:
+ if command == 'test-module':
+ module_of_testing(longrunning_port=port, is_longrunning=is_longrunning)
+ elif command == 'long-running-execution':
+ while True:
+ server = WSGIServer(('0.0.0.0', port), APP)
+ server.serve_forever()
+ else:
+ raise NotImplementedError(f"command {command} is not implemented.")
+
+ # Log exceptions and return errors
+ except Exception as e:
+ return_error(
+ f"Failed to execute {demisto.command()} command.\nError:\n{str(e)}"
+ )
+
+
+''' ENTRY POINT '''
+
+if __name__ in ('__main__', '__builtin__', 'builtins'):
+ main()
diff --git a/Packs/Workday/Integrations/WorkdaySignonEventGenerator/WorkdaySignonEventGenerator.yml b/Packs/Workday/Integrations/WorkdaySignonEventGenerator/WorkdaySignonEventGenerator.yml
new file mode 100644
index 000000000000..ab55b598c955
--- /dev/null
+++ b/Packs/Workday/Integrations/WorkdaySignonEventGenerator/WorkdaySignonEventGenerator.yml
@@ -0,0 +1,31 @@
+category: Analytics & SIEM
+beta: true
+commonfields:
+ id: WorkdaySignonEventGenerator
+ version: -1
+configuration:
+- display: Long running instance
+ name: longRunning
+ type: 8
+ required: false
+- defaultvalue: '5000'
+- display: Port mapping (<port> or <host port>:<docker port>)
+ name: longRunningPort
+ required: true
+ type: 0
+description: Generates mock sign on events for Workday Signon Event Collector. Use these for testing and development.
+display: Workday Signon Event Generator (Beta)
+name: WorkdaySignonEventGenerator
+system: true
+script:
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python3
+ dockerimage: demisto/teams:1.0.0.72377
+ longRunning: true
+ longRunningPort: true
+fromversion: 6.8.0
+toversion: 7.9.9
+tests:
+- No tests (auto formatted)
diff --git a/Packs/Workday/Integrations/WorkdaySignonEventGenerator/WorkdaySignonEventGenerator_description.md b/Packs/Workday/Integrations/WorkdaySignonEventGenerator/WorkdaySignonEventGenerator_description.md
new file mode 100644
index 000000000000..68892bac28ae
--- /dev/null
+++ b/Packs/Workday/Integrations/WorkdaySignonEventGenerator/WorkdaySignonEventGenerator_description.md
@@ -0,0 +1,5 @@
+## Event Generator Help
+
+Generates mock sign on events for the Workday Signon Event Collector. Use these for testing and development.
+
+Note: This is a beta Integration, which lets you implement and test pre-release software. Since the integration is beta, it might contain bugs. Updates to the integration during the beta phase might include non-backward compatible features. We appreciate your feedback on the quality and usability of the integration to help us identify issues, fix them, and continually improve.
\ No newline at end of file
diff --git a/Packs/Workday/Integrations/WorkdaySignonEventGenerator/WorkdaySignonEventGenerator_image.png b/Packs/Workday/Integrations/WorkdaySignonEventGenerator/WorkdaySignonEventGenerator_image.png
new file mode 100644
index 0000000000000000000000000000000000000000..1426dc48413218e3c859ace23ebd9f8ce3605796
GIT binary patch
literal 4818
zcmV;@5-shCP)mX?Vy%;3NPr}zE-|3CNKv0(B9UBlz6_0;WruX%`f0LHSBrdR?KC;2yG&weS`E8(}M&yA^0_C%WgewaumQjud7vV!QUqU-lo2DC4v$}
z(FGX~+Sw1#Hv{k|F>eJGVBIu1dhHB(y<406a6k8bkX}N9Rn_B93@iXS7{FYDzmNCM
zC>q
zw*bIx3Mv7eSq5|C%(9j8Gur#`JhP5g-72d9=)djvzU0`xnPt$q9@pjiXuBPAz7z8T
zi%#+~Nc|uRb9usQ)=ZN_0KlCDWRr2PTWEos&odz5`$hnA7vkz{D8LxTSAewNy0c`D
z=t9{knjypZm*3a29sP*UqcQL2n0F_h4Fs0!ab0}h%zdI+`uXlPEpk}Qn=f^P@wp#F
z0s5n)j-R_`$?kNS1d+lAAiZ?83Tm5d!S|cHTjYebE%MIvIxmEJTV!8&hjCc%CMplt
z=RVwTvjrNTFJE`Q?7{KsS$xCeAiV&J=ExBD8S(N?Tjk@h;S+I8OZxUn`g6qUFwtI%WAHZJj
zk~s3gcuiLhK7N{*`v
z|32~o9=h>)qvh}Nc;28TSEpn$1p!`_aLIkq&sC`NR-Yrgu;04a9m}&{I^sUf)}o)9
zCOgAz|B5afd~G7oi=CCQUe%1YRVKN9T9!+`UM?bk+}C{v
zxZ{~!uWwU6)w>YnrDFc|Q$)zilsF^kK%&giOcT^KF;p
zG@LIAx>{tVEjymI2~2DS>H2h_5czT-ceTkW_7bF*WgY&mLfad3-1cb6OhQ~D0#Ik$
zxZ6!3F@uiB3^2=0Q^h&jWTCy%?4he>ww)fc-H@vnc^Aw~nXb+LeY)!`*i(LG%bv17I$D^8X;5X7~%#$Bt|fo*aO+mqSqtAQMVe9L8Bb(cNQ
z(s;}ONfKXjatFoX57&6r0+&Xiw8#zVu`imMoAc7V9F`Srdl%2O3wI2WQGNwruGGUh
zuCpMoXI|lM-j=^gJW{_X8B-CfuGWMK0P7nfxN9BLe@#z;x-CJjzLw*gFF!P^rsA5H
z6iebUuk~2$4%jta_U|??=v4?^+xix$v2*vKjh*|;M1PoRs1D2vrg7K2uD&5SXlP-<
zzKPa~iw#r-i!Hc4np>PB2e5scoNXYJJ0O4a1PxymT`UpZG+znY{(iVI3u0l;kX$ji
zLnbbb@?O}YZR)a{`zKsGKVJ-*<)jr+&hzizso42O?HlE5435K3tBlkiAzZAkn!g~?
zaj$uRNiJ6LbssGEQeG6ezW$X(2H-u9oj)Sy1rJNp?<0VrrHaxv0=V7_TIIw)MBHOo
z!xi4D
z9KUnae8ARABCeeTNL?<3s*h_o`|e$sGaT0SfCB7=bL@;tfSV(89{#OG%*4l?KU4wK
ztQ-J)!8r=B=RcGMup`#`e{MQ<#GK~l;mW73QDZ+R<62`#l!fB36**^8mX@d6vOrFc
z$s%{iG+DsY%`uhNHV4WSRQa&Rf@=V5fT}GV1@@ZC-$Ij5P`z3(U33kABk+@+Zzw0n
z&E@@c?ftW4OsdvigKBR?Mzbi{o;{|rEhzvVyup9d@IuF4Z);v&zKl-`Nh!5<@_?f|
z_L1YS+&lhm0nD~wu7k%~VI`&ZNR9!o!^bY#d)EmIcXds9&!28AILro^%9l|1*Zt6c
z{qPALdkOx=W6~Zei_-nsa;`yEBcIgpJjgG=l@82d!zAz$!I5c*n{DRRQKk(_!D+JY-}ygg(lz)o4l`Fmjg
zN@mzn>Wu1&!B@3iW1kbEf_sOaXTo*h&0mf6{vzXlM?P2AdH!$N2SGoQ0yq}3?o0m6
z*}8K^srx7&o#Fcjr11l56|9NuKEd~oOG(X1A4h2$aEnaxF
zGNcx9>0a6vTX66bIbjJ76yAgWjd=b8;D#sHz{Z!!c%8qRTCRN-Ambr|E}3)#`)`+$
z|1NDOl{!8l%y=hUuYyo(Rf=4!0>HuNA@Dwo+oFmFFH&D>A6W+r!M?Mk(4l3B^
zm$PW_gK$6(WQ4w6O2KEe-FdRul7iXFYq0~5e|9{dB@uxdyEu}D-#{MzzlM*sAwu&4
zs~}!@g6>gT$bC+aL$W51;={A=elDPsjaakifw@q1DT@
zEtv#S>r^X)?x79-esQ~I+@LY-BZV#-Le0}%lPD~!nF_EMY*PNb;PLY9RDz6~&9LC$
z*KP;^x!x+-hWes^ur@M7F3%SsVDx4bVllkU&5f<3VZnPGFW6%h<8wv-@r!mgJT7Cu
zzV=i~#&ybymYnQ&R_p~As^=&KTi{uZ0#GqPb*11-2BWKZt_!tKm9tiCc2?qb9tw~*
z>g^A2Y8=r8(W(im7aW4mw-QLiuG7k67Q7F0pCZ%P_{ZAjtXJnFwwp@_U)!9fJIf(4
zG^jg-h8{bAgtm<`K-L#JzF-t!8P{*sK3b#3%;ja*T2Us!D>U82@w3>b9tN2&*6FJ+
zb&S;EN!MlpP?yaR0zu{(gLRx$0;{w+#^T90n2QeQqOH;)J|C`Lrg0JX9;bcw9Emck
ziAfq{_Lz+I->!YJyuuHI$H-dgJUd@4SiO;v(PnW*=+>@E&iorYch|T$Ni{Ei#ieo4
zEcztBBi>y^uyRbwEFmU+OWhF#Q}REjjn4XL>2!w#hpyg`u{K(bbMeJ+zQ$uO*s=o^
zhGc=RFSM5dF0?mlUuL;r09Fg;S&YKXLOeW8-J%z)5wBf;N)vj-Om~m4hF(=R1WJ`xio#vn3SLT1b{#o!udc6pXkjx50HvglHv)hawvT+fwDF->fA&CX~D6ED(p
z0A7{=xOr;;|I_3R;e6#QP9UxFl!aq>i2-h(7z14NkJG+1zzjpx0A_eIS#Ylcsq-Dc
zW)p)tT-r>Pv63pvGan$KkFF3qT!H;_<5S>vM&%o*sODTlJbR3UrVAtnDK>IxZ$
z2`J-=YFP*3uW$ds3qsV*HAy&2!J;dn5_myFJjS{H!h!?U+D%3~&GcL1Tnn~(4-1lrrfGf(RWF5?X
z7~71e$Sz?hELuC=A#ddpSCmRrdb)2Yv?-Hp9h~$A3~R<)-0i|lOniUL97kml@Vv%F
zr*EuC#KQ2gLB~=?jn;@@ICUkzP+tm82L=<%*PzuvwpxHm-=zR@#qcq>pgXBG>UI)i
zWb_q#3a{S0phq)q27IDmS%}023SnN1X&rLx$
zD8P?b++?}=tRx7a#?0{d0P)ZrJpa8?Xg^Ar32Qwc_L)R31u)5F_*_Thbm(}wD<0F}
z?}xBC2|RQF;Hm1yNmcKwofh-9h6kVk$e1~E0c`J?hZRGc;^DCxl9-+
zT-JcJdRjH-8euQhQjL_psw}mSRDqp*E616vvPKqdUGx)j%I%E7M)4|c?Sk7CKq?h4
zcwB$ddWyj%$``!`$C6*;%~s@#Nc4CYjiik)Omuu)wm6W3QNpWPw-3+FD@G`|-XS}s
zX2L~E0-Tf~Wrl3p-fo>7w>S828d3@-AWt|Y@3l7#jc*650jy+`2rR^l0X$cz3wFXu
zz9kvse+}+@0W9PaEpgmIv+C1|TO7D(FSwAIH)6}x{4A^s9VO?-SEU_$F|on;KuX~U
zOkb^}LQA))Io=t&kf3qjuGiS{R;+h1FJAIY6rHC@gIoxY;>rnIK0#lL6dj<&f}OfR_Egtp+pTl;K}gWA6|1c6(Oeb
zn0h8yd~;>Wsa0`)l2(X8dex8t7XUK%(WEl{=Ii-+uR$816k9KoS94JkTqcd7{$Sp^
s=I7xB25BIZwPM6nS6}P*7Y^?K0sibwW-e79uK)l507*qoM6N<$g0wkMMF0Q*
literal 0
HcmV?d00001
diff --git a/Packs/Workday/Integrations/WorkdaySignonEventGenerator/WorkdaySignonEventGenerator_test.py b/Packs/Workday/Integrations/WorkdaySignonEventGenerator/WorkdaySignonEventGenerator_test.py
new file mode 100644
index 000000000000..e3cd63d4a6ef
--- /dev/null
+++ b/Packs/Workday/Integrations/WorkdaySignonEventGenerator/WorkdaySignonEventGenerator_test.py
@@ -0,0 +1,133 @@
+import unittest
+from unittest.mock import patch
+
+from CommonServerPython import DemistoException
+from WorkdaySignonEventGenerator import (
+ random_datetime_in_range,
+ random_string,
+ xml_generator,
+ mock_workday_endpoint,
+ module_of_testing,
+ main,
+)
+
+from WorkdaySignonEventGenerator import APP as app
+
+
+class TestWorkdaySignonEventGenerator(unittest.TestCase):
+ def test_random_datetime_in_range(self) -> None:
+ """
+ Given:
+ - A start datetime '2023-08-21T11:46:02Z' and an end datetime '2023-08-21T11:47:02Z'
+
+ When:
+ - Generating a random datetime in the given range
+
+ Then:
+ - Ensure that the random datetime generated falls within the specified range
+ """
+ random_date = random_datetime_in_range(
+ "2023-08-21T11:46:02Z", "2023-08-21T11:47:02Z"
+ )
+ assert "2023-08-21T11:46:02Z" <= random_date <= "2023-08-21T11:47:02Z"
+
+ def test_random_string(self) -> None:
+ """
+ Given:
+ - No initial conditions
+
+ When:
+ - Generating a random string of default length 10
+
+ Then:
+ - Ensure that the length of the generated string is 10
+ """
+ assert len(random_string()) == 10
+
+ def test_random_guid(self) -> None:
+ """
+ Given:
+ - No initial conditions
+
+ When:
+ - Generating a random GUID-like string of default length 6
+
+ Then:
+ - Ensure that the length of the generated string is 6
+ """
+ assert len(random_string(length=6)) == 6
+
+ def test_xml_generator(self) -> None:
+ """
+ Given:
+ - A start datetime '2023-08-21T11:46:02Z', an end datetime '2023-08-21T11:47:02Z', and a count 1
+
+ When:
+ - Generating an XML response containing Workday sign-on events
+
+ Then:
+ - Ensure that the XML response contains exactly one Workday sign-on event
+ """
+ xml_response = xml_generator("2023-08-21T11:46:02Z", "2023-08-21T11:47:02Z", 1)
+        assert xml_response.count("<wd:Workday_Account_Signon>") == 1
+
+
+class TestMockWorkdayEndpoint(unittest.TestCase):
+ def setUp(self):
+ self.app = app.test_client()
+ self.app.testing = True
+
+ @patch("WorkdaySignonEventGenerator.Response")
+ def test_mock_workday_endpoint(self, MockResponse):
+        mock_post_data = """<bsvc:From_DateTime>2023-08-21T11:46:02Z</bsvc:From_DateTime>
+        <bsvc:To_DateTime>2023-08-21T11:47:02Z</bsvc:To_DateTime>
+        <bsvc:Count>2</bsvc:Count>"""
+ with self.app as c, c.post("/", data=mock_post_data):
+ mock_workday_endpoint()
+
+ MockResponse.assert_called()
+
+
+class TestModuleOfTesting(unittest.TestCase):
+ @patch("WorkdaySignonEventGenerator.demisto.results")
+ @patch("WorkdaySignonEventGenerator.return_error")
+ @patch("WorkdaySignonEventGenerator.xml_generator")
+ def test_module_of_testing(self, MockXmlGenerator, MockReturnError, MockResults):
+ MockXmlGenerator.return_value = "some response"
+
+ # Test for valid input
+ module_of_testing(True, 5000)
+ MockResults.assert_called_with("ok")
+
+ # Test for invalid input
+ try:
+ module_of_testing(False, None)
+ except DemistoException as e:
+ assert (
+ str(e)
+ == "Please make sure the long running port is filled and the long running checkbox is marked."
+ )
+ else:
+ raise AssertionError("Expected DemistoException but did not get one")
+
+
+class TestMainTestingFunction(unittest.TestCase):
+ @patch("WorkdaySignonEventGenerator.demisto")
+ def test_main_function_test_module(self, MockDemisto):
+ MockDemisto.params.return_value = {
+ "longRunningPort": "5000",
+ "longRunning": True,
+ }
+ MockDemisto.command.return_value = "test-module"
+
+ with patch(
+ "WorkdaySignonEventGenerator.module_of_testing"
+ ) as MockModuleTesting:
+ main()
+ MockModuleTesting.assert_called_with(
+ longrunning_port=5000, is_longrunning=True
+ )
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/Packs/Workday/ModelingRules/WorkdayEventCollector/WorkdayEventCollector.xif b/Packs/Workday/ModelingRules/WorkdayEventCollector/WorkdayEventCollector.xif
index e29850d0b324..83ee952ba809 100644
--- a/Packs/Workday/ModelingRules/WorkdayEventCollector/WorkdayEventCollector.xif
+++ b/Packs/Workday/ModelingRules/WorkdayEventCollector/WorkdayEventCollector.xif
@@ -15,4 +15,64 @@ alter
| alter
xdm.source.user.identifier = json_extract_scalar(`target`, "$.id"),
xdm.target.host.device_category = json_extract_scalar(`target`, "$.descriptor"),
- xdm.target.url = json_extract_scalar(`target`, "$.href");
\ No newline at end of file
+ xdm.target.url = json_extract_scalar(`target`, "$.href");
+
+[MODEL: dataset=workday_signon_raw]
+alter
+ // define util constants
+ boolean_true = to_boolean("TRUE"),
+ boolean_false = to_boolean("FALSE"),
+
+ // add labels for enriching event description according to the boolean flags
+    signon_successful_label = if(to_integer(Successful) = 1, "Signon was successful.", to_integer(Successful) = 0, "Signon was unsuccessful."),
+ account_disabled_or_expired_label = if(to_integer(Account_Disabled_or_Expired) = 1, "Account is disabled or expired."),
+ device_trusted_label = if(to_integer(Device_is_Trusted) = 1, "Sign on is from a trusted device."),
+ failed_signon_label = if(to_integer(Failed_Signon) = 1, "An invalid password was supplied for the Signon attempt."),
+ invalid_credentials_label = if(to_integer(Invalid_Credentials) = 1, "User provided invalid credentials."),
+ invalid_auth_channel_label = if(to_integer(Invalid_for_Authentication_Channel) = 1, "Invalid for authentication channel."),
+ invalid_auth_policy_label = if(to_integer(Invalid_for_Authentication_Policy) = 1, "Invalid for authentication policy."),
+ mfa_required_label = if(to_integer(Requires_MFA) = 1, "MFA is required."),
+ mfa_has_grace_label = if(to_integer(Has_Grace_Period_for_MFA) = 1, "MFA has a grace period."),
+ mfa_auth_exempt_label = if(to_integer(MFA_Authentication_Exempt) = 1, "MFA authentication is exempted."),
+ mfa_enrollment_label = if(to_integer(MFA_Enrollment) = 1, "User is enrolled in MFA."),
+ password_change_required_label = if(to_integer(Required_Password_Change) = 1, "Password change required."),
+ password_reset_label = if(to_integer(Forgotten_Password_Reset_Request) = 1, "A request was made to reset the password in the Signon attempt."),
+ password_changed_label = if(to_integer(Password_Changed) = 1, "The password was changed after the signon."),
+ read_only_label = if(to_integer(Tenant_Access_Read_Only) = 1, "Read only Access is enabled for the signon.")
+| alter
+ // init useful flags & extract nested json properties
+ device_type_reference_id = Device_Type_Reference -> ID,
+ is_account_disabled = if(to_integer(Account_Disabled_or_Expired) = 1, boolean_true, to_integer(Account_Disabled_or_Expired) = 0, boolean_false),
+ is_mfa_needed = if(to_integer(Requires_MFA) = 1, boolean_true, to_integer(Requires_MFA) = 0, boolean_false),
+ is_password_change_required = if(to_integer(Required_Password_Change) = 1, boolean_true, to_integer(Required_Password_Change) = 0, boolean_false),
+ is_sign_on_successful = if(to_integer(Successful) = 1, boolean_true, to_integer(Successful) = 0, boolean_false),
+ mfa_authentication_type_id = Multi_Factor_Authentication_Type_Reference -> ID,
+ os = lowercase(Operating_System),
+ saml_identity_provider_id = SAML_Identity_Provider_Reference -> ID,
+ src_ipv4 = if(Signon_IP_Address ~= "\.", Signon_IP_Address),
+ src_ipv6 = if(Signon_IP_Address ~= ":", Signon_IP_Address),
+    event_labels = arraycreate(signon_successful_label, account_disabled_or_expired_label, device_trusted_label, failed_signon_label, invalid_credentials_label, invalid_auth_channel_label, invalid_auth_policy_label, mfa_required_label, mfa_has_grace_label, mfa_auth_exempt_label, mfa_enrollment_label, password_change_required_label, password_reset_label, password_changed_label, read_only_label)
+| alter
+ // map fields
+ xdm.auth.auth_method = Authentication_Type,
+ xdm.auth.is_mfa_needed = is_mfa_needed,
+ xdm.auth.mfa.method = mfa_authentication_type_id,
+ xdm.auth.mfa.provider = if(to_integer(MFA_Enrollment) = 1, saml_identity_provider_id),
+ xdm.event.type = if(is_sign_on_successful, "Successful Signon", is_sign_on_successful = false, "Signon Failure", "Signon"),
+ xdm.event.description = arraystring(event_labels, " "),
+ xdm.event.outcome = if(is_sign_on_successful, XDM_CONST.OUTCOME_SUCCESS, is_sign_on_successful = boolean_false, XDM_CONST.OUTCOME_FAILED, XDM_CONST.OUTCOME_UNKNOWN),
+ xdm.event.outcome_reason = Authentication_Failure_Message,
+ xdm.logon.type = Authentication_Channel,
+ xdm.network.session_id = Short_Session_ID,
+ xdm.network.tls.protocol_version = TLS_Version,
+ xdm.observer.unique_identifier = API_Client_ID,
+ xdm.source.host.device_id = device_type_reference_id,
+ xdm.source.host.os = Operating_System,
+ xdm.source.host.os_family = if(os contains "windows", XDM_CONST.OS_FAMILY_WINDOWS, os contains "mac", XDM_CONST.OS_FAMILY_MACOS, os contains "linux", XDM_CONST.OS_FAMILY_LINUX, os contains "android", XDM_CONST.OS_FAMILY_ANDROID, os contains "ios", XDM_CONST.OS_FAMILY_IOS, os contains "ubuntu", XDM_CONST.OS_FAMILY_UBUNTU, os contains "debian", XDM_CONST.OS_FAMILY_DEBIAN, os contains "fedora", XDM_CONST.OS_FAMILY_FEDORA, os contains "centos", XDM_CONST.OS_FAMILY_CENTOS, os contains "chrome", XDM_CONST.OS_FAMILY_CHROMEOS, os contains "solaris", XDM_CONST.OS_FAMILY_SOLARIS, os contains "scada", XDM_CONST.OS_FAMILY_SCADA, Operating_System),
+ xdm.source.ipv4 = src_ipv4,
+ xdm.source.ipv6 = src_ipv6,
+ xdm.source.user_agent = Browser_Type,
+ xdm.source.user.is_disabled = is_account_disabled,
+ xdm.source.user.is_password_expired = is_password_change_required,
+ xdm.source.user.username = to_string(User_Name),
+ xdm.source.zone = Location;
\ No newline at end of file
diff --git a/Packs/Workday/ModelingRules/WorkdayEventCollector/WorkdayEventCollector.yml b/Packs/Workday/ModelingRules/WorkdayEventCollector/WorkdayEventCollector.yml
index d1a92a50a454..7d4539826df3 100644
--- a/Packs/Workday/ModelingRules/WorkdayEventCollector/WorkdayEventCollector.yml
+++ b/Packs/Workday/ModelingRules/WorkdayEventCollector/WorkdayEventCollector.yml
@@ -1,5 +1,5 @@
-fromversion: 8.2.0
-id: workday_workday_modeling_rule
+fromversion: 8.3.0
+id: Workday_Workday_ModelingRule
name: Workday Modeling Rule
rules: ''
schema: ''
diff --git a/Packs/Workday/ModelingRules/WorkdayEventCollector/WorkdayEventCollector_schema.json b/Packs/Workday/ModelingRules/WorkdayEventCollector/WorkdayEventCollector_schema.json
index a62eec04cb26..80a8291efc11 100644
--- a/Packs/Workday/ModelingRules/WorkdayEventCollector/WorkdayEventCollector_schema.json
+++ b/Packs/Workday/ModelingRules/WorkdayEventCollector/WorkdayEventCollector_schema.json
@@ -36,5 +36,139 @@
"type": "string",
"is_array": false
}
- }
+ },
+
+  "workday_signon_raw": {
+    "access_restriction_reference": {
+      "type": "string",
+      "is_array": false
+    },
+    "account_disabled_or_expired": {
+      "type": "int",
+      "is_array": false
+    },
+    "api_client_id": {
+      "type": "string",
+      "is_array": false
+    },
+    "authentication_channel": {
+      "type": "string",
+      "is_array": false
+    },
+    "authentication_failure_message": {
+      "type": "string",
+      "is_array": false
+    },
+    "authentication_type": {
+      "type": "string",
+      "is_array": false
+    },
+    "browser_type": {
+      "type": "string",
+      "is_array": false
+    },
+    "device_is_trusted": {
+      "type": "int",
+      "is_array": false
+    },
+    "device_type_reference": {
+      "type": "string",
+      "is_array": false
+    },
+    "failed_signon": {
+      "type": "int",
+      "is_array": false
+    },
+    "forgotten_password_reset_request": {
+      "type": "int",
+      "is_array": false
+    },
+    "has_grace_period_for_mfa": {
+      "type": "int",
+      "is_array": false
+    },
+    "invalid_for_authentication_channel": {
+      "type": "int",
+      "is_array": false
+    },
+    "invalid_for_authentication_policy": {
+      "type": "int",
+      "is_array": false
+    },
+    "invalid_credentials": {
+      "type": "int",
+      "is_array": false
+    },
+    "location": {
+      "type": "string",
+      "is_array": false
+    },
+    "mfa_enrollment": {
+      "type": "int",
+      "is_array": false
+    },
+    "mfa_authentication_exempt": {
+      "type": "int",
+      "is_array": false
+    },
+    "multi_factor_authentication_type_reference": {
+      "type": "string",
+      "is_array": false
+    },
+    "operating_system": {
+      "type": "string",
+      "is_array": false
+    },
+    "password_changed": {
+      "type": "int",
+      "is_array": false
+    },
+    "required_password_change": {
+      "type": "int",
+      "is_array": false
+    },
+    "requires_mfa": {
+      "type": "int",
+      "is_array": false
+    },
+    "saml_identity_provider_reference": {
+      "type": "string",
+      "is_array": false
+    },
+    "short_session_id": {
+      "type": "string",
+      "is_array": false
+    },
+    "signon_datetime": {
+      "type": "datetime",
+      "is_array": false
+    },
+    "signoff_datetime": {
+      "type": "datetime",
+      "is_array": false
+    },
+    "signon_ip_address": {
+      "type": "string",
+      "is_array": false
+    },
+    "successful": {
+      "type": "int",
+      "is_array": false
+    },
+    "tenant_access_read_only": {
+      "type": "int",
+      "is_array": false
+    },
+    "tls_version": {
+      "type": "string",
+      "is_array": false
+    },
+    "user_name": {
+      "type": "string",
+      "is_array": false
+    }
+  }
}
\ No newline at end of file
diff --git a/Packs/Workday/README.md b/Packs/Workday/README.md
index e9de4e400b56..1e9adbd722fb 100644
--- a/Packs/Workday/README.md
+++ b/Packs/Workday/README.md
@@ -1,2 +1,12 @@
-Note: In order to parse the timestamp correctly, make sure that the "requestTime" field is in UTC time zone (timestamp ends with "Z").
-The supported time format is YYYY-MM-DDTHH:MM:SS.E3Z%z (2023-07-15T07:00:00.000Z).
\ No newline at end of file
+<~XSIAM>
+
+This pack supports collection and modeling of the following event types:
+- *User activity* audit log entries.
+- *Sign-on* events.
+
+Note: Regarding the *user activity* audit log entries,
+in order to parse the timestamp correctly,
+make sure that the "requestTime" field is in UTC time zone (timestamp ends with "Z").
+The supported time format is *YYYY-MM-DDTHH:MM:SS.E3Z%z* (e.g., *2023-09-05T14:00:00.123Z*).
+
+</~XSIAM>
\ No newline at end of file
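As a quick illustration of the `requestTime` format called out in the README above, here is one way (a sketch, not part of the pack) to produce a compliant UTC timestamp with millisecond precision and a trailing `Z`:

```python
from datetime import datetime, timezone

# e.g. "2023-09-05T14:00:00.123Z"
request_time = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + "Z"
print(request_time)
```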
diff --git a/Packs/Workday/ReleaseNotes/1_4_0.md b/Packs/Workday/ReleaseNotes/1_4_0.md
new file mode 100644
index 000000000000..af0c80de1a7d
--- /dev/null
+++ b/Packs/Workday/ReleaseNotes/1_4_0.md
@@ -0,0 +1,17 @@
+
+#### Integrations
+
+##### New: Workday Sign On Event Collector
+
+New: Use the Workday Sign On Event Collector integration to get sign on logs from Workday (Available from Cortex XSIAM 8.2.0).
+
+##### New: Workday Signon Event Generator (Beta)
+
+New: Generates mock sign on events for Workday Signon Event Collector. Use these for testing and development. (Available from Cortex XSIAM 8.3.0).
+
+#### Modeling Rules
+
+##### Workday Modeling Rule
+
+Added support for modeling sign on events (Available from Cortex XSIAM 8.3.0).
+
diff --git a/Packs/Workday/pack_metadata.json b/Packs/Workday/pack_metadata.json
index 21d564790b38..f42484049282 100644
--- a/Packs/Workday/pack_metadata.json
+++ b/Packs/Workday/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Workday",
"description": "Workday offers enterprise-level software solutions for financial management, human resources, and planning.",
"support": "xsoar",
- "currentVersion": "1.3.9",
+ "currentVersion": "1.4.0",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",