Skip to content

Commit

Permalink
Merge branch 'master' into pr/11253
Browse files Browse the repository at this point in the history
  • Loading branch information
v-prasadboke committed Nov 4, 2024
2 parents c06398c + 3261a3d commit 5566c1d
Show file tree
Hide file tree
Showing 201 changed files with 19,313 additions and 5,927 deletions.
2 changes: 0 additions & 2 deletions .github/workflows/runAsimSchemaAndDataTesters.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -192,8 +192,6 @@ jobs:
# Execute the script
& $filePath
azPSVersion: "latest"
errorActionPreference: continue
failOnStandardError: false
Run-ASim-Parser-Filtering-Tests:
needs: Run-ASim-Sample-Data-Ingest
name: Run ASim Parser Filtering tests
Expand Down
8 changes: 6 additions & 2 deletions .script/dataConnectorValidator.ts
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,11 @@ export async function IsValidDataConnectorSchema(filePath: string): Promise<Exit

/* Disabling temporarily till we get confirmation from PM*/
// isValidFileName(filePath
isValidPermissions(jsonFile.permissions, connectorCategory);
/* Skip validation for Solution Microsoft Exchange Security - Exchange On-Premises Solution */
if (!filePath.includes('Microsoft Exchange Security - Exchange On-Premises'))
{
isValidPermissions(jsonFile.permissions, connectorCategory);
}
}
else{
console.warn(`Skipping File as it is of type Events : ${filePath}`)
Expand Down Expand Up @@ -173,4 +177,4 @@ let CheckOptions = {
},
};

runCheckOverChangedFiles(CheckOptions, fileKinds, fileTypeSuffixes, filePathFolderPrefixes);
runCheckOverChangedFiles(CheckOptions, fileKinds, fileTypeSuffixes, filePathFolderPrefixes);
Original file line number Diff line number Diff line change
@@ -0,0 +1,201 @@
{
"Name":"Tenable_VM_Compliance_CL",
"Properties":[
{
"Name": "TenantId",
"Type": "string"
},
{
"Name": "SourceSystem",
"Type": "string"
},
{
"Name": "MG",
"Type": "string"
},
{
"Name": "ManagementGroupName",
"Type": "string"
},
{
"Name": "TimeGenerated",
"Type": "datetime"
},
{
"Name": "Computer",
"Type": "string"
},
{
"Name": "RawData",
"Type": "string"
},
{
"Name": "asset_uuid_g",
"Type": "string"
},
{
"Name": "first_seen_t",
"Type": "datetime"
},
{
"Name": "last_seen_t",
"Type": "datetime"
},
{
"Name": "audit_file_s",
"Type": "string"
},
{
"Name": "check_id_s",
"Type": "string"
},
{
"Name": "check_name_s",
"Type": "string"
},
{
"Name": "check_info_s",
"Type": "string"
},
{
"Name": "expected_value_s",
"Type": "string"
},
{
"Name": "actual_value_s",
"Type": "string"
},
{
"Name": "status_s",
"Type": "string"
},
{
"Name": "reference_s",
"Type": "string"
},
{
"Name": "see_also_s",
"Type": "string"
},
{
"Name": "solution_s",
"Type": "string"
},
{
"Name": "plugin_id_d",
"Type": "real"
},
{
"Name": "state_s",
"Type": "string"
},
{
"Name": "description_s",
"Type": "string"
},
{
"Name": "compliance_benchmark_name_s",
"Type": "string"
},
{
"Name": "compliance_benchmark_version_s",
"Type": "string"
},
{
"Name": "compliance_control_id_s",
"Type": "string"
},
{
"Name": "compliance_full_id_s",
"Type": "string"
},
{
"Name": "compliance_functional_id_s",
"Type": "string"
},
{
"Name": "compliance_informational_id_s",
"Type": "string"
},
{
"Name": "synopsis_s",
"Type": "string"
},
{
"Name": "last_observed_t",
"Type": "datetime"
},
{
"Name": "metadata_id_s",
"Type": "string"
},
{
"Name": "uname_output_s",
"Type": "string"
},
{
"Name": "indexed_at_t",
"Type": "datetime"
},
{
"Name": "plugin_name_s",
"Type": "string"
},
{
"Name": "asset_id_g",
"Type": "string"
},
{
"Name": "asset_ipv4_addresses_s",
"Type": "string"
},
{
"Name": "asset_ipv6_addresses_s",
"Type": "string"
},
{
"Name": "asset_fqdns_s",
"Type": "string"
},
{
"Name": "asset_name_s",
"Type": "string"
},
{
"Name": "asset_agent_uuid_g",
"Type": "string"
},
{
"Name": "asset_tags_s",
"Type": "string"
},
{
"Name": "asset_mac_addresses_s",
"Type": "string"
},
{
"Name": "asset_operating_systems_s",
"Type": "string"
},
{
"Name": "asset_system_type_s",
"Type": "string"
},
{
"Name": "asset_network_id_g",
"Type": "string"
},
{
"Name": "asset_agent_name_s",
"Type": "string"
},
{
"Name": "Type",
"Type": "string"
},
{
"Name": "_ResourceId",
"Type": "string"
}
]
}
15 changes: 15 additions & 0 deletions .script/tests/KqlvalidationsTests/SkipValidationsTemplates.json
Original file line number Diff line number Diff line change
Expand Up @@ -2624,6 +2624,16 @@
"templateName": "InfobloxSOCInsightsDataConnector_API.json",
"validationFailReason": "The name 'insightId_g' does not refer to any known column, table, variable or function."
},
{
"id": "ESI-Opt6ExchangeMessageTrackingLogs",
"templateName": "ESI-Opt6ExchangeMessageTrackingLogs.json",
"validationFailReason": "This is an Azure Monitor Connector which doesn't require extra permissions. Skipping this ID because the required-permissions check fails for this Data Connector template."
},
{
"id": "ESI-Opt7ExchangeHTTPProxyLogs",
"templateName": "ESI-Opt7ExchangeHTTPProxyLogs.json",
"validationFailReason": "This is an Azure Monitor Connector which doesn't require extra permissions. Skipping this ID because the required-permissions check fails for this Data Connector template."
},
// Temporarily adding Data connector template id's for KQL Validations - End


Expand Down Expand Up @@ -2819,6 +2829,11 @@
"templateName": "ExchangeConfiguration.yaml",
"validationFailReason": "Temporarily Added for Parser KQL Queries validation"
},
{
"id": "0a0f4ea0-6b94-4420-892e-41ca985f2f01",
"templateName": "MESCompareDataOnPMRA.yaml",
"validationFailReason": "Temporarily Added for Parser KQL Queries validation"
},
{
"id": "1acab329-1c11-42a7-b5ea-41264947947a",
"templateName": "ExchangeEnvironmentList.yaml",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,11 @@
"id": "39f51672-8c63-4600-882a-5db8275f798f",
"templateName": "Microsoft Exchange Security - MESCompareDataMRA parser",
"validationFailReason": "Non-ASCII characters are required to test comparison of strings with non-ASCII characters"
},
{
"id": "0a0f4ea0-6b94-4420-892e-41ca985f2f01",
"templateName": "Microsoft Exchange Security - MESCompareDataOnPMRA parser",
"validationFailReason": "Non-ASCII characters are required to test comparison of strings with non-ASCII characters"
}
]

Expand Down
67 changes: 55 additions & 12 deletions .script/tests/asimParsersTest/ASimFilteringTest.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,13 @@
end_time = datetime.now(timezone.utc)
start_time = end_time - timedelta(days = TIME_SPAN_IN_DAYS)

# Define the dictionary mapping schemas to their respective messages
failure_messages = {
'AuditEvent': "This single failure is because only one value exist in 'EventResult' field in 'AuditEvent' schema. Audit Event is a special case where 'EventResult' validations could be partial as only 'Success' events exists. Ignoring this error.",
'Authentication': "This single failure is because only two values exist in 'EventType' field in 'Authentication' schema. 'Authentication' is a special case where 'EventType' validations could be partial as only 'Logon' or 'Logoff' events may exists. Ignoring this error.",
'Dns': "This single failure is because only one value exist in 'EventType' field in 'Dns' schema. 'Dns' is a special case where 'EventType' validations could be 'Query' only. Ignoring this error."
}

def attempt_to_connect():
try:
credential = DefaultAzureCredential()
Expand Down Expand Up @@ -230,6 +237,20 @@ def read_exclusion_list_from_csv():
exclusion_list.append(row[0])
return exclusion_list

# Function to handle printing and flushing
def print_and_flush(message):
    """Print *message* wrapped in yellow terminal colour codes and flush stdout.

    Flushing immediately keeps the warning visible in streamed CI logs, where
    buffered output can otherwise appear out of order.
    """
    colored = f"{YELLOW} {message} {RESET}"
    print(colored)
    sys.stdout.flush()

# Function to handle error printing, flushing, and exiting
def handle_test_failure(parser_file_path, error_message=None):
    """Emit a GitHub Actions error annotation, flush stdout, and abort with exit code 1.

    :param parser_file_path: path of the parser whose tests failed; used in the
        default message when no explicit *error_message* is supplied.
    :param error_message: optional custom error text to annotate instead.
    """
    if error_message:
        text = error_message
    else:
        text = f"Tests failed for {parser_file_path}"
    print(f"::error::{text}")
    sys.stdout.flush()
    # Exiting non-zero makes the surrounding workflow step fail.
    sys.exit(1)

def main():
# Get modified ASIM Parser files along with their status
current_directory = os.path.dirname(os.path.abspath(__file__))
Expand Down Expand Up @@ -288,13 +309,27 @@ def main():
if parser_file['EquivalentBuiltInParser'] in read_exclusion_list_from_csv():
print(f"{YELLOW}The parser {parser_file_path} is listed in the exclusions file. Therefore, this workflow run will not fail because of it. To allow this parser to cause the workflow to fail, please remove its name from the exclusions list file located at: {exclusion_file_path}{RESET}")
sys.stdout.flush()
# Check for exception cases where the failure can be ignored
# Check if the failure message and schema match the exception cases
if len(result.failures) == 1:
failure_message = result.failures[0][1]
schema = parser_file['Normalization']['Schema']
match schema:
case 'AuditEvent' if "eventresult - validations for this parameter are partial" in failure_message:
print_and_flush(failure_messages['AuditEvent'])
case 'Authentication' if "eventtype_in - Expected to have less results after filtering." in failure_message:
print_and_flush(failure_messages['Authentication'])
case 'Dns' if "eventtype - validations for this parameter are partial" in failure_message:
print_and_flush(failure_messages['Dns'])
case _:
# Default case when single error and if no specific condition matches
handle_test_failure(parser_file_path)
else:
print(f"::error::Tests failed for {parser_file_path}")
sys.stdout.flush() # Explicitly flush stdout
#sys.exit(1) # uncomment this line to fail workflow when tests are not successful.
# When there is more than one failure, or no specific exception case matches
handle_test_failure(parser_file_path)
except subprocess.CalledProcessError as e:
print(f"::error::An error occurred while reading parser file: {e}")
sys.stdout.flush() # Explicitly flush stdout
# Handle exceptions raised during the parser execution e.g. error in KQL query
handle_test_failure(parser_file_path, f"An error occurred while running parser: {e}")


class FilteringTest(unittest.TestCase):
Expand Down Expand Up @@ -388,7 +423,7 @@ def check_required_fields(self, parser_file):
def datetime_test(self, param, query_definition, column_name_in_table):
param_name = param['Name']
# Get count of rows without filtering
no_filter_query = query_definition + f"query()\n"
no_filter_query = query_definition + f"query() | project TimeGenerated \n"
no_filter_response = self.send_query(no_filter_query)
num_of_rows_when_no_filters_in_query = len(no_filter_response.tables[0].rows)
self.assertNotEqual(len(no_filter_response.tables[0].rows) , 0 , f"No data for parameter:{param_name}")
Expand Down Expand Up @@ -547,7 +582,9 @@ def dynamic_tests_helper(self, parameter_name, query_definition, num_of_rows_whe

# Performing filtering with two values if possible
if len(values_list) == 1 or num_of_rows_when_no_filters_in_query <= MAX_FILTERING_PARAMETERS:
self.fail(f"Parameter: {parameter_name} - Not enough data to perform two values {test_type} tests")
# Skip self.fail if values_list contains both "Logon" and "Logoff" and parameter_name is "eventtype_in"
if not (set(values_list) == {"Logon", "Logoff"} and parameter_name == "eventtype_in"):
self.fail(f"Parameter: {parameter_name} - Not enough data to perform two values {test_type} tests")
filtering_response, values_string = self.get_response_for_query_with_parameters(parameter_name, query_definition, column_name_in_table, values_list)
num_of_rows_with_parameters_in_query = len(filtering_response.tables[0].rows)
self.dynamic_tests_assertions(parameter_name, num_of_rows_with_parameters_in_query, values_string, num_of_rows_when_no_filters_in_query)
Expand Down Expand Up @@ -585,13 +622,19 @@ def get_substrings_list(self, rows, num_of_substrings, delimiter):

value = row[COLUMN_INDEX_IN_ROW]
post = get_postfix(value, rows, substrings_list, delimiter)
# Post will equal value if: the value doesn't contain the delimiter, post is already in the list, or post is contained in an item in the list.
if post != value:

# Add post to the list if it's not already present
if post not in substrings_list:
substrings_list.append(post)
else:

# If the list has reached the required number of substrings, break the loop
if len(substrings_list) == num_of_substrings:
break

# If post is equal to value, also add pre to the list
if post == value:
pre = get_prefix(value, rows, substrings_list, delimiter)
# pre will equal value if: the value doesn't contain the delimiter, pre is already in the list, or pre is contained in an item in the list.
if pre != value:
if pre not in substrings_list:
substrings_list.append(pre)

return substrings_list
Expand Down
Loading

0 comments on commit 5566c1d

Please sign in to comment.