From 91f079b5a7f48d441485b4c2185d67964f463e20 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rub=C3=A9n=20De=20la=20Torre=20Vico?= Date: Thu, 19 Sep 2024 17:44:07 +0200 Subject: [PATCH 01/21] feat(prowler-check-kreator): first ProwlerCheckKreator version this is an script that helps to create checks with AI --- util/__init__.py | 0 util/prowler_check_kreator/__iniy__.py | 0 .../lib/metadata_types.py | 223 ++++++++ util/prowler_check_kreator/lib/templates.py | 119 ++++ .../prowler_check_kreator.py | 510 ++++++++++++++++++ 5 files changed, 852 insertions(+) create mode 100644 util/__init__.py create mode 100644 util/prowler_check_kreator/__iniy__.py create mode 100644 util/prowler_check_kreator/lib/metadata_types.py create mode 100644 util/prowler_check_kreator/lib/templates.py create mode 100644 util/prowler_check_kreator/prowler_check_kreator.py diff --git a/util/__init__.py b/util/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/util/prowler_check_kreator/__iniy__.py b/util/prowler_check_kreator/__iniy__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/util/prowler_check_kreator/lib/metadata_types.py b/util/prowler_check_kreator/lib/metadata_types.py new file mode 100644 index 00000000000..33cc6143b66 --- /dev/null +++ b/util/prowler_check_kreator/lib/metadata_types.py @@ -0,0 +1,223 @@ +def get_metadata_valid_check_type(provider: str = "aws") -> list: + """Get the valid check types for the provider + + Keyword arguments: + provider -- The provider of the service + """ + check_types = [] + + if provider == "aws": + check_types = [ + { + "namespace": "Software and Configuration Checks", + "children": [ + { + "category": "Vulnerabilities", + "children": [{"classifier": "CVE"}], + }, + { + "category": "AWS Security Best Practices", + "children": [ + {"classifier": "Network Reachability"}, + {"classifier": "Runtime Behavior Analysis"}, + ], + }, + { + "category": "Industry and Regulatory Standards", + "children": [ + 
{"classifier": "AWS Foundational Security Best Practices"}, + {"classifier": "CIS Host Hardening Benchmarks"}, + {"classifier": "CIS AWS Foundations Benchmark"}, + {"classifier": "PCI-DSS"}, + {"classifier": "Cloud Security Alliance Controls"}, + {"classifier": "ISO 90001 Controls"}, + {"classifier": "ISO 27001 Controls"}, + {"classifier": "ISO 27017 Controls"}, + {"classifier": "ISO 27018 Controls"}, + {"classifier": "SOC 1"}, + {"classifier": "SOC 2"}, + {"classifier": "HIPAA Controls (USA)"}, + {"classifier": "NIST 800-53 Controls (USA)"}, + {"classifier": "NIST CSF Controls (USA)"}, + {"classifier": "IRAP Controls (Australia)"}, + {"classifier": "K-ISMS Controls (Korea)"}, + {"classifier": "MTCS Controls (Singapore)"}, + {"classifier": "FISC Controls (Japan)"}, + {"classifier": "My Number Act Controls (Japan)"}, + {"classifier": "ENS Controls (Spain)"}, + {"classifier": "Cyber Essentials Plus Controls (UK)"}, + {"classifier": "G-Cloud Controls (UK)"}, + {"classifier": "C5 Controls (Germany)"}, + {"classifier": "IT-Grundschutz Controls (Germany)"}, + {"classifier": "GDPR Controls (Europe)"}, + {"classifier": "TISAX Controls (Europe)"}, + ], + }, + {"category": "Patch Management"}, + ], + }, + { + "namespace": "TTPs", + "children": [ + {"category": "Initial Access"}, + {"category": "Execution"}, + {"category": "Persistence"}, + {"category": "Privilege Escalation"}, + {"category": "Defense Evasion"}, + {"category": "Credential Access"}, + {"category": "Discovery"}, + {"category": "Lateral Movement"}, + {"category": "Collection"}, + {"category": "Command and Control"}, + ], + }, + { + "namespace": "Effects", + "children": [ + {"category": "Data Exposure"}, + {"category": "Data Exfiltration"}, + {"category": "Data Destruction"}, + {"category": "Denial of Service"}, + {"category": "Resource Consumption"}, + ], + }, + { + "namespace": "Unusual Behaviors", + "children": [ + {"category": "Application"}, + {"category": "Network Flow"}, + {"category": "IP address"}, + 
{"category": "User"}, + {"category": "VM"}, + {"category": "Container"}, + {"category": "Serverless"}, + {"category": "Process"}, + {"category": "Database"}, + {"category": "Data"}, + ], + }, + { + "namespace": "Sensitive Data Identifications", + "children": [ + {"category": "PII"}, + {"category": "Passwords"}, + {"category": "Legal"}, + {"category": "Financial"}, + {"category": "Security"}, + {"category": "Business"}, + ], + }, + ] + + return check_types + + +def get_metadata_valid_resource_type(provider: str = "aws") -> set: + """Get the valid resource types for the provider + + Keyword arguments: + provider -- The provider of the service + """ + valid_resource_types = set() + + if provider == "aws": + valid_resource_types = { + "AwsIamAccessKey", + "AwsElbLoadBalancer", + "AwsRedshiftCluster", + "AwsEventsEndpoint", + "AwsElbv2LoadBalancer", + "AwsAutoScalingLaunchConfiguration", + "AwsWafv2RuleGroup", + "AwsWafRegionalRule", + "AwsCloudFrontDistribution", + "AwsWafRegionalWebAcl", + "AwsWafRateBasedRule", + "AwsCertificateManagerCertificate", + "AwsKmsKey", + "AwsDmsEndpoint", + "AwsLambdaLayerVersion", + "AwsIamRole", + "AwsElasticBeanstalkEnvironment", + "AwsBackupBackupPlan", + "AwsEc2ClientVpnEndpoint", + "AwsEcrContainerImage", + "AwsSqsQueue", + "AwsIamGroup", + "AwsOpenSearchServiceDomain", + "AwsApiGatewayV2Api", + "AwsCloudTrailTrail", + "AwsWafWebAcl", + "AwsEc2Subnet", + "AwsEc2VpcPeeringConnection", + "AwsEc2VpcEndpointService", + "AwsCodeBuildProject", + "AwsLambdaFunction", + "AwsNetworkFirewallRuleGroup", + "AwsDmsReplicationInstance", + "AwsRdsEventSubscription", + "AwsCloudWatchAlarm", + "AwsS3AccountPublicAccessBlock", + "AwsWafRegionalRateBasedRule", + "AwsRdsDbInstance", + "AwsEksCluster", + "AwsXrayEncryptionConfig", + "AwsWafv2WebAcl", + "AwsWafRuleGroup", + "AwsBackupBackupVault", + "AwsKinesisStream", + "AwsNetworkFirewallFirewallPolicy", + "AwsEc2NetworkInterface", + "AwsEcsTaskDefinition", + "AwsMskCluster", + "AwsApiGatewayRestApi", + 
"AwsS3Object", + "AwsRdsDbSnapshot", + "AwsBackupRecoveryPoint", + "AwsWafRule", + "AwsS3AccessPoint", + "AwsApiGatewayV2Stage", + "AwsGuardDutyDetector", + "AwsEfsAccessPoint", + "AwsEcsContainer", + "AwsEcsTask", + "AwsS3Bucket", + "AwsSageMakerNotebookInstance", + "AwsNetworkFirewallFirewall", + "AwsStepFunctionStateMachine", + "AwsIamUser", + "AwsAppSyncGraphQLApi", + "AwsApiGatewayStage", + "AwsEcrRepository", + "AwsEcsService", + "AwsEc2Vpc", + "AwsAmazonMQBroker", + "AwsWafRegionalRuleGroup", + "AwsEventSchemasRegistry", + "AwsRoute53HostedZone", + "AwsEventsEventbus", + "AwsDmsReplicationTask", + "AwsEc2Instance", + "AwsEcsCluster", + "AwsRdsDbSecurityGroup", + "AwsCloudFormationStack", + "AwsSnsTopic", + "AwsDynamoDbTable", + "AwsRdsDbCluster", + "AwsEc2Eip", + "AwsEc2RouteTable", + "AwsEc2TransitGateway", + "AwsElasticSearchDomain", + "AwsEc2LaunchTemplate", + "AwsEc2Volume", + "AwsAthenaWorkGroup", + "AwsSecretsManagerSecret", + "AwsEc2SecurityGroup", + "AwsIamPolicy", + "AwsSsmPatchCompliance", + "AwsAutoScalingAutoScalingGroup", + "AwsEc2NetworkAcl", + "AwsRdsDbClusterSnapshot", + } + + return valid_resource_types diff --git a/util/prowler_check_kreator/lib/templates.py b/util/prowler_check_kreator/lib/templates.py new file mode 100644 index 00000000000..fd5d61fbd78 --- /dev/null +++ b/util/prowler_check_kreator/lib/templates.py @@ -0,0 +1,119 @@ +def load_check_template(provider: str, service: str, check_name: str) -> str: + """Load the template for the check file + + Keyword arguments: + provider -- The provider of the service + service -- The service to check + check_name -- The name of the check + """ + if provider == "aws": + return f""" +from prowler.lib.check.models import Check, Check_Report_AWS +from prowler.providers.aws.services.{service}.{service}_client import {service}_client + + +class {check_name}(Check): + def execute(self) -> list[Check_Report_AWS]: + findings = [] + for , in {service}_client..items(): + report = 
Check_Report_AWS(self.metadata()) + report.region = .region + report.resource_id = .name + report.resource_arn = + report.resource_tags = .tags + report.status = "FAIL" + report.status_extended = f"..." + + if : + report.status = "PASS" + report.status_extended = f"..." + + findings.append(report) + + return findings +""" + else: + raise ValueError(f"Template for {provider} not implemented yet") + + +def load_test_template(provider: str, service: str, check_name: str) -> str: + """Load the template for the test file + + Keyword arguments: + provider -- The provider of the service + service -- The service to check + check_name -- The name of the check + """ + if provider == "aws": + return f""" +from unittest import mock + +from boto3 import client +from moto import mock_aws + +from tests.providers.aws.utils import ( + AWS_REGION_EU_WEST_1, + set_mocked_aws_provider, +) + + +class Test_{check_name}: + @mock_aws + def test_(self): + from prowler.providers.aws.services.{service}.{service}_service import + + aws_provider = set_mocked_aws_provider([AWS_REGION_EU_WEST_1]) + + with mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=aws_provider, + ), mock.patch( + "prowler.providers.aws.services.{service}.{check_name}.{check_name}.{service}_client", + new=(aws_provider), + ): + # Test Check + from prowler.providers.aws.services.{service}.{check_name}.{check_name} import ( + {check_name}, + ) + + check = {check_name}() + result = check.execute() + + assert len(result) == 0 + + @mock_aws + def test_one_compliant_{service}(self): + {service}_client = client("{service}", region_name=AWS_REGION_EU_WEST_1) + # Create a compliant resource + + from prowler.providers.aws.services.{service}.{service}_service import + + aws_provider = set_mocked_aws_provider([AWS_REGION_EU_WEST_1]) + + with mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=aws_provider, + ), mock.patch( + 
"prowler.providers.aws.services.{service}.{check_name}.{check_name}.{service}_client", + new=(aws_provider), + ): + from prowler.providers.aws.services.{service}.{check_name}.{check_name} import ( + {check_name}, + ) + + check = {check_name}() + result = check.execute() + + assert len(result) == 1 + assert result[0].status == "PASS" + assert result[0].status_extended == "..." + assert result[0].region == AWS_REGION_EU_WEST_1 + assert result[0].resource_id == .id + assert ( + result[0].resource_arn + == f"arn:(aws_partition):{service}:(region):(account_id):(resource)" + ) + assert result[0].resource_tags == .tags +""" + else: + raise ValueError(f"Template for {provider} not implemented yet") diff --git a/util/prowler_check_kreator/prowler_check_kreator.py b/util/prowler_check_kreator/prowler_check_kreator.py new file mode 100644 index 00000000000..ff245cdb753 --- /dev/null +++ b/util/prowler_check_kreator/prowler_check_kreator.py @@ -0,0 +1,510 @@ +#!/usr/bin/env python3 +import json +import os +import sys + +import google.generativeai as genai + +from util.prowler_check_kreator.lib.metadata_types import ( + get_metadata_valid_check_type, + get_metadata_valid_resource_type, +) +from util.prowler_check_kreator.lib.templates import ( + load_check_template, + load_test_template, +) + + +# TODO: Support azure, gcp and kubernetes providers (only need to add check template, test template and metadata types) +# TODO: Add support for other LLMs like OpenAI's GPT or Ollama locally +# TODO: Add support to make configurable checks +# TODO: Improve the check generation with more context +class ProwlerCheckKreator: + def __init__(self, provider: str, check_name: str): + # Validate provider + + supported_providers = {"aws"} + + if provider in supported_providers: + self._provider = provider + else: + raise ValueError( + f"Invalid provider. 
Supported providers: {', '.join(supported_providers)}" + ) + + # Find the Prowler folder + self._prowler_folder = os.path.abspath( + os.path.join(os.path.dirname(__file__), os.pardir, os.pardir) + ) + + # Validate if service exists for the selected provider + service_name = check_name.split("_")[0] + + service_path = os.path.join( + self._prowler_folder, + "prowler/providers/", + provider, + "services/", + service_name, + ) + + if os.path.exists(service_path): + self._service_name = service_name + else: + raise ValueError(f"Service {service_name} does not exist for {provider}") + + if not self._check_exists(check_name): + self._check_name = check_name + self._check_path = os.path.join( + self._prowler_folder, + "prowler/providers/", + provider, + "services/", + service_name, + check_name, + ) + else: + # Check already exists, give the user the possibility to continue or not + user_input = ( + input( + f"Check {check_name} already exists. Do you want to continue and overwrite it? Type 'yes'/'no' and press enter: " + ) + .strip() + .lower() + ) + + if user_input == "no": + raise ValueError(f"Check {check_name} already exists") + else: + self._check_name = check_name + self._check_path = os.path.join( + self._prowler_folder, + "prowler/providers/", + provider, + "services/", + service_name, + check_name, + ) + + def kreate_check(self) -> None: + """Create a new check in Prowler""" + + # Create the check + print(f"Creating check {self._check_name} for {self._provider}") + + # Inside the check folder, create the check files: __init__.py, check_name.py, and check_name.metadata.json + os.makedirs(self._check_path, exist_ok=True) + + with open(os.path.join(self._check_path, "__init__.py"), "w") as f: + f.write("") + + # Check first if the check file already exists, in that case, ask user if want to overwrite it + if os.path.exists(os.path.join(self._check_path, f"{self._check_name}.py")): + user_input = ( + input( + f"Python check file {self._check_name} already exists. 
Do you want to overwrite it? Type 'yes'/'no' and press enter: " + ) + .strip() + .lower() + ) + + if user_input == "yes": + self._write_check_file() + else: + print("Check file not overwritten") + else: + self._write_check_file() + + # Check if metadata file already exists, in that case, ask user if want to overwrite it + if os.path.exists( + os.path.join(self._check_path, f"{self._check_name}.metadata.json") + ): + user_input = ( + input( + f"Metadata file {self._check_name}.metadata.json already exists. Do you want to overwrite it? Type 'yes'/'no' and press enter: " + ) + .strip() + .lower() + ) + if user_input == "yes": + self._write_metadata_file() + else: + print("Metadata file not overwritten") + else: + self._write_metadata_file() + + # Create test directory if it does not exist + test_folder = os.path.join( + self._prowler_folder, + "tests/providers/", + self._provider, + "services/", + self._service_name, + self._check_name, + ) + + os.makedirs(test_folder, exist_ok=True) + + # Check if test file already exists, in that case, ask user if want to overwrite it + if os.path.exists(os.path.join(test_folder, f"{self._check_name}_test.py")): + user_input = ( + input( + f"Python test file {self._check_name}_test.py already exists. Do you want to overwrite it? 
Type 'yes'/'no' and press enter: " + ) + .strip() + .lower() + ) + + if user_input == "yes": + self._write_test_file() + else: + print("Test file not overwritten") + else: + self._write_test_file() + + print(f"Check {self._check_name} created successfully") + + def _check_exists(self, check_name: str) -> bool: + """Check if the check already exists""" + + # Get the check path + check_path = os.path.join( + self._prowler_folder, + "prowler/providers/", + self._provider, + "services/", + self._service_name, + check_name, + ) + + # Get the test path + _test_path = os.path.join( + self._prowler_folder, + "tests/providers/", + self._provider, + "services/", + self._service_name, + check_name, + ) + + # Check if exits check.py, check_metadata.json and check_test.py + return ( + os.path.exists(check_path) + and os.path.exists(os.path.join(check_path, "__init__.py")) + and os.path.exists(os.path.join(check_path, f"{check_name}.py")) + and os.path.exists(os.path.join(check_path, f"{check_name}.metadata.json")) + and os.path.exists(_test_path) + ) + + def _write_check_file(self) -> None: + """Write the check file""" + + check_template = load_check_template( + self._provider, self._service_name, self._check_name + ) + + # Ask if want that Gemini to fill the check taking as reference another check + + user_input = ( + input( + "WARNING: This still in beta. The check generated may not have sense or you will have to add some parameters to the service\nDo you want to ask Gemini to fill the check now? If yes, type the reference check name and press enter. 
If not, press enter: " + ) + .strip() + .lower() + ) + + if user_input and self._check_exists(user_input): + # Load the file referenced by the user + with open( + os.path.join( + self._prowler_folder, + "prowler/providers/", + self._provider, + "services/", + self._service_name, + user_input, + f"{user_input}.py", + ), + "r", + ) as f: + check_reference = f.read() + + check_template = self._fill_check_with_gemini( + self._check_name, check_reference + ) + else: + print( + "Referenced check does not exist. Check will be created with the standard template" + ) + + with open(os.path.join(self._check_path, f"{self._check_name}.py"), "w") as f: + f.write(check_template) + + def _write_metadata_file(self) -> None: + """Write the metadata file""" + + metadata_template = { + "Provider": self._provider, + "CheckID": self._check_name, + "CheckTitle": "", + "CheckType": [], + "ServiceName": self._service_name, + "SubServiceName": "", + "ResourceIdTemplate": "", + "Severity": "", + "ResourceType": "", + "Description": "", + "Risk": "", + "RelatedUrl": "", + "Remediation": { + "Code": { + "CLI": "", + "NativeIaC": "", + "Other": "", + "Terraform": "", + }, + "Recommendation": {"Text": "", "Url": ""}, + }, + "Categories": [], + "DependsOn": [], + "RelatedTo": [], + "Notes": "", + } + + # Ask if want that Gemini to fill the metadata + + user_input = ( + input( + "Do you want to ask Gemini to fill the metadata now? 
Type 'yes'/'no' and press enter: " + ) + .strip() + .lower() + ) + + if user_input == "yes": + # Ask for some context to fill the metadata + + context_sources = {"TrendMicro": "", "SecurityHub": "", "Other": ""} + + for source in context_sources: + context_sources[source] = input( + f"Please provide some context from {source} (leave empty if none): " + ) + + filled_metadata = self._fill_metadata_with_gemini( + metadata_template, context_sources + ) + else: + filled_metadata = metadata_template + + with open( + os.path.join(self._check_path, f"{self._check_name}.metadata.json"), "w" + ) as f: + f.write(json.dumps(filled_metadata, indent=2)) + + def _write_test_file(self) -> None: + """Write the test file""" + + test_folder = os.path.join( + self._prowler_folder, + "tests/providers/", + self._provider, + "services/", + self._service_name, + self._check_name, + ) + + test_template = load_test_template( + self._provider, self._service_name, self._check_name + ) + + with open(os.path.join(test_folder, f"{self._check_name}_test.py"), "w") as f: + f.write(test_template) + + def _fill_check_with_gemini(self, check_name: str, check_reference: str) -> str: + """Fill the check with Gemini AI + + Keyword arguments: + check_name -- The name of the check to be created + check_reference -- The reference check to be used as inspiration + """ + + filled_check = "" + + if check_reference: + try: + genai.configure(api_key=os.environ["GEMINI_API_KEY"]) + + generation_config = { + "temperature": 0, + "top_p": 1, + "top_k": 1, + } + + safety_settings = [ + { + "category": "HARM_CATEGORY_HARASSMENT", + "threshold": "BLOCK_MEDIUM_AND_ABOVE", + }, + { + "category": "HARM_CATEGORY_HATE_SPEECH", + "threshold": "BLOCK_MEDIUM_AND_ABOVE", + }, + { + "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT", + "threshold": "BLOCK_MEDIUM_AND_ABOVE", + }, + { + "category": "HARM_CATEGORY_DANGEROUS_CONTENT", + "threshold": "BLOCK_MEDIUM_AND_ABOVE", + }, + ] + + model = genai.GenerativeModel( + 
model_name="gemini-1.5-flash", + generation_config=generation_config, + safety_settings=safety_settings, + ) + + # Extract the class name from the reference check. Example: class elb_connection_draining_enabled(Check) + class_name = check_reference.split("(")[0].split("class ")[1] + + prompt_parts = [ + f"Your task is to create a new security check called '{check_name}' for Prowler (an open-source CSPM tool). The control is a Python class that inherits from the Check class and has only one method called execute. The execute method must return a list of Check_Report_AWS objects.", + "I need the answer only with Python formatted text.", + "Use the following check as inspiration to create the new check: ", + f"{class_name}:", + check_reference, + f"{check_name}:", + ] + + response = model.generate_content(prompt_parts) + + if response: + # Format the response to a Python class, removing the prompt parts + filled_check = ( + response.text.replace("python", "").replace("```", "").strip() + ) + + else: + raise Exception("Error generating check with Gemini AI") + + except Exception as e: + raise Exception(f"Error generating check with Gemini AI: {e}") + + return filled_check + + def _fill_metadata_with_gemini(self, metadata: dict, context_sources: dict) -> dict: + filled_metadata = {} + + if metadata: + try: + genai.configure(api_key=os.environ["GEMINI_API_KEY"]) + + generation_config = { + "temperature": 0, + "top_p": 1, + "top_k": 1, + } + + safety_settings = [ + { + "category": "HARM_CATEGORY_HARASSMENT", + "threshold": "BLOCK_MEDIUM_AND_ABOVE", + }, + { + "category": "HARM_CATEGORY_HATE_SPEECH", + "threshold": "BLOCK_MEDIUM_AND_ABOVE", + }, + { + "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT", + "threshold": "BLOCK_MEDIUM_AND_ABOVE", + }, + { + "category": "HARM_CATEGORY_DANGEROUS_CONTENT", + "threshold": "BLOCK_MEDIUM_AND_ABOVE", + }, + ] + + # Remove empty context sources + context_sources = {k: v for k, v in context_sources.items() if v} + + # Remove metadata that 
we don't want to be filled by Gemini + metadata.pop("SubServiceName", None) + metadata["Remediation"]["Code"].pop("NativeIaC", None) + metadata["Remediation"]["Code"].pop("Other", None) + metadata["Remediation"]["Code"].pop("Terraform", None) + metadata.pop("DependsOn", None) + metadata.pop("RelatedTo", None) + + model = genai.GenerativeModel( + model_name="gemini-1.5-flash", + generation_config=generation_config, + safety_settings=safety_settings, + ) + + prompt_parts = [ + "Your task is to fill the metadata for a new cybersecurity check in Prowler (an open-source CSPM tool). The metadata is a JSON object with the following fields: ", + json.dumps(metadata, indent=2), + "Use the following context sources as inspiration to fill the metadata: ", + json.dumps(context_sources, indent=2), + "The field CheckType should be filled following the format: 'namespace/category/classifier', where namespace, category, and classifier are the values from the following dict: ", + json.dumps( + get_metadata_valid_check_type(metadata["Provider"]), indent=2 + ), + "One example of a valid CheckType value is: 'Software and Configuration Checks/Vulnerabilities/CVE'. If you don't have a valid value for CheckType, you can leave it empty.", + "The field ResourceType must be one of the following values:", + ", ".join(get_metadata_valid_resource_type(metadata["Provider"])), + "If you don't have a valid value for ResourceType, you can leave it empty.", + "The field Category must be one or more of the following values: encryption, forensics-ready, internet-exposed, logging, redundancy, secrets, thread-detection, trustboundaries or vulnerability-management. 
If you don't have a valid value for Category, you can leave it empty.", + "I need the answer only with JSON formatted text.", + ] + + response = model.generate_content(prompt_parts) + + if response: + # Format the response to a JSON object, removing the prompt parts + response = ( + response.text.replace("\n", "") + .replace("json", "") + .replace("JSON", "") + .replace("```", "") + .strip() + ) + + filled_metadata = json.loads(response) + + # Add removed fields back to the metadata + metadata["SubServiceName"] = "" + metadata["Remediation"]["Code"]["NativeIaC"] = "" + metadata["Remediation"]["Code"]["Other"] = "" + metadata["Remediation"]["Code"]["Terraform"] = "" + metadata["DependsOn"] = [] + metadata["RelatedTo"] = [] + + else: + raise Exception("Error generating metadata with Gemini AI") + + except Exception as e: + raise Exception(f"Error generating metadata with Gemini AI: {e}") + + return filled_metadata + + +if __name__ == "__main__": + try: + if len(sys.argv) < 3: + raise ValueError( + "Invalid arguments. 
Usage: python prowler_check_kreator.py " + ) + + prowler_check_creator = ProwlerCheckKreator(sys.argv[1], sys.argv[2]) + + sys.exit(prowler_check_creator.kreate_check()) + + except ValueError as e: + print(f"Error: {e}") + sys.exit(1) + except Exception as e: + print(f"Error: {e}") + sys.exit(1) From 099a269faa115098069de454f5e6fd60f1464732 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rub=C3=A9n=20De=20la=20Torre=20Vico?= Date: Fri, 20 Sep 2024 10:08:13 +0200 Subject: [PATCH 02/21] fix(prowler-check-kreator): fix init name --- util/prowler_check_kreator/__iniy__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 util/prowler_check_kreator/__iniy__.py diff --git a/util/prowler_check_kreator/__iniy__.py b/util/prowler_check_kreator/__iniy__.py deleted file mode 100644 index e69de29bb2d..00000000000 From 1cdf96e5da44f198b08abae2b6b1036cbb8bbd84 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rub=C3=A9n=20De=20la=20Torre=20Vico?= Date: Fri, 20 Sep 2024 10:08:40 +0200 Subject: [PATCH 03/21] fix(prowler-check-kreator): fix init name --- util/prowler_check_kreator/__init__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 util/prowler_check_kreator/__init__.py diff --git a/util/prowler_check_kreator/__init__.py b/util/prowler_check_kreator/__init__.py new file mode 100644 index 00000000000..e69de29bb2d From 04f31a714aea87246c107228cf5c68ca9cb104dc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rub=C3=A9n=20De=20la=20Torre=20Vico?= Date: Fri, 20 Sep 2024 11:05:57 +0200 Subject: [PATCH 04/21] refactor(prowler-check-kreator): separate gemini code into a new class --- .../lib/llms/__init__.py | 0 util/prowler_check_kreator/lib/llms/gemini.py | 160 +++++++++++++ .../prowler_check_kreator.py | 224 ++++-------------- 3 files changed, 201 insertions(+), 183 deletions(-) create mode 100644 util/prowler_check_kreator/lib/llms/__init__.py create mode 100644 util/prowler_check_kreator/lib/llms/gemini.py diff --git 
a/util/prowler_check_kreator/lib/llms/__init__.py b/util/prowler_check_kreator/lib/llms/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/util/prowler_check_kreator/lib/llms/gemini.py b/util/prowler_check_kreator/lib/llms/gemini.py new file mode 100644 index 00000000000..51334da6337 --- /dev/null +++ b/util/prowler_check_kreator/lib/llms/gemini.py @@ -0,0 +1,160 @@ +import json +import os + +import google.generativeai as genai + +from util.prowler_check_kreator.lib.metadata_types import ( + get_metadata_valid_check_type, + get_metadata_valid_resource_type, +) + + +class Gemini: + def __init__(self, model: str = "gemini-1.5-flash"): + self.api_key = os.getenv("GEMINI_API_KEY") + + if model not in ["gemini-1.5-flash", "gemini-1.5-pro", "gemini-1.0-pro"]: + raise Exception("Invalid Gemini AI model") + + self.model_name = model + self.generation_config = { + "temperature": 0, + "top_p": 1, + "top_k": 1, + } + self.safety_settings = [ + { + "category": "HARM_CATEGORY_HARASSMENT", + "threshold": "BLOCK_MEDIUM_AND_ABOVE", + }, + { + "category": "HARM_CATEGORY_HATE_SPEECH", + "threshold": "BLOCK_MEDIUM_AND_ABOVE", + }, + { + "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT", + "threshold": "BLOCK_MEDIUM_AND_ABOVE", + }, + { + "category": "HARM_CATEGORY_DANGEROUS_CONTENT", + "threshold": "BLOCK_MEDIUM_AND_ABOVE", + }, + ] + self._configure_genai() + + def _configure_genai(self): + """Configure the Gemini AI model.""" + try: + genai.configure(api_key=self.api_key) + except Exception as e: + raise Exception(f"Error configuring Gemini AI: {e}") + + def _generate_content(self, prompt_parts: list) -> str: + """Generate content using Gemini AI based on provided prompts.""" + try: + model = genai.GenerativeModel( + model_name=self.model_name, + generation_config=self.generation_config, + safety_settings=self.safety_settings, + ) + response = model.generate_content(prompt_parts) + if response: + return response.text + else: + raise Exception("Error generating 
content with Gemini AI") + except Exception as e: + raise Exception(f"Error generating content with Gemini AI: {e}") + + def _prepare_check_prompt(self, check_name: str, check_reference: str) -> list: + """Prepare the prompt for generating the check.""" + class_name = check_reference.split("(")[0].split("class ")[1] + prompt_parts = [ + f"Your task is to create a new security check called '{check_name}' for Prowler (a Cloud Security tool).", + "The control is a Python class that inherits from the Check class and has only one method called execute.", + "The execute method must return a list of Check_Report_AWS objects.", + "I need the answer only with Python formatted text.", + "Use the following check as inspiration to create the new check: ", + f"{class_name}:", + check_reference, + f"{check_name}:", + ] + return prompt_parts + + def _prepare_metadata_prompt(self, metadata: dict, context_sources: dict) -> list: + """Prepare the prompt for generating the metadata.""" + # Remove empty context sources and unnecessary fields + context_sources = {k: v for k, v in context_sources.items() if v} + metadata.pop("SubServiceName", None) + metadata["Remediation"]["Code"].pop("NativeIaC", None) + metadata["Remediation"]["Code"].pop("Other", None) + metadata["Remediation"]["Code"].pop("Terraform", None) + metadata.pop("DependsOn", None) + metadata.pop("RelatedTo", None) + + valid_prowler_categories = [ + "encryption", + "forensics-ready", + "internet-exposed", + "logging", + "redundancy", + "secrets", + "thread-detection", + "trustboundaries", + "vulnerability-management", + ] + + prompt_parts = [ + "Your task is to fill the metadata for a new cybersecurity check in Prowler (a Cloud Security tool).", + "The metadata is a JSON object with the following fields: ", + json.dumps(metadata, indent=2), + "Use the following context sources as inspiration to fill the metadata: ", + json.dumps(context_sources, indent=2), + "The field CheckType should be filled following the format: 
'namespace/category/classifier', where namespace, category, and classifier are the values from the following dict: ", + json.dumps(get_metadata_valid_check_type(metadata["Provider"]), indent=2), + "One example of a valid CheckType value is: 'Software and Configuration Checks/Vulnerabilities/CVE'. If you don't have a valid value for CheckType, you can leave it empty.", + "The field ResourceType must be one of the following values:", + ", ".join(get_metadata_valid_resource_type(metadata["Provider"])), + "If you don't have a valid value for ResourceType, you can leave it empty.", + f"The field Category must be one or more of the following values: {', '.join(valid_prowler_categories)}.", + "I need the answer only with JSON formatted text.", + ] + return prompt_parts + + def generate_check(self, check_name: str, check_reference: str) -> str: + """Fill the check with Gemini AI.""" + if not check_reference: + return "" + + prompt_parts = self._prepare_check_prompt(check_name, check_reference) + return ( + self._generate_content(prompt_parts) + .replace("python", "") + .replace("```", "") + .strip() + ) + + def generate_metadata(self, metadata: dict, context_sources: dict) -> dict: + """Fill the metadata with Gemini AI.""" + if not metadata: + return {} + + prompt_parts = self._prepare_metadata_prompt(metadata, context_sources) + filled_metadata_json = self._generate_content(prompt_parts) + + # Parse the generated JSON and re-add the removed fields + filled_metadata = json.loads( + filled_metadata_json.replace("\n", "") + .replace("json", "") + .replace("JSON", "") + .replace("```", "") + .strip() + ) + + filled_metadata["SubServiceName"] = "" + filled_metadata["Remediation"]["Code"]["NativeIaC"] = "" + filled_metadata["Remediation"]["Code"]["Other"] = "" + filled_metadata["Remediation"]["Code"]["Terraform"] = "" + filled_metadata["DependsOn"] = [] + filled_metadata["RelatedTo"] = [] + + return filled_metadata diff --git 
a/util/prowler_check_kreator/prowler_check_kreator.py b/util/prowler_check_kreator/prowler_check_kreator.py index ff245cdb753..4795d12022e 100644 --- a/util/prowler_check_kreator/prowler_check_kreator.py +++ b/util/prowler_check_kreator/prowler_check_kreator.py @@ -3,12 +3,6 @@ import os import sys -import google.generativeai as genai - -from util.prowler_check_kreator.lib.metadata_types import ( - get_metadata_valid_check_type, - get_metadata_valid_resource_type, -) from util.prowler_check_kreator.lib.templates import ( load_check_template, load_test_template, @@ -73,9 +67,7 @@ def __init__(self, provider: str, check_name: str): .lower() ) - if user_input == "no": - raise ValueError(f"Check {check_name} already exists") - else: + if user_input == "yes": self._check_name = check_name self._check_path = os.path.join( self._prowler_folder, @@ -85,6 +77,41 @@ def __init__(self, provider: str, check_name: str): service_name, check_name, ) + else: + raise ValueError(f"Check {check_name} already exists") + + # Let the user to use the model that he wants + self._model = None + supported_models = [ + "gemini-1.5-flash (default)", + "gemini-1.5-pro", + "gemini-1.0-pro", + ] + + print("Select the model that you want to use:") + for i, model in enumerate(supported_models): + print(f"{i + 1}. 
{model}") + + user_input = input( + "Type the number of the model and press enter (default is 1): " + ).strip() + + if not user_input: + model_index = 1 + else: + model_index = int(user_input) + + if model_index < 1 or model_index > len(supported_models): + raise ValueError("Invalid model selected") + + model_name = supported_models[model_index - 1] + + if "gemini" in model_name: + from util.prowler_check_kreator.lib.llms.gemini import Gemini + + self._model = Gemini(model_name) + else: + raise ValueError("Invalid model selected") def kreate_check(self) -> None: """Create a new check in Prowler""" @@ -199,7 +226,7 @@ def _check_exists(self, check_name: str) -> bool: def _write_check_file(self) -> None: """Write the check file""" - check_template = load_check_template( + check_content = load_check_template( self._provider, self._service_name, self._check_name ) @@ -229,16 +256,16 @@ def _write_check_file(self) -> None: ) as f: check_reference = f.read() - check_template = self._fill_check_with_gemini( + check_content = self._model.generate_check( self._check_name, check_reference ) else: print( - "Referenced check does not exist. Check will be created with the standard template" + "Referenced check does not exist. Check will be created with the standard template." 
) with open(os.path.join(self._check_path, f"{self._check_name}.py"), "w") as f: - f.write(check_template) + f.write(check_content) def _write_metadata_file(self) -> None: """Write the metadata file""" @@ -291,7 +318,7 @@ def _write_metadata_file(self) -> None: f"Please provide some context from {source} (leave empty if none): " ) - filled_metadata = self._fill_metadata_with_gemini( + filled_metadata = self._model.generate_metadata( metadata_template, context_sources ) else: @@ -321,175 +348,6 @@ def _write_test_file(self) -> None: with open(os.path.join(test_folder, f"{self._check_name}_test.py"), "w") as f: f.write(test_template) - def _fill_check_with_gemini(self, check_name: str, check_reference: str) -> str: - """Fill the check with Gemini AI - - Keyword arguments: - check_name -- The name of the check to be created - check_reference -- The reference check to be used as inspiration - """ - - filled_check = "" - - if check_reference: - try: - genai.configure(api_key=os.environ["GEMINI_API_KEY"]) - - generation_config = { - "temperature": 0, - "top_p": 1, - "top_k": 1, - } - - safety_settings = [ - { - "category": "HARM_CATEGORY_HARASSMENT", - "threshold": "BLOCK_MEDIUM_AND_ABOVE", - }, - { - "category": "HARM_CATEGORY_HATE_SPEECH", - "threshold": "BLOCK_MEDIUM_AND_ABOVE", - }, - { - "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT", - "threshold": "BLOCK_MEDIUM_AND_ABOVE", - }, - { - "category": "HARM_CATEGORY_DANGEROUS_CONTENT", - "threshold": "BLOCK_MEDIUM_AND_ABOVE", - }, - ] - - model = genai.GenerativeModel( - model_name="gemini-1.5-flash", - generation_config=generation_config, - safety_settings=safety_settings, - ) - - # Extract the class name from the reference check. Example: class elb_connection_draining_enabled(Check) - class_name = check_reference.split("(")[0].split("class ")[1] - - prompt_parts = [ - f"Your task is to create a new security check called '{check_name}' for Prowler (an open-source CSPM tool). 
The control is a Python class that inherits from the Check class and has only one method called execute. The execute method must return a list of Check_Report_AWS objects.", - "I need the answer only with Python formatted text.", - "Use the following check as inspiration to create the new check: ", - f"{class_name}:", - check_reference, - f"{check_name}:", - ] - - response = model.generate_content(prompt_parts) - - if response: - # Format the response to a Python class, removing the prompt parts - filled_check = ( - response.text.replace("python", "").replace("```", "").strip() - ) - - else: - raise Exception("Error generating check with Gemini AI") - - except Exception as e: - raise Exception(f"Error generating check with Gemini AI: {e}") - - return filled_check - - def _fill_metadata_with_gemini(self, metadata: dict, context_sources: dict) -> dict: - filled_metadata = {} - - if metadata: - try: - genai.configure(api_key=os.environ["GEMINI_API_KEY"]) - - generation_config = { - "temperature": 0, - "top_p": 1, - "top_k": 1, - } - - safety_settings = [ - { - "category": "HARM_CATEGORY_HARASSMENT", - "threshold": "BLOCK_MEDIUM_AND_ABOVE", - }, - { - "category": "HARM_CATEGORY_HATE_SPEECH", - "threshold": "BLOCK_MEDIUM_AND_ABOVE", - }, - { - "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT", - "threshold": "BLOCK_MEDIUM_AND_ABOVE", - }, - { - "category": "HARM_CATEGORY_DANGEROUS_CONTENT", - "threshold": "BLOCK_MEDIUM_AND_ABOVE", - }, - ] - - # Remove empty context sources - context_sources = {k: v for k, v in context_sources.items() if v} - - # Remove metadata that we don't want to be filled by Gemini - metadata.pop("SubServiceName", None) - metadata["Remediation"]["Code"].pop("NativeIaC", None) - metadata["Remediation"]["Code"].pop("Other", None) - metadata["Remediation"]["Code"].pop("Terraform", None) - metadata.pop("DependsOn", None) - metadata.pop("RelatedTo", None) - - model = genai.GenerativeModel( - model_name="gemini-1.5-flash", - 
generation_config=generation_config, - safety_settings=safety_settings, - ) - - prompt_parts = [ - "Your task is to fill the metadata for a new cybersecurity check in Prowler (an open-source CSPM tool). The metadata is a JSON object with the following fields: ", - json.dumps(metadata, indent=2), - "Use the following context sources as inspiration to fill the metadata: ", - json.dumps(context_sources, indent=2), - "The field CheckType should be filled following the format: 'namespace/category/classifier', where namespace, category, and classifier are the values from the following dict: ", - json.dumps( - get_metadata_valid_check_type(metadata["Provider"]), indent=2 - ), - "One example of a valid CheckType value is: 'Software and Configuration Checks/Vulnerabilities/CVE'. If you don't have a valid value for CheckType, you can leave it empty.", - "The field ResourceType must be one of the following values:", - ", ".join(get_metadata_valid_resource_type(metadata["Provider"])), - "If you don't have a valid value for ResourceType, you can leave it empty.", - "The field Category must be one or more of the following values: encryption, forensics-ready, internet-exposed, logging, redundancy, secrets, thread-detection, trustboundaries or vulnerability-management. 
If you don't have a valid value for Category, you can leave it empty.", - "I need the answer only with JSON formatted text.", - ] - - response = model.generate_content(prompt_parts) - - if response: - # Format the response to a JSON object, removing the prompt parts - response = ( - response.text.replace("\n", "") - .replace("json", "") - .replace("JSON", "") - .replace("```", "") - .strip() - ) - - filled_metadata = json.loads(response) - - # Add removed fields back to the metadata - metadata["SubServiceName"] = "" - metadata["Remediation"]["Code"]["NativeIaC"] = "" - metadata["Remediation"]["Code"]["Other"] = "" - metadata["Remediation"]["Code"]["Terraform"] = "" - metadata["DependsOn"] = [] - metadata["RelatedTo"] = [] - - else: - raise Exception("Error generating metadata with Gemini AI") - - except Exception as e: - raise Exception(f"Error generating metadata with Gemini AI: {e}") - - return filled_metadata - if __name__ == "__main__": try: From 16abc94433475343911394ab061c0498edd79fec Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rub=C3=A9n=20De=20la=20Torre=20Vico?= Date: Fri, 20 Sep 2024 12:15:50 +0200 Subject: [PATCH 05/21] docs(tutorials): add prowler check kreator description and usage --- docs/tutorials/prowler-check-kreator.md | 97 +++++++++++++++++++++++++ mkdocs.yml | 1 + 2 files changed, 98 insertions(+) create mode 100644 docs/tutorials/prowler-check-kreator.md diff --git a/docs/tutorials/prowler-check-kreator.md b/docs/tutorials/prowler-check-kreator.md new file mode 100644 index 00000000000..02b21d8a610 --- /dev/null +++ b/docs/tutorials/prowler-check-kreator.md @@ -0,0 +1,97 @@ + +# Prowler Check Kreator + +???+ note + Currently, it is only available for AWS provider. + +**Prowler Check Kreator** is a tool that helps you to create new checks for Prowler. It creates the necessary files to add a new check to the Prowler repository. 
The files that it creates are: + +- Check folder +- Check file +- Metadata file +- Test folder and file + +## Usage + +For use it only run the main file with the following command: + +```bash +python util/prowler_check_kreator/prowler_check_kreator.py +``` + +In the case that you want to use a shortcut you can add a new function in your shell configuration file: + +???+ note + Set the `PROWLER_PATH` environment variable with the path to the Prowler repository. + +**For Bash shell:** + +Add an alias in your `~/.bashrc` file: + +```bash +function pck() { + CWD=$PWD + cd "$PROWLER_PATH" + + arg1=$1 + arg2=$2 + + # Only pass arguments if they exist + if [ -z "$arg1" ]; then + arg1="" + fi + if [ -z "$arg2" ]; then + arg2="" + fi + + poetry run python ./util/prowler_check_kreator/prowler_check_kreator.py $arg1 $arg2 + cd "$CWD" +} +``` + +**For Zsh shell:** + +Add an alias in your `~/.zshrc` file: + +```bash +function pck() { + CWD=$PWD + cd "$PROWLER_PATH" + + arg1=$1 + arg2=$2 + + # Only pass arguments if they exist + if [ -z "$arg1" ]; then + arg1="" + fi + if [ -z "$arg2" ]; then + arg2="" + fi + + poetry run python ./util/prowler_check_kreator/prowler_check_kreator.py $arg1 $arg2 + cd "$CWD" +} +``` + +**For Fish shell:** + +Add a function in your `~/.config/fish/config.fish` file: + +```bash +function pck + set CWD $PWD + cd $PROWLER_PATH + poetry run python ./util/prowler_check_kreator/prowler_check_kreator.py $argv[1] $argv[2] + cd $CWD +end +``` + +## AI integration + +???+ warning + Be careful with the code/information generated by the AI, it could have some errors. + Related to metadata, the AI could generate the metadata file with some wrong links, commands or information, review the information before commit it. + Related to the check code, the AI does not change the services, so probably you need to change some code to make it work, take this tool more than a helper than a full solution. 
+ +The tool could use AI to generate the check code and the metadata file. For now the tool only supports integration with [Gemini](https://gemini.google.com/), to use it you only need to indicate when the tool asks for it and have set the `GEMINI_API_KEY` environment variable with the Gemini API key. To get your API key refer to the [Gemini documentation](https://ai.google.dev/gemini-api/docs/api-key). diff --git a/mkdocs.yml b/mkdocs.yml index d696191908e..d77bf2c4414 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -65,6 +65,7 @@ nav: - Pentesting: tutorials/pentesting.md - Parallel Execution: tutorials/parallel-execution.md - Developer Guide: developer-guide/introduction.md + - Prowler Check Kreator: tutorials/prowler-check-kreator.md - AWS: - Authentication: tutorials/aws/authentication.md - Assume Role: tutorials/aws/role-assumption.md From 1b587217e71b0179978ddabd20b9607d597422f9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rub=C3=A9n=20De=20la=20Torre=20Vico?= Date: Fri, 20 Sep 2024 13:07:29 +0200 Subject: [PATCH 06/21] fix(prowler-check-kreator): fix index list problems --- util/prowler_check_kreator/lib/llms/gemini.py | 7 ++++++- util/prowler_check_kreator/prowler_check_kreator.py | 6 +++--- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/util/prowler_check_kreator/lib/llms/gemini.py b/util/prowler_check_kreator/lib/llms/gemini.py index 51334da6337..830e56bc6d0 100644 --- a/util/prowler_check_kreator/lib/llms/gemini.py +++ b/util/prowler_check_kreator/lib/llms/gemini.py @@ -1,5 +1,6 @@ import json import os +import re import google.generativeai as genai @@ -67,7 +68,11 @@ def _generate_content(self, prompt_parts: list) -> str: def _prepare_check_prompt(self, check_name: str, check_reference: str) -> list: """Prepare the prompt for generating the check.""" - class_name = check_reference.split("(")[0].split("class ")[1] + + class_name = re.search( + r"class\s+([A-Za-z_][A-Za-z0-9_]*)\s*\((.*?)\)\s*:", check_reference + ).group(1) + prompt_parts = [
f"Your task is to create a new security check called '{check_name}' for Prowler (a Cloud Security tool).", "The control is a Python class that inherits from the Check class and has only one method called execute.", diff --git a/util/prowler_check_kreator/prowler_check_kreator.py b/util/prowler_check_kreator/prowler_check_kreator.py index 4795d12022e..3ed08e485f2 100644 --- a/util/prowler_check_kreator/prowler_check_kreator.py +++ b/util/prowler_check_kreator/prowler_check_kreator.py @@ -83,7 +83,7 @@ def __init__(self, provider: str, check_name: str): # Let the user to use the model that he wants self._model = None supported_models = [ - "gemini-1.5-flash (default)", + "gemini-1.5-flash", "gemini-1.5-pro", "gemini-1.0-pro", ] @@ -361,8 +361,8 @@ def _write_test_file(self) -> None: sys.exit(prowler_check_creator.kreate_check()) except ValueError as e: - print(f"Error: {e}") + print(f"Error: {e} (line {sys.exc_info()[-1].tb_lineno})") sys.exit(1) except Exception as e: - print(f"Error: {e}") + print(f"Error: {e} (line {sys.exc_info()[-1].tb_lineno})") sys.exit(1) From 06da3905a85dea6a9ad9e4e0c3db6072792d6adc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rub=C3=A9n=20De=20la=20Torre=20Vico?= Date: Fri, 20 Sep 2024 16:16:54 +0200 Subject: [PATCH 07/21] feat(prowler-check-kreator): manage variable not set --- util/prowler_check_kreator/lib/llms/gemini.py | 5 ++++- util/prowler_check_kreator/prowler_check_kreator.py | 4 ++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/util/prowler_check_kreator/lib/llms/gemini.py b/util/prowler_check_kreator/lib/llms/gemini.py index 830e56bc6d0..7a390530adc 100644 --- a/util/prowler_check_kreator/lib/llms/gemini.py +++ b/util/prowler_check_kreator/lib/llms/gemini.py @@ -12,7 +12,10 @@ class Gemini: def __init__(self, model: str = "gemini-1.5-flash"): - self.api_key = os.getenv("GEMINI_API_KEY") + if os.getenv("GEMINI_API_KEY"): + self.api_key = os.getenv("GEMINI_API_KEY") + else: + raise Exception("GEMINI_API_KEY environment 
variable is not set") if model not in ["gemini-1.5-flash", "gemini-1.5-pro", "gemini-1.0-pro"]: raise Exception("Invalid Gemini AI model") diff --git a/util/prowler_check_kreator/prowler_check_kreator.py b/util/prowler_check_kreator/prowler_check_kreator.py index 3ed08e485f2..bece8a960fc 100644 --- a/util/prowler_check_kreator/prowler_check_kreator.py +++ b/util/prowler_check_kreator/prowler_check_kreator.py @@ -361,8 +361,8 @@ def _write_test_file(self) -> None: sys.exit(prowler_check_creator.kreate_check()) except ValueError as e: - print(f"Error: {e} (line {sys.exc_info()[-1].tb_lineno})") + print(f"Error: {e}") sys.exit(1) except Exception as e: - print(f"Error: {e} (line {sys.exc_info()[-1].tb_lineno})") + print(f"Error: {e}") sys.exit(1) From 6008da56115af5c2f065e88b8015f2da552cc93f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rub=C3=A9n=20De=20la=20Torre=20Vico?= Date: Fri, 4 Oct 2024 10:23:51 +0200 Subject: [PATCH 08/21] chore(pck): add single context source --- util/prowler_check_kreator/lib/llms/gemini.py | 11 ++++----- .../prowler_check_kreator.py | 23 +++++++++++-------- 2 files changed, 18 insertions(+), 16 deletions(-) diff --git a/util/prowler_check_kreator/lib/llms/gemini.py b/util/prowler_check_kreator/lib/llms/gemini.py index 7a390530adc..0c046d0c5af 100644 --- a/util/prowler_check_kreator/lib/llms/gemini.py +++ b/util/prowler_check_kreator/lib/llms/gemini.py @@ -88,10 +88,9 @@ def _prepare_check_prompt(self, check_name: str, check_reference: str) -> list: ] return prompt_parts - def _prepare_metadata_prompt(self, metadata: dict, context_sources: dict) -> list: + def _prepare_metadata_prompt(self, metadata: dict, context: str) -> list: """Prepare the prompt for generating the metadata.""" - # Remove empty context sources and unnecessary fields - context_sources = {k: v for k, v in context_sources.items() if v} + metadata.pop("SubServiceName", None) metadata["Remediation"]["Code"].pop("NativeIaC", None) metadata["Remediation"]["Code"].pop("Other", 
None) @@ -116,7 +115,7 @@ def _prepare_metadata_prompt(self, metadata: dict, context_sources: dict) -> lis "The metadata is a JSON object with the following fields: ", json.dumps(metadata, indent=2), "Use the following context sources as inspiration to fill the metadata: ", - json.dumps(context_sources, indent=2), + context, "The field CheckType should be filled following the format: 'namespace/category/classifier', where namespace, category, and classifier are the values from the following dict: ", json.dumps(get_metadata_valid_check_type(metadata["Provider"]), indent=2), "One example of a valid CheckType value is: 'Software and Configuration Checks/Vulnerabilities/CVE'. If you don't have a valid value for CheckType, you can leave it empty.", @@ -141,12 +140,12 @@ def generate_check(self, check_name: str, check_reference: str) -> str: .strip() ) - def generate_metadata(self, metadata: dict, context_sources: dict) -> dict: + def generate_metadata(self, metadata: dict, context: str) -> dict: """Fill the metadata with Gemini AI.""" if not metadata: return {} - prompt_parts = self._prepare_metadata_prompt(metadata, context_sources) + prompt_parts = self._prepare_metadata_prompt(metadata, context) filled_metadata_json = self._generate_content(prompt_parts) # Parse the generated JSON and re-add the removed fields diff --git a/util/prowler_check_kreator/prowler_check_kreator.py b/util/prowler_check_kreator/prowler_check_kreator.py index bece8a960fc..463d96933cd 100644 --- a/util/prowler_check_kreator/prowler_check_kreator.py +++ b/util/prowler_check_kreator/prowler_check_kreator.py @@ -309,18 +309,21 @@ def _write_metadata_file(self) -> None: ) if user_input == "yes": - # Ask for some context to fill the metadata + # Ask for some context to the user to generate the metadata, the context input finishes with a blank line - context_sources = {"TrendMicro": "", "SecurityHub": "", "Other": ""} - - for source in context_sources: - context_sources[source] = input( - f"Please 
provide some context from {source} (leave empty if none): " - ) - - filled_metadata = self._model.generate_metadata( - metadata_template, context_sources + print( + "Please provide some context to fill the metadata (end with an empty line):" ) + context_lines = [] + while True: + line = input() + if line: + context_lines.append(line) + else: + break + context = "\n".join(context_lines) + + filled_metadata = self._model.generate_metadata(metadata_template, context) else: filled_metadata = metadata_template From 0ea2119844349459582aa9a13549af9e74dccfaa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rub=C3=A9n=20De=20la=20Torre=20Vico?= Date: Fri, 4 Oct 2024 10:45:11 +0200 Subject: [PATCH 09/21] chore(pck): introduce metadata in correct order --- util/prowler_check_kreator/lib/llms/gemini.py | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/util/prowler_check_kreator/lib/llms/gemini.py b/util/prowler_check_kreator/lib/llms/gemini.py index 0c046d0c5af..17b21553161 100644 --- a/util/prowler_check_kreator/lib/llms/gemini.py +++ b/util/prowler_check_kreator/lib/llms/gemini.py @@ -157,11 +157,22 @@ def generate_metadata(self, metadata: dict, context: str) -> dict: .strip() ) - filled_metadata["SubServiceName"] = "" + # Add the removed fields back in the same order + filled_metadata["Remediation"]["Code"]["NativeIaC"] = "" filled_metadata["Remediation"]["Code"]["Other"] = "" filled_metadata["Remediation"]["Code"]["Terraform"] = "" - filled_metadata["DependsOn"] = [] - filled_metadata["RelatedTo"] = [] - return filled_metadata + # Insert key SubServiceName after ServiceName key and RelatedTo and DependsOn just before Notes key + + ordered_filled_metadata = {} + + for key, value in filled_metadata.items(): + ordered_filled_metadata[key] = value + if key == "ServiceName": + ordered_filled_metadata["SubServiceName"] = "" + if key == "Notes": + ordered_filled_metadata["DependsOn"] = [] + ordered_filled_metadata["RelatedTo"] = [] + + return 
ordered_filled_metadata From db6e88c038a256203c3b82f70074796ef69594c3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rub=C3=A9n=20De=20la=20Torre=20Vico?= Date: Fri, 4 Oct 2024 14:52:51 +0200 Subject: [PATCH 10/21] feat(pck): add to generate tests with Gemini --- util/prowler_check_kreator/lib/llms/gemini.py | 62 +++++++++++++++---- .../prowler_check_kreator.py | 37 ++++++++++- 2 files changed, 86 insertions(+), 13 deletions(-) diff --git a/util/prowler_check_kreator/lib/llms/gemini.py b/util/prowler_check_kreator/lib/llms/gemini.py index 17b21553161..b5bdb10b6a2 100644 --- a/util/prowler_check_kreator/lib/llms/gemini.py +++ b/util/prowler_check_kreator/lib/llms/gemini.py @@ -72,9 +72,12 @@ def _generate_content(self, prompt_parts: list) -> str: def _prepare_check_prompt(self, check_name: str, check_reference: str) -> list: """Prepare the prompt for generating the check.""" - class_name = re.search( + match = re.search( r"class\s+([A-Za-z_][A-Za-z0-9_]*)\s*\((.*?)\)\s*:", check_reference - ).group(1) + ) + if not match: + raise ValueError("No valid class definition found in the test reference.") + class_name = match.group(1) prompt_parts = [ f"Your task is to create a new security check called '{check_name}' for Prowler (a Cloud Security tool).", @@ -88,6 +91,26 @@ def _prepare_check_prompt(self, check_name: str, check_reference: str) -> list: ] return prompt_parts + def _prepare_test_prompt(self, check_name: str, test_reference: str) -> list: + """Prepare the prompt for generating the test.""" + + match = re.search(r"class\s+(Test_[A-Za-z_][A-Za-z0-9_]*)\s*:", test_reference) + if not match: + raise ValueError("No valid class definition found in the test reference.") + class_name = match.group(1) + + prompt_parts = [ + f"Your task is to create a new test for the security check '{check_name}' in Prowler (a Cloud Security tool).", + "The test must have one or more methods that start with the word 'test'.", + "The test methods must use the assert statement to check the 
results of the check.", + "I need the answer only with Python formatted text.", + "Use the following test as inspiration to create the new test: ", + f"{class_name}:", + test_reference, + f"Test_{check_name}:", + ] + return prompt_parts + def _prepare_metadata_prompt(self, metadata: dict, context: str) -> list: """Prepare the prompt for generating the metadata.""" @@ -129,16 +152,33 @@ def _prepare_metadata_prompt(self, metadata: dict, context: str) -> list: def generate_check(self, check_name: str, check_reference: str) -> str: """Fill the check with Gemini AI.""" - if not check_reference: - return "" + check = "" + + if check_reference: + prompt_parts = self._prepare_check_prompt(check_name, check_reference) + check = ( + self._generate_content(prompt_parts) + .replace("python", "") + .replace("```", "") + .strip() + ) - prompt_parts = self._prepare_check_prompt(check_name, check_reference) - return ( - self._generate_content(prompt_parts) - .replace("python", "") - .replace("```", "") - .strip() - ) + return check + + def generate_test(self, check_name: str, test_reference): + """Fill the test with Gemini AI.""" + test = "" + + if test_reference: + prompt_parts = self._prepare_test_prompt(check_name, test_reference) + test = ( + self._generate_content(prompt_parts) + .replace("python", "") + .replace("```", "") + .strip() + ) + + return test def generate_metadata(self, metadata: dict, context: str) -> dict: """Fill the metadata with Gemini AI.""" diff --git a/util/prowler_check_kreator/prowler_check_kreator.py b/util/prowler_check_kreator/prowler_check_kreator.py index 463d96933cd..ccb7b0cefd1 100644 --- a/util/prowler_check_kreator/prowler_check_kreator.py +++ b/util/prowler_check_kreator/prowler_check_kreator.py @@ -113,6 +113,8 @@ def __init__(self, provider: str, check_name: str): else: raise ValueError("Invalid model selected") + self._check_reference_name = "" + def kreate_check(self) -> None: """Create a new check in Prowler""" @@ -241,6 +243,7 @@ def 
_write_check_file(self) -> None: ) if user_input and self._check_exists(user_input): + self._check_reference_name = user_input # Load the file referenced by the user with open( os.path.join( @@ -308,7 +311,7 @@ def _write_metadata_file(self) -> None: .lower() ) - if user_input == "yes": + if user_input.lower().strip() == "yes": # Ask for some context to the user to generate the metadata, the context input finishes with a blank line print( @@ -344,10 +347,40 @@ def _write_test_file(self) -> None: self._check_name, ) - test_template = load_test_template( + test_content = load_test_template( self._provider, self._service_name, self._check_name ) + # Ask if want that Gemini to fill the test taking as reference the other check tests + if self._check_reference_name: + user_input = ( + input( + "Do you want to ask Gemini to fill the test now (based on check provided as reference in the check creation)? Type 'yes'/'no' and press enter: " + ) + .strip() + .lower() + ) + + if user_input.lower().strip() == "yes": + # Load the file referenced by the user + with open( + os.path.join( + self._prowler_folder, + "tests/providers/", + self._provider, + "services/", + self._service_name, + self._check_reference_name, + f"{self._check_reference_name}_test.py", + ), + "r", + ) as f: + test_content = f.read() + + test_template = self._model.generate_test( + self._check_name, test_content + ) + with open(os.path.join(test_folder, f"{self._check_name}_test.py"), "w") as f: f.write(test_template) From a7c27bf18f45c870c3b2d709ebb61bce466e05f2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rub=C3=A9n=20De=20la=20Torre=20Vico?= Date: Thu, 10 Oct 2024 11:46:31 +0200 Subject: [PATCH 11/21] chore(pck): add default value to ResourceType in metadata --- util/prowler_check_kreator/lib/llms/gemini.py | 12 ++++++++++-- util/prowler_check_kreator/lib/metadata_types.py | 14 ++++++++++++++ .../prowler_check_kreator/prowler_check_kreator.py | 4 ++-- 3 files changed, 26 insertions(+), 4 deletions(-) diff --git 
a/util/prowler_check_kreator/lib/llms/gemini.py b/util/prowler_check_kreator/lib/llms/gemini.py index b5bdb10b6a2..9223b72de59 100644 --- a/util/prowler_check_kreator/lib/llms/gemini.py +++ b/util/prowler_check_kreator/lib/llms/gemini.py @@ -5,6 +5,7 @@ import google.generativeai as genai from util.prowler_check_kreator.lib.metadata_types import ( + get_metadata_placeholder_resource_type, get_metadata_valid_check_type, get_metadata_valid_resource_type, ) @@ -142,7 +143,7 @@ def _prepare_metadata_prompt(self, metadata: dict, context: str) -> list: "The field CheckType should be filled following the format: 'namespace/category/classifier', where namespace, category, and classifier are the values from the following dict: ", json.dumps(get_metadata_valid_check_type(metadata["Provider"]), indent=2), "One example of a valid CheckType value is: 'Software and Configuration Checks/Vulnerabilities/CVE'. If you don't have a valid value for CheckType, you can leave it empty.", - "The field ResourceType must be one of the following values:", + f"The field ResourceType must be one of the following values (if there is not a valid value, you can put '{get_metadata_placeholder_resource_type(metadata['Provider'])}'): ", ", ".join(get_metadata_valid_resource_type(metadata["Provider"])), "If you don't have a valid value for ResourceType, you can leave it empty.", f"The field Category must be one or more of the following values: {', '.join(valid_prowler_categories)}.", @@ -198,7 +199,6 @@ def generate_metadata(self, metadata: dict, context: str) -> dict: ) # Add the removed fields back in the same order - filled_metadata["Remediation"]["Code"]["NativeIaC"] = "" filled_metadata["Remediation"]["Code"]["Other"] = "" filled_metadata["Remediation"]["Code"]["Terraform"] = "" @@ -215,4 +215,12 @@ def generate_metadata(self, metadata: dict, context: str) -> dict: ordered_filled_metadata["DependsOn"] = [] ordered_filled_metadata["RelatedTo"] = [] + # Check that resource type is valid + if
filled_metadata["ResourceType"]: + valid_resource_types = get_metadata_valid_resource_type( + filled_metadata["Provider"] + ) + if filled_metadata["ResourceType"] not in valid_resource_types: + ordered_filled_metadata["ResourceType"] = "Other" + return ordered_filled_metadata diff --git a/util/prowler_check_kreator/lib/metadata_types.py b/util/prowler_check_kreator/lib/metadata_types.py index 33cc6143b66..11052ea4b8f 100644 --- a/util/prowler_check_kreator/lib/metadata_types.py +++ b/util/prowler_check_kreator/lib/metadata_types.py @@ -221,3 +221,17 @@ def get_metadata_valid_resource_type(provider: str = "aws") -> set: } return valid_resource_types + + +def get_metadata_placeholder_resource_type(provider: str = "aws") -> str: + """Get the placeholder for the resource type for the provider + + Keyword arguments: + provider -- The provider of the service + """ + placeholder = "" + + if provider == "aws": + placeholder = "Other" + + return placeholder diff --git a/util/prowler_check_kreator/prowler_check_kreator.py b/util/prowler_check_kreator/prowler_check_kreator.py index ccb7b0cefd1..52a1ad54535 100644 --- a/util/prowler_check_kreator/prowler_check_kreator.py +++ b/util/prowler_check_kreator/prowler_check_kreator.py @@ -236,7 +236,7 @@ def _write_check_file(self) -> None: user_input = ( input( - "WARNING: This still in beta. The check generated may not have sense or you will have to add some parameters to the service\nDo you want to ask Gemini to fill the check now? If yes, type the reference check name and press enter. If not, press enter: " + "WARNING: This still in beta. The check generated may not have sense or you will have to add some parameters to the service\nDo you want to ask Gemini to fill the check now? If yes, type the reference check name and press enter. 
If not, press enter (it will be created with a standard template): " ) .strip() .lower() @@ -355,7 +355,7 @@ def _write_test_file(self) -> None: if self._check_reference_name: user_input = ( input( - "Do you want to ask Gemini to fill the test now (based on check provided as reference in the check creation)? Type 'yes'/'no' and press enter: " + "Do you want to ask Gemini to fill the test now (based on check provided as reference in the check creation)? Type 'yes'/'no' and press enter (if not, it will be created with a standard template): " ) .strip() .lower() From bf3cec0a6b108e8ac516093047abc091822a4169 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rub=C3=A9n=20De=20la=20Torre=20Vico?= Date: Mon, 14 Oct 2024 13:58:02 +0200 Subject: [PATCH 12/21] chore(pck): change order to introduce Notes in metadata --- util/prowler_check_kreator/lib/llms/gemini.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/util/prowler_check_kreator/lib/llms/gemini.py b/util/prowler_check_kreator/lib/llms/gemini.py index 9223b72de59..bf06781d1b7 100644 --- a/util/prowler_check_kreator/lib/llms/gemini.py +++ b/util/prowler_check_kreator/lib/llms/gemini.py @@ -208,12 +208,12 @@ def generate_metadata(self, metadata: dict, context: str) -> dict: ordered_filled_metadata = {} for key, value in filled_metadata.items(): - ordered_filled_metadata[key] = value - if key == "ServiceName": - ordered_filled_metadata["SubServiceName"] = "" if key == "Notes": ordered_filled_metadata["DependsOn"] = [] ordered_filled_metadata["RelatedTo"] = [] + ordered_filled_metadata[key] = value + if key == "ServiceName": + ordered_filled_metadata["SubServiceName"] = "" # Check that resource type is valid if filled_metadata["ResourceType"]: From 392609c2678f40e71e9d0a72594f1bfbdb51e81c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rub=C3=A9n=20De=20la=20Torre=20Vico?= Date: Mon, 28 Oct 2024 09:16:34 +0100 Subject: [PATCH 13/21] chore(deps): add google-genai for use gemini to deps --- poetry.lock | 167 
++++++++++++++++++++++++++++++++++++++++++++++--- pyproject.toml | 1 + 2 files changed, 159 insertions(+), 9 deletions(-) diff --git a/poetry.lock b/poetry.lock index db5f0c48d82..7fbb48a7230 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1659,6 +1659,23 @@ gitdb = ">=4.0.1,<5" doc = ["sphinx (==4.3.2)", "sphinx-autodoc-typehints", "sphinx-rtd-theme", "sphinxcontrib-applehelp (>=1.0.2,<=1.0.4)", "sphinxcontrib-devhelp (==1.0.2)", "sphinxcontrib-htmlhelp (>=2.0.0,<=2.0.1)", "sphinxcontrib-qthelp (==1.0.3)", "sphinxcontrib-serializinghtml (==1.1.5)"] test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions"] +[[package]] +name = "google-ai-generativelanguage" +version = "0.6.10" +description = "Google Ai Generativelanguage API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google_ai_generativelanguage-0.6.10-py3-none-any.whl", hash = "sha256:854a2bf833d18be05ad5ef13c755567b66a4f4a870f099b62c61fe11bddabcf4"}, + {file = "google_ai_generativelanguage-0.6.10.tar.gz", hash = "sha256:6fa642c964d8728006fe7e8771026fc0b599ae0ebeaf83caf550941e8e693455"}, +] + +[package.dependencies] +google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} +google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0dev" +proto-plus = ">=1.22.3,<2.0.0dev" +protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev" + [[package]] name = "google-api-core" version = "2.20.0" @@ -1673,6 +1690,14 @@ files = [ [package.dependencies] google-auth = ">=2.14.1,<3.0.dev0" googleapis-common-protos = ">=1.56.2,<2.0.dev0" +grpcio = [ + {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, + {version = ">=1.49.1,<2.0dev", optional = true, markers = 
"python_version >= \"3.11\" and extra == \"grpc\""}, +] +grpcio-status = [ + {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, + {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, +] proto-plus = ">=1.22.3,<2.0.0dev" protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" requests = ">=2.18.0,<3.0.0.dev0" @@ -1738,6 +1763,29 @@ files = [ google-auth = "*" httplib2 = ">=0.19.0" +[[package]] +name = "google-generativeai" +version = "0.8.3" +description = "Google Generative AI High level API client library and tools." +optional = false +python-versions = ">=3.9" +files = [ + {file = "google_generativeai-0.8.3-py3-none-any.whl", hash = "sha256:1108ff89d5b8e59f51e63d1a8bf84701cd84656e17ca28d73aeed745e736d9b7"}, +] + +[package.dependencies] +google-ai-generativelanguage = "0.6.10" +google-api-core = "*" +google-api-python-client = "*" +google-auth = ">=2.15.0" +protobuf = "*" +pydantic = "*" +tqdm = "*" +typing-extensions = "*" + +[package.extras] +dev = ["Pillow", "absl-py", "black", "ipython", "nose2", "pandas", "pytype", "pyyaml"] + [[package]] name = "googleapis-common-protos" version = "1.65.0" @@ -1779,6 +1827,89 @@ files = [ {file = "graphql_core-3.2.4-py3-none-any.whl", hash = "sha256:1604f2042edc5f3114f49cac9d77e25863be51b23a54a61a23245cf32f6476f0"}, ] +[[package]] +name = "grpcio" +version = "1.67.0" +description = "HTTP/2-based RPC framework" +optional = false +python-versions = ">=3.8" +files = [ + {file = "grpcio-1.67.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:bd79929b3bb96b54df1296cd3bf4d2b770bd1df6c2bdf549b49bab286b925cdc"}, + {file = "grpcio-1.67.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:16724ffc956ea42967f5758c2f043faef43cb7e48a51948ab593570570d1e68b"}, + {file = 
"grpcio-1.67.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:2b7183c80b602b0ad816315d66f2fb7887614ead950416d60913a9a71c12560d"}, + {file = "grpcio-1.67.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:efe32b45dd6d118f5ea2e5deaed417d8a14976325c93812dd831908522b402c9"}, + {file = "grpcio-1.67.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe89295219b9c9e47780a0f1c75ca44211e706d1c598242249fe717af3385ec8"}, + {file = "grpcio-1.67.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa8d025fae1595a207b4e47c2e087cb88d47008494db258ac561c00877d4c8f8"}, + {file = "grpcio-1.67.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f95e15db43e75a534420e04822df91f645664bf4ad21dfaad7d51773c80e6bb4"}, + {file = "grpcio-1.67.0-cp310-cp310-win32.whl", hash = "sha256:a6b9a5c18863fd4b6624a42e2712103fb0f57799a3b29651c0e5b8119a519d65"}, + {file = "grpcio-1.67.0-cp310-cp310-win_amd64.whl", hash = "sha256:b6eb68493a05d38b426604e1dc93bfc0137c4157f7ab4fac5771fd9a104bbaa6"}, + {file = "grpcio-1.67.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:e91d154689639932305b6ea6f45c6e46bb51ecc8ea77c10ef25aa77f75443ad4"}, + {file = "grpcio-1.67.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:cb204a742997277da678611a809a8409657b1398aaeebf73b3d9563b7d154c13"}, + {file = "grpcio-1.67.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:ae6de510f670137e755eb2a74b04d1041e7210af2444103c8c95f193340d17ee"}, + {file = "grpcio-1.67.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74b900566bdf68241118f2918d312d3bf554b2ce0b12b90178091ea7d0a17b3d"}, + {file = "grpcio-1.67.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4e95e43447a02aa603abcc6b5e727d093d161a869c83b073f50b9390ecf0fa8"}, + {file = "grpcio-1.67.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0bb94e66cd8f0baf29bd3184b6aa09aeb1a660f9ec3d85da615c5003154bc2bf"}, + {file = 
"grpcio-1.67.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:82e5bd4b67b17c8c597273663794a6a46a45e44165b960517fe6d8a2f7f16d23"}, + {file = "grpcio-1.67.0-cp311-cp311-win32.whl", hash = "sha256:7fc1d2b9fd549264ae585026b266ac2db53735510a207381be509c315b4af4e8"}, + {file = "grpcio-1.67.0-cp311-cp311-win_amd64.whl", hash = "sha256:ac11ecb34a86b831239cc38245403a8de25037b448464f95c3315819e7519772"}, + {file = "grpcio-1.67.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:227316b5631260e0bef8a3ce04fa7db4cc81756fea1258b007950b6efc90c05d"}, + {file = "grpcio-1.67.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d90cfdafcf4b45a7a076e3e2a58e7bc3d59c698c4f6470b0bb13a4d869cf2273"}, + {file = "grpcio-1.67.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:77196216d5dd6f99af1c51e235af2dd339159f657280e65ce7e12c1a8feffd1d"}, + {file = "grpcio-1.67.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15c05a26a0f7047f720da41dc49406b395c1470eef44ff7e2c506a47ac2c0591"}, + {file = "grpcio-1.67.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3840994689cc8cbb73d60485c594424ad8adb56c71a30d8948d6453083624b52"}, + {file = "grpcio-1.67.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:5a1e03c3102b6451028d5dc9f8591131d6ab3c8a0e023d94c28cb930ed4b5f81"}, + {file = "grpcio-1.67.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:682968427a63d898759474e3b3178d42546e878fdce034fd7474ef75143b64e3"}, + {file = "grpcio-1.67.0-cp312-cp312-win32.whl", hash = "sha256:d01793653248f49cf47e5695e0a79805b1d9d4eacef85b310118ba1dfcd1b955"}, + {file = "grpcio-1.67.0-cp312-cp312-win_amd64.whl", hash = "sha256:985b2686f786f3e20326c4367eebdaed3e7aa65848260ff0c6644f817042cb15"}, + {file = "grpcio-1.67.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:8c9a35b8bc50db35ab8e3e02a4f2a35cfba46c8705c3911c34ce343bd777813a"}, + {file = "grpcio-1.67.0-cp313-cp313-macosx_10_13_universal2.whl", hash = 
"sha256:42199e704095b62688998c2d84c89e59a26a7d5d32eed86d43dc90e7a3bd04aa"}, + {file = "grpcio-1.67.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:c4c425f440fb81f8d0237c07b9322fc0fb6ee2b29fbef5f62a322ff8fcce240d"}, + {file = "grpcio-1.67.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:323741b6699cd2b04a71cb38f502db98f90532e8a40cb675393d248126a268af"}, + {file = "grpcio-1.67.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:662c8e105c5e5cee0317d500eb186ed7a93229586e431c1bf0c9236c2407352c"}, + {file = "grpcio-1.67.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:f6bd2ab135c64a4d1e9e44679a616c9bc944547357c830fafea5c3caa3de5153"}, + {file = "grpcio-1.67.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:2f55c1e0e2ae9bdd23b3c63459ee4c06d223b68aeb1961d83c48fb63dc29bc03"}, + {file = "grpcio-1.67.0-cp313-cp313-win32.whl", hash = "sha256:fd6bc27861e460fe28e94226e3673d46e294ca4673d46b224428d197c5935e69"}, + {file = "grpcio-1.67.0-cp313-cp313-win_amd64.whl", hash = "sha256:cf51d28063338608cd8d3cd64677e922134837902b70ce00dad7f116e3998210"}, + {file = "grpcio-1.67.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:7f200aca719c1c5dc72ab68be3479b9dafccdf03df530d137632c534bb6f1ee3"}, + {file = "grpcio-1.67.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0892dd200ece4822d72dd0952f7112c542a487fc48fe77568deaaa399c1e717d"}, + {file = "grpcio-1.67.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:f4d613fbf868b2e2444f490d18af472ccb47660ea3df52f068c9c8801e1f3e85"}, + {file = "grpcio-1.67.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c69bf11894cad9da00047f46584d5758d6ebc9b5950c0dc96fec7e0bce5cde9"}, + {file = "grpcio-1.67.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9bca3ca0c5e74dea44bf57d27e15a3a3996ce7e5780d61b7c72386356d231db"}, + {file = "grpcio-1.67.0-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:014dfc020e28a0d9be7e93a91f85ff9f4a87158b7df9952fe23cc42d29d31e1e"}, + {file = "grpcio-1.67.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d4ea4509d42c6797539e9ec7496c15473177ce9abc89bc5c71e7abe50fc25737"}, + {file = "grpcio-1.67.0-cp38-cp38-win32.whl", hash = "sha256:9d75641a2fca9ae1ae86454fd25d4c298ea8cc195dbc962852234d54a07060ad"}, + {file = "grpcio-1.67.0-cp38-cp38-win_amd64.whl", hash = "sha256:cff8e54d6a463883cda2fab94d2062aad2f5edd7f06ae3ed030f2a74756db365"}, + {file = "grpcio-1.67.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:62492bd534979e6d7127b8a6b29093161a742dee3875873e01964049d5250a74"}, + {file = "grpcio-1.67.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eef1dce9d1a46119fd09f9a992cf6ab9d9178b696382439446ca5f399d7b96fe"}, + {file = "grpcio-1.67.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:f623c57a5321461c84498a99dddf9d13dac0e40ee056d884d6ec4ebcab647a78"}, + {file = "grpcio-1.67.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54d16383044e681f8beb50f905249e4e7261dd169d4aaf6e52eab67b01cbbbe2"}, + {file = "grpcio-1.67.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2a44e572fb762c668e4812156b81835f7aba8a721b027e2d4bb29fb50ff4d33"}, + {file = "grpcio-1.67.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:391df8b0faac84d42f5b8dfc65f5152c48ed914e13c522fd05f2aca211f8bfad"}, + {file = "grpcio-1.67.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cfd9306511fdfc623a1ba1dc3bc07fbd24e6cfbe3c28b4d1e05177baa2f99617"}, + {file = "grpcio-1.67.0-cp39-cp39-win32.whl", hash = "sha256:30d47dbacfd20cbd0c8be9bfa52fdb833b395d4ec32fe5cff7220afc05d08571"}, + {file = "grpcio-1.67.0-cp39-cp39-win_amd64.whl", hash = "sha256:f55f077685f61f0fbd06ea355142b71e47e4a26d2d678b3ba27248abfe67163a"}, + {file = "grpcio-1.67.0.tar.gz", hash = "sha256:e090b2553e0da1c875449c8e75073dd4415dd71c9bde6a406240fdf4c0ee467c"}, +] + +[package.extras] +protobuf = ["grpcio-tools (>=1.67.0)"] + +[[package]] +name 
= "grpcio-status" +version = "1.67.0" +description = "Status proto mapping for gRPC" +optional = false +python-versions = ">=3.8" +files = [ + {file = "grpcio_status-1.67.0-py3-none-any.whl", hash = "sha256:0e79e2e01ba41a6ca6ed9d7a825323c511fe1653a646f8014c7e3c8132527acc"}, + {file = "grpcio_status-1.67.0.tar.gz", hash = "sha256:c3e5a86fa007e9e263cd5f988a8a907484da4caab582874ea2a4a6092734046b"}, +] + +[package.dependencies] +googleapis-common-protos = ">=1.5.5" +grpcio = ">=1.67.0" +protobuf = ">=5.26.1,<6.0dev" + [[package]] name = "h11" version = "0.14.0" @@ -3502,7 +3633,6 @@ description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs optional = false python-versions = ">=3.8" files = [ - {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, ] @@ -3513,7 +3643,6 @@ description = "A collection of ASN.1-based protocols modules" optional = false python-versions = ">=3.8" files = [ - {file = "pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"}, {file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"}, ] @@ -4314,24 +4443,24 @@ python-versions = ">=3.6" files = [ {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:d92f81886165cb14d7b067ef37e142256f1c6a90a65cd156b063a43da1708cfd"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash 
= "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:aa2267c6a303eb483de8d02db2871afb5c5fc15618d894300b88958f729ad74f"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:840f0c7f194986a63d2c2465ca63af8ccbbc90ab1c6001b1978f05119b5e7334"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:024cfe1fc7c7f4e1aff4a81e718109e13409767e4f871443cbff3dba3578203d"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:b5edda50e5e9e15e54a6a8a0070302b00c518a9d32accc2346ad6c984aacd279"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_24_aarch64.whl", hash = "sha256:1707814f0d9791df063f8c19bb51b0d1278b8e9a2353abbb676c2f685dee6afe"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:46d378daaac94f454b3a0e3d8d78cafd78a026b1d71443f4966c696b48a6d899"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:09b055c05697b38ecacb7ac50bdab2240bfca1a0c4872b0fd309bb07dc9aa3a9"}, {file = 
"ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:7048c338b6c86627afb27faecf418768acb6331fc24cfa56c93e8c9780f815fa"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_24_aarch64.whl", hash = "sha256:1dc67314e7e1086c9fdf2680b7b6c2be1c0d8e3a8279f2e993ca2a7545fecf62"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3213ece08ea033eb159ac52ae052a4899b56ecc124bb80020d9bbceeb50258e9"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aab7fd643f71d7946f2ee58cc88c9b7bfc97debd71dcc93e03e2d174628e7e2d"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win32.whl", hash = "sha256:5c365d91c88390c8d0a8545df0b5857172824b1c604e867161e6b3d59a827eaa"}, @@ -4339,7 +4468,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = 
"sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3fcc54cb0c8b811ff66082de1680b4b14cf8a81dce0d4fbf665c2265a81e07a1"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:77159f5d5b5c14f7c34073862a6b7d34944075d9f93e681638f6d753606c6ce6"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4ecbf9c3e19f9562c7fdd462e8d18dd902a47ca046a2e64dba80699f0b6c09b7"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:87ea5ff66d8064301a154b3933ae406b0863402a799b16e4a1d24d9fbbcbe0d3"}, @@ -4347,7 +4476,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:665f58bfd29b167039f714c6998178d27ccd83984084c286110ef26b230f259f"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:305889baa4043a09e5b76f8e2a51d4ffba44259f6b4c72dec8ca56207d9c6fe1"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e2b4c44b60eadec492926a7270abb100ef9f72798e18743939bdbf037aab8c28"}, {file = 
"ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e79e5db08739731b0ce4850bed599235d601701d5694c36570a99a0c5ca41a9d"}, @@ -4355,7 +4484,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:9eb5dee2772b0f704ca2e45b1713e4e5198c18f515b52743576d196348f374d3"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:a1a45e0bb052edf6a1d3a93baef85319733a888363938e1fc9924cb00c8df24c"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:184565012b60405d93838167f425713180b949e9d8dd0bbc7b49f074407c5a8b"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a75879bacf2c987c003368cf14bed0ffe99e8e85acfa6c0bfffc21a090f16880"}, @@ -4655,6 +4784,26 @@ files = [ {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, ] +[[package]] +name = "tqdm" +version = "4.66.5" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.66.5-py3-none-any.whl", hash = "sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd"}, + {file = "tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad"}, +] + +[package.dependencies] +colorama = {version = "*", 
markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + [[package]] name = "typer" version = "0.12.5" @@ -5072,4 +5221,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<3.13" -content-hash = "0b367fa80501022efe43dc1beaa7f3da278fb64ffaddece72a0e88b09a0e53a2" +content-hash = "5ca1fda9017adcb0fe53b6b17341a8239897f3d8f0ec238e19d488856491869f" diff --git a/pyproject.toml b/pyproject.toml index 22c0ba71bfe..9e933029b3a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -56,6 +56,7 @@ dash-bootstrap-components = "1.6.0" detect-secrets = {git = "https://github.com/Yelp/detect-secrets.git", rev = "master"} google-api-python-client = "2.147.0" google-auth-httplib2 = ">=0.1,<0.3" +google-generativeai = "^0.8.3" jsonschema = "4.23.0" kubernetes = "31.0.0" microsoft-kiota-abstractions = "1.3.3" From 9efc455ab117e571c66c4db787bc390a398e57e3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rub=C3=A9n=20De=20la=20Torre=20Vico?= Date: Mon, 28 Oct 2024 09:21:05 +0100 Subject: [PATCH 14/21] chore(gemini): change flake8 error --- util/prowler_check_kreator/lib/llms/gemini.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/util/prowler_check_kreator/lib/llms/gemini.py b/util/prowler_check_kreator/lib/llms/gemini.py index bf06781d1b7..498ff4678d0 100644 --- a/util/prowler_check_kreator/lib/llms/gemini.py +++ b/util/prowler_check_kreator/lib/llms/gemini.py @@ -141,10 +141,12 @@ def _prepare_metadata_prompt(self, metadata: dict, context: str) -> list: "Use the following context sources as inspiration to fill the metadata: ", context, "The field CheckType should be filled following the format: 'namespace/category/classifier', where namespace, category, and classifier are the values from the following dict: ", - json.dumps(get_metadata_valid_check_type(metadata["Provider"]), 
indent=2), + json.dumps( + get_metadata_valid_check_type(metadata.get("Provider")), indent=2 + ), "One example of a valid CheckType value is: 'Software and Configuration Checks/Vulnerabilities/CVE'. If you don't have a valid value for CheckType, you can leave it empty.", - f"The field ResourceType must be one of the following values (if there is not a valid value, you can put '{get_metadata_placeholder_resource_type(metadata["Provider"])}'): ", - ", ".join(get_metadata_valid_resource_type(metadata["Provider"])), + f"The field ResourceType must be one of the following values (if there is not a valid value, you can put '{get_metadata_placeholder_resource_type(metadata.get("Provider"))}'): ", + ", ".join(get_metadata_valid_resource_type(metadata.get("Provider"))), "If you don't have a valid value for ResourceType, you can leave it empty.", f"The field Category must be one or more of the following values: {', '.join(valid_prowler_categories)}.", "I need the answer only with JSON formatted text.", From 0c132fc8079861051d8f7989b4028d592c95257e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rub=C3=A9n=20De=20la=20Torre=20Vico?= Date: Mon, 28 Oct 2024 09:53:35 +0100 Subject: [PATCH 15/21] chore(gemini): change flake8 error --- util/prowler_check_kreator/lib/llms/gemini.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/util/prowler_check_kreator/lib/llms/gemini.py b/util/prowler_check_kreator/lib/llms/gemini.py index 498ff4678d0..0ccefba95fa 100644 --- a/util/prowler_check_kreator/lib/llms/gemini.py +++ b/util/prowler_check_kreator/lib/llms/gemini.py @@ -145,7 +145,7 @@ def _prepare_metadata_prompt(self, metadata: dict, context: str) -> list: get_metadata_valid_check_type(metadata.get("Provider")), indent=2 ), "One example of a valid CheckType value is: 'Software and Configuration Checks/Vulnerabilities/CVE'. 
If you don't have a valid value for CheckType, you can leave it empty.", - f"The field ResourceType must be one of the following values (if there is not a valid value, you can put '{get_metadata_placeholder_resource_type(metadata.get("Provider"))}'): ", + f"The field ResourceType must be one of the following values, if there is not a valid value you can put '{get_metadata_placeholder_resource_type(metadata.get("Provider"))}': ", ", ".join(get_metadata_valid_resource_type(metadata.get("Provider"))), "If you don't have a valid value for ResourceType, you can leave it empty.", f"The field Category must be one or more of the following values: {', '.join(valid_prowler_categories)}.", From 6d0a6df514a34ae75af63817a4821aa8a9834bf7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rub=C3=A9n=20De=20la=20Torre=20Vico?= Date: Mon, 28 Oct 2024 09:59:14 +0100 Subject: [PATCH 16/21] chore(gemini): change flake8 error --- util/prowler_check_kreator/lib/llms/gemini.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/util/prowler_check_kreator/lib/llms/gemini.py b/util/prowler_check_kreator/lib/llms/gemini.py index 0ccefba95fa..d46bb1ab257 100644 --- a/util/prowler_check_kreator/lib/llms/gemini.py +++ b/util/prowler_check_kreator/lib/llms/gemini.py @@ -134,6 +134,10 @@ def _prepare_metadata_prompt(self, metadata: dict, context: str) -> list: "vulnerability-management", ] + metadata_placeholder_resource_type = get_metadata_placeholder_resource_type( + metadata.get("Provider") + ) + prompt_parts = [ "Your task is to fill the metadata for a new cybersecurity check in Prowler (a Cloud Security tool).", "The metadata is a JSON object with the following fields: ", @@ -145,7 +149,7 @@ def _prepare_metadata_prompt(self, metadata: dict, context: str) -> list: get_metadata_valid_check_type(metadata.get("Provider")), indent=2 ), "One example of a valid CheckType value is: 'Software and Configuration Checks/Vulnerabilities/CVE'. 
If you don't have a valid value for CheckType, you can leave it empty.", - f"The field ResourceType must be one of the following values, if there is not a valid value you can put '{get_metadata_placeholder_resource_type(metadata.get("Provider"))}': ", + f"The field ResourceType must be one of the following values (if there is not a valid value, you can put '{metadata_placeholder_resource_type}'): ", ", ".join(get_metadata_valid_resource_type(metadata.get("Provider"))), "If you don't have a valid value for ResourceType, you can leave it empty.", f"The field Category must be one or more of the following values: {', '.join(valid_prowler_categories)}.", From 33a258a9c78c94f7dede35943085298bc3dea47d Mon Sep 17 00:00:00 2001 From: Sergio Date: Thu, 7 Nov 2024 12:36:50 -0500 Subject: [PATCH 17/21] chore: revision --- docs/tutorials/prowler-check-kreator.md | 94 +++---------- poetry.lock | 171 ++---------------------- pyproject.toml | 1 - 3 files changed, 26 insertions(+), 240 deletions(-) diff --git a/docs/tutorials/prowler-check-kreator.md b/docs/tutorials/prowler-check-kreator.md index 02b21d8a610..b9c0cd48efa 100644 --- a/docs/tutorials/prowler-check-kreator.md +++ b/docs/tutorials/prowler-check-kreator.md @@ -2,96 +2,32 @@ # Prowler Check Kreator ???+ note - Currently, it is only available for AWS provider. + Currently, this tool is only available for creating checks for the AWS provider. -**Prowler Check Kreator** is a tool that helps you to create new checks for Prowler. It creates the necessary files to add a new check to the Prowler repository. The files that it creates are: +**Prowler Check Kreator** is a utility designed to streamline the creation of new checks for Prowler. This tool generates all necessary files required to add a new check to the Prowler repository. Specifically, it creates: -- Check folder -- Check file -- Metadata file -- Test folder and file +- A dedicated folder for the check. +- The main check script. 
+- A metadata file with essential details. +- A folder and file structure for testing the check. ## Usage -For use it only run the main file with the following command: +To use the tool, execute the main script with the following command: ```bash -python util/prowler_check_kreator/prowler_check_kreator.py +python util/prowler_check_kreator/prowler_check_kreator.py ``` +Parameters: -In the case that you want to use a shortcut you can add a new function in your shell configuration file: - -???+ note - Set the `PROWLER_PATH` environment variable with the path to the Prowler repository. - -**For Bash shell:** - -Add an alias in your `~/.bashrc` file: - -```bash -function pck() { - CWD=$PWD - cd "$PROWLER_PATH" - - arg1=$1 - arg2=$2 - - # Only pass arguments if they exist - if [ -z "$arg1" ]; then - arg1="" - fi - if [ -z "$arg2" ]; then - arg2="" - fi - - poetry run python ./util/prowler_check_kreator/prowler_check_kreator.py $arg1 $arg2 - cd "$CWD" -} -``` - -**For Zsh shell:** - -Add an alias in your `~/.zshrc` file: - -```bash -function pck() { - CWD=$PWD - cd "$PROWLER_PATH" - - arg1=$1 - arg2=$2 - - # Only pass arguments if they exist - if [ -z "$arg1" ]; then - arg1="" - fi - if [ -z "$arg2" ]; then - arg2="" - fi - - poetry run python ./util/prowler_check_kreator/prowler_check_kreator.py $arg1 $arg2 - cd "$CWD" -} -``` - -**For Fish shell:** - -Add a function in your `~/.config/fish/config.fish` file: - -```bash -function pck - set CWD $PWD - cd $PROWLER_PATH - poetry run python ./util/prowler_check_kreator/prowler_check_kreator.py $argv[1] $argv[2] - cd $CWD -end -``` +- ``: Currently only AWS is supported. +- ``: The name you wish to assign to the new check. ## AI integration +This tool optionally integrates AI to assist in generating the check code and metadata file content. When AI assistance is chosen, the tool uses [Gemini](https://gemini.google.com/) to produce preliminary code and metadata. 
+ ???+ warning - Be careful with the code/information generated by the AI, it could have some errors. - Related to metadata, the AI could generate the metadata file with some wrong links, commands or information, review the information before commit it. - Related to the check code, the AI does not change the services, so probably you need to change some code to make it work, take this tool more than a helper than a full solution. + AI-generated code and metadata might contain errors or require adjustments to align with specific Prowler requirements. Carefully review all AI-generated content before committing. -The tool could use AI to generate the check code and the metadata file. For now the tool only support integration with [Gemini](https://gemini.google.com/), to use it you only need to indicate when the tool ask for it and have set the `GEMINI_API_KEY` environment variable with the Gemini API key. To get your API key refer to the [Gemini documentation](https://ai.google.dev/gemini-api/docs/api-key). +To enable AI assistance, simply confirm when prompted by the tool. Additionally, ensure that the `GEMINI_API_KEY` environment variable is set with a valid Gemini API key. For instructions on obtaining your API key, refer to the [Gemini documentation](https://ai.google.dev/gemini-api/docs/api-key). diff --git a/poetry.lock b/poetry.lock index 5812db30a9d..dbedd5088a8 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. 
[[package]] name = "about-time" @@ -1659,23 +1659,6 @@ gitdb = ">=4.0.1,<5" doc = ["sphinx (==4.3.2)", "sphinx-autodoc-typehints", "sphinx-rtd-theme", "sphinxcontrib-applehelp (>=1.0.2,<=1.0.4)", "sphinxcontrib-devhelp (==1.0.2)", "sphinxcontrib-htmlhelp (>=2.0.0,<=2.0.1)", "sphinxcontrib-qthelp (==1.0.3)", "sphinxcontrib-serializinghtml (==1.1.5)"] test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions"] -[[package]] -name = "google-ai-generativelanguage" -version = "0.6.10" -description = "Google Ai Generativelanguage API client library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google_ai_generativelanguage-0.6.10-py3-none-any.whl", hash = "sha256:854a2bf833d18be05ad5ef13c755567b66a4f4a870f099b62c61fe11bddabcf4"}, - {file = "google_ai_generativelanguage-0.6.10.tar.gz", hash = "sha256:6fa642c964d8728006fe7e8771026fc0b599ae0ebeaf83caf550941e8e693455"}, -] - -[package.dependencies] -google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} -google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0dev" -proto-plus = ">=1.22.3,<2.0.0dev" -protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev" - [[package]] name = "google-api-core" version = "2.20.0" @@ -1690,14 +1673,6 @@ files = [ [package.dependencies] google-auth = ">=2.14.1,<3.0.dev0" googleapis-common-protos = ">=1.56.2,<2.0.dev0" -grpcio = [ - {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, - {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, -] -grpcio-status = [ - {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, - {version = 
">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, -] proto-plus = ">=1.22.3,<2.0.0dev" protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" requests = ">=2.18.0,<3.0.0.dev0" @@ -1763,29 +1738,6 @@ files = [ google-auth = "*" httplib2 = ">=0.19.0" -[[package]] -name = "google-generativeai" -version = "0.8.3" -description = "Google Generative AI High level API client library and tools." -optional = false -python-versions = ">=3.9" -files = [ - {file = "google_generativeai-0.8.3-py3-none-any.whl", hash = "sha256:1108ff89d5b8e59f51e63d1a8bf84701cd84656e17ca28d73aeed745e736d9b7"}, -] - -[package.dependencies] -google-ai-generativelanguage = "0.6.10" -google-api-core = "*" -google-api-python-client = "*" -google-auth = ">=2.15.0" -protobuf = "*" -pydantic = "*" -tqdm = "*" -typing-extensions = "*" - -[package.extras] -dev = ["Pillow", "absl-py", "black", "ipython", "nose2", "pandas", "pytype", "pyyaml"] - [[package]] name = "googleapis-common-protos" version = "1.65.0" @@ -1827,89 +1779,6 @@ files = [ {file = "graphql_core-3.2.4-py3-none-any.whl", hash = "sha256:1604f2042edc5f3114f49cac9d77e25863be51b23a54a61a23245cf32f6476f0"}, ] -[[package]] -name = "grpcio" -version = "1.67.0" -description = "HTTP/2-based RPC framework" -optional = false -python-versions = ">=3.8" -files = [ - {file = "grpcio-1.67.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:bd79929b3bb96b54df1296cd3bf4d2b770bd1df6c2bdf549b49bab286b925cdc"}, - {file = "grpcio-1.67.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:16724ffc956ea42967f5758c2f043faef43cb7e48a51948ab593570570d1e68b"}, - {file = "grpcio-1.67.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:2b7183c80b602b0ad816315d66f2fb7887614ead950416d60913a9a71c12560d"}, - {file = "grpcio-1.67.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", 
hash = "sha256:efe32b45dd6d118f5ea2e5deaed417d8a14976325c93812dd831908522b402c9"}, - {file = "grpcio-1.67.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe89295219b9c9e47780a0f1c75ca44211e706d1c598242249fe717af3385ec8"}, - {file = "grpcio-1.67.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa8d025fae1595a207b4e47c2e087cb88d47008494db258ac561c00877d4c8f8"}, - {file = "grpcio-1.67.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f95e15db43e75a534420e04822df91f645664bf4ad21dfaad7d51773c80e6bb4"}, - {file = "grpcio-1.67.0-cp310-cp310-win32.whl", hash = "sha256:a6b9a5c18863fd4b6624a42e2712103fb0f57799a3b29651c0e5b8119a519d65"}, - {file = "grpcio-1.67.0-cp310-cp310-win_amd64.whl", hash = "sha256:b6eb68493a05d38b426604e1dc93bfc0137c4157f7ab4fac5771fd9a104bbaa6"}, - {file = "grpcio-1.67.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:e91d154689639932305b6ea6f45c6e46bb51ecc8ea77c10ef25aa77f75443ad4"}, - {file = "grpcio-1.67.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:cb204a742997277da678611a809a8409657b1398aaeebf73b3d9563b7d154c13"}, - {file = "grpcio-1.67.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:ae6de510f670137e755eb2a74b04d1041e7210af2444103c8c95f193340d17ee"}, - {file = "grpcio-1.67.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74b900566bdf68241118f2918d312d3bf554b2ce0b12b90178091ea7d0a17b3d"}, - {file = "grpcio-1.67.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4e95e43447a02aa603abcc6b5e727d093d161a869c83b073f50b9390ecf0fa8"}, - {file = "grpcio-1.67.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0bb94e66cd8f0baf29bd3184b6aa09aeb1a660f9ec3d85da615c5003154bc2bf"}, - {file = "grpcio-1.67.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:82e5bd4b67b17c8c597273663794a6a46a45e44165b960517fe6d8a2f7f16d23"}, - {file = "grpcio-1.67.0-cp311-cp311-win32.whl", hash = 
"sha256:7fc1d2b9fd549264ae585026b266ac2db53735510a207381be509c315b4af4e8"}, - {file = "grpcio-1.67.0-cp311-cp311-win_amd64.whl", hash = "sha256:ac11ecb34a86b831239cc38245403a8de25037b448464f95c3315819e7519772"}, - {file = "grpcio-1.67.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:227316b5631260e0bef8a3ce04fa7db4cc81756fea1258b007950b6efc90c05d"}, - {file = "grpcio-1.67.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d90cfdafcf4b45a7a076e3e2a58e7bc3d59c698c4f6470b0bb13a4d869cf2273"}, - {file = "grpcio-1.67.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:77196216d5dd6f99af1c51e235af2dd339159f657280e65ce7e12c1a8feffd1d"}, - {file = "grpcio-1.67.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15c05a26a0f7047f720da41dc49406b395c1470eef44ff7e2c506a47ac2c0591"}, - {file = "grpcio-1.67.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3840994689cc8cbb73d60485c594424ad8adb56c71a30d8948d6453083624b52"}, - {file = "grpcio-1.67.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:5a1e03c3102b6451028d5dc9f8591131d6ab3c8a0e023d94c28cb930ed4b5f81"}, - {file = "grpcio-1.67.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:682968427a63d898759474e3b3178d42546e878fdce034fd7474ef75143b64e3"}, - {file = "grpcio-1.67.0-cp312-cp312-win32.whl", hash = "sha256:d01793653248f49cf47e5695e0a79805b1d9d4eacef85b310118ba1dfcd1b955"}, - {file = "grpcio-1.67.0-cp312-cp312-win_amd64.whl", hash = "sha256:985b2686f786f3e20326c4367eebdaed3e7aa65848260ff0c6644f817042cb15"}, - {file = "grpcio-1.67.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:8c9a35b8bc50db35ab8e3e02a4f2a35cfba46c8705c3911c34ce343bd777813a"}, - {file = "grpcio-1.67.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:42199e704095b62688998c2d84c89e59a26a7d5d32eed86d43dc90e7a3bd04aa"}, - {file = "grpcio-1.67.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:c4c425f440fb81f8d0237c07b9322fc0fb6ee2b29fbef5f62a322ff8fcce240d"}, - {file = 
"grpcio-1.67.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:323741b6699cd2b04a71cb38f502db98f90532e8a40cb675393d248126a268af"}, - {file = "grpcio-1.67.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:662c8e105c5e5cee0317d500eb186ed7a93229586e431c1bf0c9236c2407352c"}, - {file = "grpcio-1.67.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:f6bd2ab135c64a4d1e9e44679a616c9bc944547357c830fafea5c3caa3de5153"}, - {file = "grpcio-1.67.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:2f55c1e0e2ae9bdd23b3c63459ee4c06d223b68aeb1961d83c48fb63dc29bc03"}, - {file = "grpcio-1.67.0-cp313-cp313-win32.whl", hash = "sha256:fd6bc27861e460fe28e94226e3673d46e294ca4673d46b224428d197c5935e69"}, - {file = "grpcio-1.67.0-cp313-cp313-win_amd64.whl", hash = "sha256:cf51d28063338608cd8d3cd64677e922134837902b70ce00dad7f116e3998210"}, - {file = "grpcio-1.67.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:7f200aca719c1c5dc72ab68be3479b9dafccdf03df530d137632c534bb6f1ee3"}, - {file = "grpcio-1.67.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0892dd200ece4822d72dd0952f7112c542a487fc48fe77568deaaa399c1e717d"}, - {file = "grpcio-1.67.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:f4d613fbf868b2e2444f490d18af472ccb47660ea3df52f068c9c8801e1f3e85"}, - {file = "grpcio-1.67.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c69bf11894cad9da00047f46584d5758d6ebc9b5950c0dc96fec7e0bce5cde9"}, - {file = "grpcio-1.67.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9bca3ca0c5e74dea44bf57d27e15a3a3996ce7e5780d61b7c72386356d231db"}, - {file = "grpcio-1.67.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:014dfc020e28a0d9be7e93a91f85ff9f4a87158b7df9952fe23cc42d29d31e1e"}, - {file = "grpcio-1.67.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d4ea4509d42c6797539e9ec7496c15473177ce9abc89bc5c71e7abe50fc25737"}, - {file = "grpcio-1.67.0-cp38-cp38-win32.whl", hash = 
"sha256:9d75641a2fca9ae1ae86454fd25d4c298ea8cc195dbc962852234d54a07060ad"}, - {file = "grpcio-1.67.0-cp38-cp38-win_amd64.whl", hash = "sha256:cff8e54d6a463883cda2fab94d2062aad2f5edd7f06ae3ed030f2a74756db365"}, - {file = "grpcio-1.67.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:62492bd534979e6d7127b8a6b29093161a742dee3875873e01964049d5250a74"}, - {file = "grpcio-1.67.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eef1dce9d1a46119fd09f9a992cf6ab9d9178b696382439446ca5f399d7b96fe"}, - {file = "grpcio-1.67.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:f623c57a5321461c84498a99dddf9d13dac0e40ee056d884d6ec4ebcab647a78"}, - {file = "grpcio-1.67.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54d16383044e681f8beb50f905249e4e7261dd169d4aaf6e52eab67b01cbbbe2"}, - {file = "grpcio-1.67.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2a44e572fb762c668e4812156b81835f7aba8a721b027e2d4bb29fb50ff4d33"}, - {file = "grpcio-1.67.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:391df8b0faac84d42f5b8dfc65f5152c48ed914e13c522fd05f2aca211f8bfad"}, - {file = "grpcio-1.67.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cfd9306511fdfc623a1ba1dc3bc07fbd24e6cfbe3c28b4d1e05177baa2f99617"}, - {file = "grpcio-1.67.0-cp39-cp39-win32.whl", hash = "sha256:30d47dbacfd20cbd0c8be9bfa52fdb833b395d4ec32fe5cff7220afc05d08571"}, - {file = "grpcio-1.67.0-cp39-cp39-win_amd64.whl", hash = "sha256:f55f077685f61f0fbd06ea355142b71e47e4a26d2d678b3ba27248abfe67163a"}, - {file = "grpcio-1.67.0.tar.gz", hash = "sha256:e090b2553e0da1c875449c8e75073dd4415dd71c9bde6a406240fdf4c0ee467c"}, -] - -[package.extras] -protobuf = ["grpcio-tools (>=1.67.0)"] - -[[package]] -name = "grpcio-status" -version = "1.67.0" -description = "Status proto mapping for gRPC" -optional = false -python-versions = ">=3.8" -files = [ - {file = "grpcio_status-1.67.0-py3-none-any.whl", hash = "sha256:0e79e2e01ba41a6ca6ed9d7a825323c511fe1653a646f8014c7e3c8132527acc"}, 
- {file = "grpcio_status-1.67.0.tar.gz", hash = "sha256:c3e5a86fa007e9e263cd5f988a8a907484da4caab582874ea2a4a6092734046b"}, -] - -[package.dependencies] -googleapis-common-protos = ">=1.5.5" -grpcio = ">=1.67.0" -protobuf = ">=5.26.1,<6.0dev" - [[package]] name = "h11" version = "0.14.0" @@ -3290,8 +3159,8 @@ files = [ [package.dependencies] numpy = [ {version = ">=1.22.4", markers = "python_version < \"3.11\""}, - {version = ">=1.23.2", markers = "python_version == \"3.11\""}, {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, + {version = ">=1.23.2", markers = "python_version == \"3.11\""}, ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" @@ -3633,6 +3502,7 @@ description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs optional = false python-versions = ">=3.8" files = [ + {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, ] @@ -3643,6 +3513,7 @@ description = "A collection of ASN.1-based protocols modules" optional = false python-versions = ">=3.8" files = [ + {file = "pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"}, {file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"}, ] @@ -3791,8 +3662,8 @@ astroid = ">=3.3.4,<=3.4.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = [ {version = ">=0.2", markers = "python_version < \"3.11\""}, - {version = ">=0.3.6", markers = "python_version >= \"3.11\""}, {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, + {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, ] isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" mccabe = ">=0.6,<0.8" @@ -4443,24 +4314,24 @@ python-versions = ">=3.6" files = [ 
{file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:d92f81886165cb14d7b067ef37e142256f1c6a90a65cd156b063a43da1708cfd"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:aa2267c6a303eb483de8d02db2871afb5c5fc15618d894300b88958f729ad74f"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:840f0c7f194986a63d2c2465ca63af8ccbbc90ab1c6001b1978f05119b5e7334"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:024cfe1fc7c7f4e1aff4a81e718109e13409767e4f871443cbff3dba3578203d"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:b5edda50e5e9e15e54a6a8a0070302b00c518a9d32accc2346ad6c984aacd279"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = 
"sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_24_aarch64.whl", hash = "sha256:1707814f0d9791df063f8c19bb51b0d1278b8e9a2353abbb676c2f685dee6afe"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:46d378daaac94f454b3a0e3d8d78cafd78a026b1d71443f4966c696b48a6d899"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:09b055c05697b38ecacb7ac50bdab2240bfca1a0c4872b0fd309bb07dc9aa3a9"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:7048c338b6c86627afb27faecf418768acb6331fc24cfa56c93e8c9780f815fa"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_24_aarch64.whl", hash = "sha256:1dc67314e7e1086c9fdf2680b7b6c2be1c0d8e3a8279f2e993ca2a7545fecf62"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3213ece08ea033eb159ac52ae052a4899b56ecc124bb80020d9bbceeb50258e9"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aab7fd643f71d7946f2ee58cc88c9b7bfc97debd71dcc93e03e2d174628e7e2d"}, {file = 
"ruamel.yaml.clib-0.2.8-cp312-cp312-win32.whl", hash = "sha256:5c365d91c88390c8d0a8545df0b5857172824b1c604e867161e6b3d59a827eaa"}, @@ -4468,7 +4339,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:77159f5d5b5c14f7c34073862a6b7d34944075d9f93e681638f6d753606c6ce6"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3fcc54cb0c8b811ff66082de1680b4b14cf8a81dce0d4fbf665c2265a81e07a1"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4ecbf9c3e19f9562c7fdd462e8d18dd902a47ca046a2e64dba80699f0b6c09b7"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:87ea5ff66d8064301a154b3933ae406b0863402a799b16e4a1d24d9fbbcbe0d3"}, @@ -4476,7 +4347,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_24_aarch64.whl", hash = 
"sha256:305889baa4043a09e5b76f8e2a51d4ffba44259f6b4c72dec8ca56207d9c6fe1"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:665f58bfd29b167039f714c6998178d27ccd83984084c286110ef26b230f259f"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e2b4c44b60eadec492926a7270abb100ef9f72798e18743939bdbf037aab8c28"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e79e5db08739731b0ce4850bed599235d601701d5694c36570a99a0c5ca41a9d"}, @@ -4484,7 +4355,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:a1a45e0bb052edf6a1d3a93baef85319733a888363938e1fc9924cb00c8df24c"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:9eb5dee2772b0f704ca2e45b1713e4e5198c18f515b52743576d196348f374d3"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:184565012b60405d93838167f425713180b949e9d8dd0bbc7b49f074407c5a8b"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a75879bacf2c987c003368cf14bed0ffe99e8e85acfa6c0bfffc21a090f16880"}, @@ -4784,26 +4655,6 @@ files = [ {file = 
"tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, ] -[[package]] -name = "tqdm" -version = "4.66.5" -description = "Fast, Extensible Progress Meter" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tqdm-4.66.5-py3-none-any.whl", hash = "sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd"}, - {file = "tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[package.extras] -dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] -notebook = ["ipywidgets (>=6)"] -slack = ["slack-sdk"] -telegram = ["requests"] - [[package]] name = "typer" version = "0.12.5" diff --git a/pyproject.toml b/pyproject.toml index 9f67494117d..7327862a0b1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -56,7 +56,6 @@ dash-bootstrap-components = "1.6.0" detect-secrets = "1.5.0" google-api-python-client = "2.147.0" google-auth-httplib2 = ">=0.1,<0.3" -google-generativeai = "^0.8.3" jsonschema = "4.23.0" kubernetes = "31.0.0" microsoft-kiota-abstractions = "1.3.3" From 1ba48719e7664fc77e5997bfcccf529ac48c9ce2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rub=C3=A9n=20De=20la=20Torre=20Vico?= Date: Fri, 8 Nov 2024 11:42:47 +0100 Subject: [PATCH 18/21] docs(prowler check kreator): add a note about necessary dependencies --- docs/tutorials/prowler-check-kreator.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/tutorials/prowler-check-kreator.md b/docs/tutorials/prowler-check-kreator.md index b9c0cd48efa..c1a53c12350 100644 --- a/docs/tutorials/prowler-check-kreator.md +++ b/docs/tutorials/prowler-check-kreator.md @@ -27,6 +27,9 @@ Parameters: This tool optionally integrates AI to assist in generating the check code and metadata file content. 
When AI assistance is chosen, the tool uses [Gemini](https://gemini.google.com/) to produce preliminary code and metadata. +???+ note + For this feature to work, you must have the library `google-generativeai` installed in your Python environment. + ???+ warning AI-generated code and metadata might contain errors or require adjustments to align with specific Prowler requirements. Carefully review all AI-generated content before committing. From cc577d1bb1eb3565f5c5d2cfcc81bf4d96089f74 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rub=C3=A9n=20De=20la=20Torre=20Vico?= Date: Fri, 8 Nov 2024 12:18:18 +0100 Subject: [PATCH 19/21] chore(prowler check kreator): change docstrings format follow google recommendations --- .../lib/metadata_types.py | 21 ++++++++---- util/prowler_check_kreator/lib/templates.py | 32 +++++++++++++------ .../prowler_check_kreator.py | 4 --- 3 files changed, 37 insertions(+), 20 deletions(-) diff --git a/util/prowler_check_kreator/lib/metadata_types.py b/util/prowler_check_kreator/lib/metadata_types.py index 11052ea4b8f..9491472e9e7 100644 --- a/util/prowler_check_kreator/lib/metadata_types.py +++ b/util/prowler_check_kreator/lib/metadata_types.py @@ -1,8 +1,11 @@ def get_metadata_valid_check_type(provider: str = "aws") -> list: """Get the valid check types for the provider - Keyword arguments: - provider -- The provider of the service + Args: + provider: The Prowler provider. + + Returns: + A list of valid check types for the given provider. """ check_types = [] @@ -115,8 +118,11 @@ def get_metadata_valid_check_type(provider: str = "aws") -> list: def get_metadata_valid_resource_type(provider: str = "aws") -> set: """Get the valid resource types for the provider - Keyword arguments: - provider -- The provider of the service + Args: + provider: The Prowler provider. + + Returns: + A set of valid resource types for the given provider. 
""" valid_resource_types = set() @@ -226,8 +232,11 @@ def get_metadata_valid_resource_type(provider: str = "aws") -> set: def get_metadata_placeholder_resource_type(provider: str = "aws") -> str: """Get the placeholder for the resource type for the provider - Keyword arguments: - provider -- The provider of the service + Args: + provider: The Prowler provider. + + Returns: + A placeholder for the resource type for the given provider. """ placeholder = "" diff --git a/util/prowler_check_kreator/lib/templates.py b/util/prowler_check_kreator/lib/templates.py index fd5d61fbd78..bc2bf3c0116 100644 --- a/util/prowler_check_kreator/lib/templates.py +++ b/util/prowler_check_kreator/lib/templates.py @@ -1,10 +1,16 @@ def load_check_template(provider: str, service: str, check_name: str) -> str: - """Load the template for the check file + """Load the template for the check file. - Keyword arguments: - provider -- The provider of the service - service -- The service to check - check_name -- The name of the check + Args: + provider (str): The provider of the service. + service (str): The service to check. + check_name (str): The name of the check. + + Returns: + A check template used when the user does not want to generate the check with AI. + + Raises: + ValueError: If the provider is not implemented yet. """ if provider == "aws": return f""" @@ -37,12 +43,18 @@ def execute(self) -> list[Check_Report_AWS]: def load_test_template(provider: str, service: str, check_name: str) -> str: - """Load the template for the test file + """Load the template for the test file. + + Args: + provider (str): The provider of the service (e.g., "aws"). + service (str): The service to check (e.g., "s3"). + check_name (str): The name of the check (e.g., "check_bucket_encryption"). + + Returns: + A test template used when the user does not want to generate the check with AI. 
- Keyword arguments: - provider -- The provider of the service - service -- The service to check - check_name -- The name of the check + Raises: + ValueError: If the template for the given provider is not implemented. """ if provider == "aws": return f""" diff --git a/util/prowler_check_kreator/prowler_check_kreator.py b/util/prowler_check_kreator/prowler_check_kreator.py index 52a1ad54535..d7928936680 100644 --- a/util/prowler_check_kreator/prowler_check_kreator.py +++ b/util/prowler_check_kreator/prowler_check_kreator.py @@ -9,10 +9,6 @@ ) -# TODO: Support azure, gcp and kubernetes providers (only need to add check template, test template and metadata types) -# TODO: Add support for other LLMs like OpenAI's GPT or Ollama locally -# TODO: Add support to make configurable checks -# TODO: Improve the check generation with more context class ProwlerCheckKreator: def __init__(self, provider: str, check_name: str): # Validate provider From 84c7b1a30ce54ba1d658ca3abc86f454a249ab63 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rub=C3=A9n=20De=20la=20Torre=20Vico?= Date: Fri, 8 Nov 2024 12:22:14 +0100 Subject: [PATCH 20/21] feat(prowler check kreator): add user input to include Gemini API key --- util/prowler_check_kreator/lib/llms/gemini.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/util/prowler_check_kreator/lib/llms/gemini.py b/util/prowler_check_kreator/lib/llms/gemini.py index d46bb1ab257..2880d5efdc4 100644 --- a/util/prowler_check_kreator/lib/llms/gemini.py +++ b/util/prowler_check_kreator/lib/llms/gemini.py @@ -16,7 +16,11 @@ def __init__(self, model: str = "gemini-1.5-flash"): if os.getenv("GEMINI_API_KEY"): self.api_key = os.getenv("GEMINI_API_KEY") else: - raise Exception("GEMINI_API_KEY environment variable is not set") + self.api_key = input( + "GEMINI_API_KEY is not set, please enter the API key: " + ) + if not self.api_key: + raise Exception("GEMINI_API_KEY is required") if model not in ["gemini-1.5-flash", "gemini-1.5-pro", 
"gemini-1.0-pro"]: raise Exception("Invalid Gemini AI model") From 5e7765531c24dce89b42cdf0e51dac533a46f145 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rub=C3=A9n=20De=20la=20Torre=20Vico?= Date: Fri, 8 Nov 2024 16:02:36 +0100 Subject: [PATCH 21/21] chore(prowler check kreator): change logic of execution --- util/prowler_check_kreator/lib/llms/gemini.py | 91 +++--- util/prowler_check_kreator/lib/templates.py | 21 +- .../prowler_check_kreator.py | 290 ++++++------------ 3 files changed, 155 insertions(+), 247 deletions(-) diff --git a/util/prowler_check_kreator/lib/llms/gemini.py b/util/prowler_check_kreator/lib/llms/gemini.py index 2880d5efdc4..42e47c8c383 100644 --- a/util/prowler_check_kreator/lib/llms/gemini.py +++ b/util/prowler_check_kreator/lib/llms/gemini.py @@ -1,6 +1,5 @@ import json import os -import re import google.generativeai as genai @@ -74,45 +73,49 @@ def _generate_content(self, prompt_parts: list) -> str: except Exception as e: raise Exception(f"Error generating content with Gemini AI: {e}") - def _prepare_check_prompt(self, check_name: str, check_reference: str) -> list: + def _prepare_check_prompt(self, check_name: str, context: str) -> list: """Prepare the prompt for generating the check.""" - match = re.search( - r"class\s+([A-Za-z_][A-Za-z0-9_]*)\s*\((.*?)\)\s*:", check_reference - ) - if not match: - raise ValueError("No valid class definition found in the test reference.") - class_name = match.group(1) - prompt_parts = [ - f"Your task is to create a new security check called '{check_name}' for Prowler (a Cloud Security tool).", - "The control is a Python class that inherits from the Check class and has only one method called execute.", - "The execute method must return a list of Check_Report_AWS objects.", - "I need the answer only with Python formatted text.", - "Use the following check as inspiration to create the new check: ", - f"{class_name}:", - check_reference, + "You are a AWS cybersecurity engineer working in Prowler, an open-source 
Cloud Security tool to audit Cloud environments in an automated way.", + f"Your task is to create a new security check called '{check_name}' based on the following context:\n{context}", + "TA check is a Python class that inherits from the Check class and has only one method called execute.\n", + "The execute method must return a list of Check_Report_AWS objects.\n", + "Limit to Python code only.\n", + "Use the following check as inspiration about the format to create the new check:\n", + "ec2_instance_port_ssh_exposed_to_internet:", + "from prowler.lib.check.models import Check, Check_Report_AWS\nfrom prowler.providers.aws.services.ec2.ec2_client import ec2_client\nfrom prowler.providers.aws.services.ec2.lib.instance import get_instance_public_status\nfrom prowler.providers.aws.services.ec2.lib.security_groups import check_security_group\nfrom prowler.providers.aws.services.vpc.vpc_client import vpc_client\n\n\nclass ec2_instance_port_ssh_exposed_to_internet(Check):\n\t# EC2 Instances with SSH port 22 open to the Internet will be flagged as FAIL with a severity of medium if the instance has no public IP, high if the instance has a public IP but is in a private subnet, and critical if the instance has a public IP and is in a public subnet.\n\tdef execute(self):\n\t\tfindings = []\n\t\tcheck_ports = [22]\n\t\tfor instance in ec2_client.instances:\n\t\t\treport = Check_Report_AWS(self.metadata())\n\t\t\treport.region = instance.region\n\t\t\treport.status = 'PASS'\n\t\t\treport.status_extended = f'Instance {instance.id} does not have SSH port 22 open to the Internet.'\n\t\t\treport.resource_id = instance.id\n\t\t\treport.resource_arn = instance.arn\n\t\t\treport.resource_tags = instance.tags\n\t\t\tis_open_port = False\n\t\t\tif instance.security_groups:\n\t\t\t\tfor sg in ec2_client.security_groups.values():\n\t\t\t\t\tif sg.id in instance.security_groups:\n\t\t\t\t\t\tfor ingress_rule in sg.ingress_rules:\n\t\t\t\t\t\t\tif 
check_security_group(\n\t\t\t\t\t\t\t\tingress_rule, 'tcp', check_ports, any_address=True\n\t\t\t\t\t\t\t):\n\t\t\t\t\t\t\t\t# The port is open, now check if the instance is in a public subnet with a public IP\n\t\t\t\t\t\t\t\treport.status = 'FAIL'\n\t\t\t\t\t\t\t\t(\n\t\t\t\t\t\t\t\t\treport.status_extended,\n\t\t\t\t\t\t\t\t\treport.check_metadata.Severity,\n\t\t\t\t\t\t\t\t) = get_instance_public_status(\n\t\t\t\t\t\t\t\t\tvpc_client.vpc_subnets, instance, 'SSH'\n\t\t\t\t\t\t\t\t)\n\t\t\t\t\t\t\t\tis_open_port = True\n\t\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\tif is_open_port:\n\t\t\t\t\t\t\tbreak\n\t\t\tfindings.append(report)\n\t\treturn findings\n", + "s3_bucket_default_encryption:", + "from prowler.lib.check.models import Check, Check_Report_AWS\nfrom prowler.providers.aws.services.s3.s3_client import s3_client\n\n\nclass s3_bucket_default_encryption(Check):\n\tdef execute(self):\n\t\tfindings = []\n\t\tfor arn, bucket in s3_client.buckets.items():\n\t\t\treport = Check_Report_AWS(self.metadata())\n\t\t\treport.region = bucket.region\n\t\t\treport.resource_id = bucket.name\n\t\t\treport.resource_arn = arn\n\t\t\treport.resource_tags = bucket.tags\n\t\t\tif bucket.encryption:\n\t\t\t\treport.status = 'PASS'\n\t\t\t\treport.status_extended = f'S3 Bucket {bucket.name} has Server Side Encryption with {bucket.encryption}.'\n\t\t\telse:\n\t\t\t\treport.status = 'FAIL'\n\t\t\t\treport.status_extended = f'S3 Bucket {bucket.name} does not have Server Side Encryption enabled.'\n\t\t\tfindings.append(report)\n\t\treturn findings\n", + "bedrock_guardrail_prompt_attack_filter_enabled:", + "from prowler.lib.check.models import Check, Check_Report_AWS\nfrom prowler.providers.aws.services.bedrock.bedrock_client import bedrock_client\n\n\nclass bedrock_guardrail_prompt_attack_filter_enabled(Check):\n\tdef execute(self):\n\t\tfindings = []\n\t\tfor guardrail in bedrock_client.guardrails.values():\n\t\t\treport = Check_Report_AWS(self.metadata())\n\t\t\treport.region = 
guardrail.region\n\t\t\treport.resource_id = guardrail.id\n\t\t\treport.resource_arn = guardrail.arn\n\t\t\treport.resource_tags = guardrail.tags\n\t\t\treport.status = 'PASS'\n\t\t\treport.status_extended = f'Bedrock Guardrail {guardrail.name} is configured to detect and block prompt attacks with a HIGH strength.'\n\t\t\tif not guardrail.prompt_attack_filter_strength:\n\t\t\t\treport.status = 'FAIL'\n\t\t\t\treport.status_extended = f'Bedrock Guardrail {guardrail.name} is not configured to block prompt attacks.'\n\t\t\telif guardrail.prompt_attack_filter_strength != 'HIGH':\n\t\t\t\treport.status = 'FAIL'\n\t\t\t\treport.status_extended = f'Bedrock Guardrail {guardrail.name} is configured to block prompt attacks but with a filter strength of {guardrail.prompt_attack_filter_strength}, not HIGH.'\n\t\t\tfindings.append(report)\n\n\t\treturn findings", + "cloudwatch_alarm_actions_enabled:", + "from prowler.lib.check.models import Check, Check_Report_AWS\nfrom prowler.providers.aws.services.cloudwatch.cloudwatch_client import (\n\tcloudwatch_client,\n)\n\n\nclass cloudwatch_alarm_actions_enabled(Check):\n\tdef execute(self):\n\t\tfindings = []\n\t\tfor metric_alarm in cloudwatch_client.metric_alarms:\n\t\t\treport = Check_Report_AWS(self.metadata())\n\t\t\treport.region = metric_alarm.region\n\t\t\treport.resource_id = metric_alarm.name\n\t\t\treport.resource_arn = metric_alarm.arn\n\t\t\treport.resource_tags = metric_alarm.tags\n\t\t\treport.status = 'PASS'\n\t\t\treport.status_extended = (\n\t\t\t\tf'CloudWatch metric alarm {metric_alarm.name} has actions enabled.'\n\t\t\t)\n\t\t\tif not metric_alarm.actions_enabled:\n\t\t\t\treport.status = 'FAIL'\n\t\t\t\treport.status_extended = f'CloudWatch metric alarm {metric_alarm.name} does not have actions enabled.'\n\t\t\tfindings.append(report)\n\t\treturn findings", + "awslambda_function_not_publicly_accessible:", + "from prowler.lib.check.models import Check, Check_Report_AWS\nfrom 
prowler.providers.aws.services.awslambda.awslambda_client import awslambda_client\nfrom prowler.providers.aws.services.iam.lib.policy import is_policy_public\n\n\nclass awslambda_function_not_publicly_accessible(Check):\n\tdef execute(self):\n\t\tfindings = []\n\t\tfor function in awslambda_client.functions.values():\n\t\t\treport = Check_Report_AWS(self.metadata())\n\t\t\treport.region = function.region\n\t\t\treport.resource_id = function.name\n\t\t\treport.resource_arn = function.arn\n\t\t\treport.resource_tags = function.tags\n\n\t\t\treport.status = 'PASS'\n\t\t\treport.status_extended = f'Lambda function {function.name} has a policy resource-based policy not public.'\n\t\t\tif is_policy_public(\n\t\t\t\tfunction.policy,\n\t\t\t\tawslambda_client.audited_account,\n\t\t\t\tis_cross_account_allowed=True,\n\t\t\t):\n\t\t\t\treport.status = 'FAIL'\n\t\t\t\treport.status_extended = f'Lambda function {function.name} has a policy resource-based policy with public access.'\n\n\t\t\tfindings.append(report)\n\n\t\treturn findings", f"{check_name}:", ] return prompt_parts - def _prepare_test_prompt(self, check_name: str, test_reference: str) -> list: + def _prepare_test_prompt(self, check_name: str) -> list: """Prepare the prompt for generating the test.""" - match = re.search(r"class\s+(Test_[A-Za-z_][A-Za-z0-9_]*)\s*:", test_reference) - if not match: - raise ValueError("No valid class definition found in the test reference.") - class_name = match.group(1) - prompt_parts = [ - f"Your task is to create a new test for the security check '{check_name}' in Prowler (a Cloud Security tool).", + "You are a AWS cybersecurity engineer working in Prowler, an open-source Cloud Security tool to audit Cloud environments in an automated way.", + f"Your task is to create a new unit test for the security check '{check_name}'.", "The test must have one or more methods that start with the word 'test'.", "The test methods must use the assert statement to check the results of the check.", 
"I need the answer only with Python formatted text.", "Use the following test as inspiration to create the new test: ", - f"{class_name}:", - test_reference, - f"Test_{check_name}:", + "ec2_instance_port_ssh_exposed_to_internet:", + "from unittest import mock\n\nfrom boto3 import client, resource\nfrom moto import mock_aws\n\nfrom tests.providers.aws.utils import (\n\tAWS_REGION_EU_WEST_1,\n\tAWS_REGION_US_EAST_1,\n\tset_mocked_aws_provider,\n)\n\n\nclass Test_ec2_instance_port_ssh_exposed_to_internet:\n\t@mock_aws\n\tdef test_no_ec2_instances(self):\n\t\t# Create EC2 Mocked Resources\n\t\tec2_client = client('ec2', region_name=AWS_REGION_US_EAST_1)\n\t\tec2_client.create_vpc(CidrBlock='10.0.0.0/16')\n\n\t\tfrom prowler.providers.aws.services.ec2.ec2_service import EC2\n\t\tfrom prowler.providers.aws.services.vpc.vpc_service import VPC\n\n\t\taws_provider = set_mocked_aws_provider(\n\t\t\t[AWS_REGION_EU_WEST_1, AWS_REGION_US_EAST_1]\n\t\t)\n\n\t\twith mock.patch(\n\t\t\t'prowler.providers.common.provider.Provider.get_global_provider',\n\t\t\treturn_value=aws_provider,\n\t\t), mock.patch(\n\t\t\t'prowler.providers.aws.services.ec2.ec2_instance_port_ssh_exposed_to_internet.ec2_instance_port_ssh_exposed_to_internet.ec2_client',\n\t\t\tnew=EC2(aws_provider),\n\t\t), mock.patch(\n\t\t\t'prowler.providers.aws.services.ec2.ec2_instance_port_ssh_exposed_to_internet.ec2_instance_port_ssh_exposed_to_internet.vpc_client',\n\t\t\tnew=VPC(aws_provider),\n\t\t):\n\t\t\t# Test Check\n\t\t\tfrom prowler.providers.aws.services.ec2.ec2_instance_port_ssh_exposed_to_internet.ec2_instance_port_ssh_exposed_to_internet import (\n\t\t\t\tec2_instance_port_ssh_exposed_to_internet,\n\t\t\t)\n\n\t\t\tcheck = ec2_instance_port_ssh_exposed_to_internet()\n\t\t\tresult = check.execute()\n\n\t\t\tassert len(result) == 0\n\n\t@mock_aws\n\tdef test_ec2_instance_no_port_exposed(self):\n\t\t# Create EC2 Mocked Resources\n\t\tec2_client = client('ec2', 
region_name=AWS_REGION_US_EAST_1)\n\t\tec2_resource = resource('ec2', region_name=AWS_REGION_US_EAST_1)\n\t\tvpc_id = ec2_client.create_vpc(CidrBlock='10.0.0.0/16')['Vpc']['VpcId']\n\t\tdefault_sg = ec2_client.describe_security_groups(GroupNames=['default'])[\n\t\t\t'SecurityGroups'\n\t\t][0]\n\t\tdefault_sg_id = default_sg['GroupId']\n\t\tec2_client.authorize_security_group_ingress(\n\t\t\tGroupId=default_sg_id,\n\t\t\tIpPermissions=[\n\t\t\t\t{\n\t\t\t\t\t'IpProtocol': 'tcp',\n\t\t\t\t\t'FromPort': 22,\n\t\t\t\t\t'ToPort': 22,\n\t\t\t\t\t'IpRanges': [{'CidrIp': '123.123.123.123/32'}],\n\t\t\t\t}\n\t\t\t],\n\t\t)\n\t\tsubnet_id = ec2_client.create_subnet(VpcId=vpc_id, CidrBlock='10.0.0.0/16')[\n\t\t\t'Subnet'\n\t\t]['SubnetId']\n\t\tinstance_id = ec2_resource.create_instances(\n\t\t\tImageId='ami-12345678',\n\t\t\tMinCount=1,\n\t\t\tMaxCount=1,\n\t\t\tInstanceType='t2.micro',\n\t\t\tSecurityGroupIds=[default_sg_id],\n\t\t\tSubnetId=subnet_id,\n\t\t\tTagSpecifications=[\n\t\t\t\t{'ResourceType': 'instance', 'Tags': [{'Key': 'Name', 'Value': 'test'}]}\n\t\t\t],\n\t\t)[0].id\n\n\t\tfrom prowler.providers.aws.services.ec2.ec2_service import EC2\n\t\tfrom prowler.providers.aws.services.vpc.vpc_service import VPC\n\n\t\taws_provider = set_mocked_aws_provider(\n\t\t\t[AWS_REGION_EU_WEST_1, AWS_REGION_US_EAST_1]\n\t\t)\n\n\t\twith mock.patch(\n\t\t\t'prowler.providers.common.provider.Provider.get_global_provider',\n\t\t\treturn_value=aws_provider,\n\t\t), mock.patch(\n\t\t\t'prowler.providers.aws.services.ec2.ec2_instance_port_ssh_exposed_to_internet.ec2_instance_port_ssh_exposed_to_internet.ec2_client',\n\t\t\tnew=EC2(aws_provider),\n\t\t), mock.patch(\n\t\t\t'prowler.providers.aws.services.ec2.ec2_instance_port_ssh_exposed_to_internet.ec2_instance_port_ssh_exposed_to_internet.vpc_client',\n\t\t\tnew=VPC(aws_provider),\n\t\t):\n\t\t\t# Test Check\n\t\t\tfrom 
prowler.providers.aws.services.ec2.ec2_instance_port_ssh_exposed_to_internet.ec2_instance_port_ssh_exposed_to_internet import (\n\t\t\t\tec2_instance_port_ssh_exposed_to_internet,\n\t\t\t)\n\n\t\t\tcheck = ec2_instance_port_ssh_exposed_to_internet()\n\t\t\tresult = check.execute()\n\n\t\t\tassert len(result) == 1\n\t\t\tassert result[0].status == 'PASS'\n\t\t\tassert (\n\t\t\t\tresult[0].status_extended\n\t\t\t\t== f'Instance {instance_id} does not have SSH port 22 open to the Internet.'\n\t\t\t)\n\t\t\tassert result[0].resource_id == instance_id\n\t\t\tassert (\n\t\t\t\tresult[0].resource_arn\n\t\t\t\t== f'arn:{aws_provider.identity.partition}:ec2:{AWS_REGION_US_EAST_1}:{aws_provider.identity.account}:instance/{instance_id}'\n\t\t\t)\n\t\t\tassert result[0].resource_tags == [{'Key': 'Name', 'Value': 'test'}]\n\t\t\tassert result[0].region == AWS_REGION_US_EAST_1\n\t\t\tassert result[0].check_metadata.Severity == 'critical'\n\n\t@mock_aws\n\tdef test_ec2_instance_exposed_port_in_private_subnet(self):\n\t\t# Create EC2 Mocked Resources\n\t\tec2_client = client('ec2', region_name=AWS_REGION_US_EAST_1)\n\t\tec2_resource = resource('ec2', region_name=AWS_REGION_US_EAST_1)\n\t\tvpc_id = ec2_client.create_vpc(CidrBlock='10.0.0.0/16')['Vpc']['VpcId']\n\t\tdefault_sg = ec2_client.describe_security_groups(GroupNames=['default'])[\n\t\t\t'SecurityGroups'\n\t\t][0]\n\t\tdefault_sg_id = default_sg['GroupId']\n\t\tec2_client.authorize_security_group_ingress(\n\t\t\tGroupId=default_sg_id,\n\t\t\tIpPermissions=[\n\t\t\t\t{\n\t\t\t\t\t'IpProtocol': 'tcp',\n\t\t\t\t\t'FromPort': 22,\n\t\t\t\t\t'ToPort': 22,\n\t\t\t\t\t'IpRanges': [{'CidrIp': '0.0.0.0/0'}],\n\t\t\t\t}\n\t\t\t],\n\t\t)\n\t\tsubnet_id = ec2_client.create_subnet(VpcId=vpc_id, CidrBlock='10.0.0.0/16')[\n\t\t\t'Subnet'\n\t\t]['SubnetId']\n\t\tinstance_id = 
ec2_resource.create_instances(\n\t\t\tImageId='ami-12345678',\n\t\t\tMinCount=1,\n\t\t\tMaxCount=1,\n\t\t\tInstanceType='t2.micro',\n\t\t\tSecurityGroupIds=[default_sg_id],\n\t\t\tSubnetId=subnet_id,\n\t\t\tTagSpecifications=[\n\t\t\t\t{'ResourceType': 'instance', 'Tags': [{'Key': 'Name', 'Value': 'test'}]}\n\t\t\t],\n\t\t)[0].id\n\n\t\tfrom prowler.providers.aws.services.ec2.ec2_service import EC2\n\t\tfrom prowler.providers.aws.services.vpc.vpc_service import VPC\n\n\t\taws_provider = set_mocked_aws_provider(\n\t\t\t[AWS_REGION_EU_WEST_1, AWS_REGION_US_EAST_1]\n\t\t)\n\n\t\twith mock.patch(\n\t\t\t'prowler.providers.common.provider.Provider.get_global_provider',\n\t\t\treturn_value=aws_provider,\n\t\t), mock.patch(\n\t\t\t'prowler.providers.aws.services.ec2.ec2_instance_port_ssh_exposed_to_internet.ec2_instance_port_ssh_exposed_to_internet.ec2_client',\n\t\t\tnew=EC2(aws_provider),\n\t\t), mock.patch(\n\t\t\t'prowler.providers.aws.services.ec2.ec2_instance_port_ssh_exposed_to_internet.ec2_instance_port_ssh_exposed_to_internet.vpc_client',\n\t\t\tnew=VPC(aws_provider),\n\t\t):\n\t\t\t# Test Check\n\t\t\tfrom prowler.providers.aws.services.ec2.ec2_instance_port_ssh_exposed_to_internet.ec2_instance_port_ssh_exposed_to_internet import (\n\t\t\t\tec2_instance_port_ssh_exposed_to_internet,\n\t\t\t)\n\n\t\t\tcheck = ec2_instance_port_ssh_exposed_to_internet()\n\t\t\tresult = check.execute()\n\n\t\t\tassert len(result) == 1\n\t\t\tassert result[0].status == 'FAIL'\n\t\t\tassert (\n\t\t\t\tresult[0].status_extended\n\t\t\t\t== f'Instance {instance_id} has SSH exposed to 0.0.0.0/0 but with no public IP address.'\n\t\t\t)\n\t\t\tassert result[0].resource_id == instance_id\n\t\t\tassert (\n\t\t\t\tresult[0].resource_arn\n\t\t\t\t== f'arn:{aws_provider.identity.partition}:ec2:{AWS_REGION_US_EAST_1}:{aws_provider.identity.account}:instance/{instance_id}'\n\t\t\t)\n\t\t\tassert result[0].resource_tags == [{'Key': 'Name', 'Value': 'test'}]\n\t\t\tassert result[0].region == 
AWS_REGION_US_EAST_1\n\t\t\tassert result[0].check_metadata.Severity == 'medium'", + "s3_bucket_default_encryption:", + "from unittest import mock\n\nfrom boto3 import client\nfrom moto import mock_aws\n\nfrom tests.providers.aws.utils import AWS_REGION_US_EAST_1, set_mocked_aws_provider\n\n\nclass Test_s3_bucket_default_encryption:\n\t@mock_aws\n\tdef test_bucket_no_encryption(self):\n\t\ts3_client_us_east_1 = client('s3', region_name=AWS_REGION_US_EAST_1)\n\t\tbucket_name_us = 'bucket_test_us'\n\t\ts3_client_us_east_1.create_bucket(Bucket=bucket_name_us)\n\n\t\tfrom prowler.providers.aws.services.s3.s3_service import S3\n\n\t\taws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1])\n\n\t\twith mock.patch(\n\t\t\t'prowler.providers.common.provider.Provider.get_global_provider',\n\t\t\treturn_value=aws_provider,\n\t\t):\n\t\t\twith mock.patch(\n\t\t\t\t'prowler.providers.aws.services.s3.s3_bucket_default_encryption.s3_bucket_default_encryption.s3_client',\n\t\t\t\tnew=S3(aws_provider),\n\t\t\t):\n\t\t\t\t# Test Check\n\t\t\t\tfrom prowler.providers.aws.services.s3.s3_bucket_default_encryption.s3_bucket_default_encryption import (\n\t\t\t\t\ts3_bucket_default_encryption,\n\t\t\t\t)\n\n\t\t\t\tcheck = s3_bucket_default_encryption()\n\t\t\t\tresult = check.execute()\n\n\t\t\t\tassert len(result) == 1\n\t\t\t\tassert result[0].status == 'FAIL'\n\t\t\t\tassert (\n\t\t\t\t\tresult[0].status_extended\n\t\t\t\t\t== f'S3 Bucket {bucket_name_us} does not have Server Side Encryption enabled.'\n\t\t\t\t)\n\t\t\t\tassert result[0].resource_id == bucket_name_us\n\t\t\t\tassert (\n\t\t\t\t\tresult[0].resource_arn\n\t\t\t\t\t== f'arn:{aws_provider.identity.partition}:s3:::{bucket_name_us}'\n\t\t\t\t)\n\t\t\t\tassert result[0].region == AWS_REGION_US_EAST_1\n\n\t@mock_aws\n\tdef test_bucket_kms_encryption(self):\n\t\ts3_client_us_east_1 = client('s3', region_name=AWS_REGION_US_EAST_1)\n\t\tbucket_name_us = 
'bucket_test_us'\n\t\ts3_client_us_east_1.create_bucket(\n\t\t\tBucket=bucket_name_us, ObjectOwnership='BucketOwnerEnforced'\n\t\t)\n\t\tsse_config = {\n\t\t\t'Rules': [\n\t\t\t\t{\n\t\t\t\t\t'ApplyServerSideEncryptionByDefault': {\n\t\t\t\t\t\t'SSEAlgorithm': 'aws:kms',\n\t\t\t\t\t\t'KMSMasterKeyID': '12345678',\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t]\n\t\t}\n\n\t\ts3_client_us_east_1.put_bucket_encryption(\n\t\t\tBucket=bucket_name_us, ServerSideEncryptionConfiguration=sse_config\n\t\t)\n\n\t\tfrom prowler.providers.aws.services.s3.s3_service import S3\n\n\t\taws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1])\n\n\t\twith mock.patch(\n\t\t\t'prowler.providers.common.provider.Provider.get_global_provider',\n\t\t\treturn_value=aws_provider,\n\t\t):\n\t\t\twith mock.patch(\n\t\t\t\t'prowler.providers.aws.services.s3.s3_bucket_default_encryption.s3_bucket_default_encryption.s3_client',\n\t\t\t\tnew=S3(aws_provider),\n\t\t\t):\n\t\t\t\t# Test Check\n\t\t\t\tfrom prowler.providers.aws.services.s3.s3_bucket_default_encryption.s3_bucket_default_encryption import (\n\t\t\t\t\ts3_bucket_default_encryption,\n\t\t\t\t)\n\n\t\t\t\tcheck = s3_bucket_default_encryption()\n\t\t\t\tresult = check.execute()\n\n\t\t\t\tassert len(result) == 1\n\t\t\t\tassert result[0].status == 'PASS'\n\t\t\t\tassert (\n\t\t\t\t\tresult[0].status_extended\n\t\t\t\t\t== f'S3 Bucket {bucket_name_us} has Server Side Encryption with aws:kms.'\n\t\t\t\t)\n\t\t\t\tassert result[0].resource_id == bucket_name_us\n\t\t\t\tassert (\n\t\t\t\t\tresult[0].resource_arn\n\t\t\t\t\t== f'arn:{aws_provider.identity.partition}:s3:::{bucket_name_us}'\n\t\t\t\t)\n\t\t\t\tassert result[0].region == AWS_REGION_US_EAST_1", + "cloudwatch_alarm_actions_enabled:", + "from unittest import mock\n\nfrom boto3 import client\nfrom moto import mock_aws\n\nfrom tests.providers.aws.utils import AWS_REGION_US_EAST_1, set_mocked_aws_provider\n\n\nclass Test_cloudwatch_alarm_actions_enabled:\n\t@mock_aws\n\tdef 
test_no_cloudwatch_alarms(self):\n\t\tcloudwatch_client = client('cloudwatch', region_name=AWS_REGION_US_EAST_1)\n\t\tcloudwatch_client.metric_alarms = []\n\n\t\tfrom prowler.providers.aws.services.cloudwatch.cloudwatch_service import (\n\t\t\tCloudWatch,\n\t\t)\n\n\t\taws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1])\n\n\t\twith mock.patch(\n\t\t\t'prowler.providers.common.provider.Provider.get_global_provider',\n\t\t\treturn_value=aws_provider,\n\t\t), mock.patch(\n\t\t\t'prowler.providers.aws.services.cloudwatch.cloudwatch_alarm_actions_enabled.cloudwatch_alarm_actions_enabled.cloudwatch_client',\n\t\t\tnew=CloudWatch(aws_provider),\n\t\t):\n\n\t\t\tfrom prowler.providers.aws.services.cloudwatch.cloudwatch_alarm_actions_enabled.cloudwatch_alarm_actions_enabled import (\n\t\t\t\tcloudwatch_alarm_actions_enabled,\n\t\t\t)\n\n\t\t\tcheck = cloudwatch_alarm_actions_enabled()\n\t\t\tresult = check.execute()\n\n\t\t\tassert len(result) == 0\n\n\t@mock_aws\n\tdef test_cloudwatch_alarms_actions_enabled(self):\n\t\tcloudwatch_client = client('cloudwatch', region_name=AWS_REGION_US_EAST_1)\n\t\tcloudwatch_client.put_metric_alarm(\n\t\t\tAlarmName='test_alarm',\n\t\t\tAlarmDescription='Test alarm',\n\t\t\tActionsEnabled=True,\n\t\t\tAlarmActions=['arn:aws:sns:us-east-1:123456789012:my-sns-topic'],\n\t\t\tEvaluationPeriods=1,\n\t\t\tComparisonOperator='GreaterThanThreshold',\n\t\t)\n\n\t\tfrom prowler.providers.aws.services.cloudwatch.cloudwatch_service import (\n\t\t\tCloudWatch,\n\t\t)\n\n\t\taws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1])\n\n\t\twith mock.patch(\n\t\t\t'prowler.providers.common.provider.Provider.get_global_provider',\n\t\t\treturn_value=aws_provider,\n\t\t), mock.patch(\n\t\t\t'prowler.providers.aws.services.cloudwatch.cloudwatch_alarm_actions_enabled.cloudwatch_alarm_actions_enabled.cloudwatch_client',\n\t\t\tnew=CloudWatch(aws_provider),\n\t\t):\n\n\t\t\tfrom 
prowler.providers.aws.services.cloudwatch.cloudwatch_alarm_actions_enabled.cloudwatch_alarm_actions_enabled import (\n\t\t\t\tcloudwatch_alarm_actions_enabled,\n\t\t\t)\n\n\t\t\tcheck = cloudwatch_alarm_actions_enabled()\n\t\t\tresult = check.execute()\n\n\t\t\tassert len(result) == 1\n\t\t\tassert result[0].status == 'PASS'\n\t\t\tassert (\n\t\t\t\tresult[0].status_extended\n\t\t\t\t== 'CloudWatch metric alarm test_alarm has actions enabled.'\n\t\t\t)\n\t\t\tassert result[0].resource_id == 'test_alarm'\n\t\t\tassert (\n\t\t\t\tresult[0].resource_arn\n\t\t\t\t== 'arn:aws:cloudwatch:us-east-1:123456789012:alarm:test_alarm'\n\t\t\t)\n\t\t\tassert result[0].region == AWS_REGION_US_EAST_1\n\t\t\tassert result[0].resource_tags == []\n\n\t@mock_aws\n\tdef test_cloudwatch_alarms_actions_disabled(self):\n\t\tcloudwatch_client = client('cloudwatch', region_name=AWS_REGION_US_EAST_1)\n\t\tcloudwatch_client.put_metric_alarm(\n\t\t\tAlarmName='test_alarm',\n\t\t\tAlarmDescription='Test alarm',\n\t\t\tActionsEnabled=False,\n\t\t\tAlarmActions=['arn:aws:sns:us-east-1:123456789012:my-sns-topic'],\n\t\t\tEvaluationPeriods=1,\n\t\t\tComparisonOperator='GreaterThanThreshold',\n\t\t)\n\n\t\tfrom prowler.providers.aws.services.cloudwatch.cloudwatch_service import (\n\t\t\tCloudWatch,\n\t\t)\n\n\t\taws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1])\n\n\t\twith mock.patch(\n\t\t\t'prowler.providers.common.provider.Provider.get_global_provider',\n\t\t\treturn_value=aws_provider,\n\t\t), mock.patch(\n\t\t\t'prowler.providers.aws.services.cloudwatch.cloudwatch_alarm_actions_enabled.cloudwatch_alarm_actions_enabled.cloudwatch_client',\n\t\t\tnew=CloudWatch(aws_provider),\n\t\t):\n\n\t\t\tfrom prowler.providers.aws.services.cloudwatch.cloudwatch_alarm_actions_enabled.cloudwatch_alarm_actions_enabled import (\n\t\t\t\tcloudwatch_alarm_actions_enabled,\n\t\t\t)\n\n\t\t\tcheck = cloudwatch_alarm_actions_enabled()\n\t\t\tresult = check.execute()\n\n\t\t\tassert len(result) == 
1\n\t\t\tassert result[0].status == 'FAIL'\n\t\t\tassert (\n\t\t\t\tresult[0].status_extended\n\t\t\t\t== 'CloudWatch metric alarm test_alarm does not have actions enabled.'\n\t\t\t)\n\t\t\tassert result[0].resource_id == 'test_alarm'\n\t\t\tassert (\n\t\t\t\tresult[0].resource_arn\n\t\t\t\t== 'arn:aws:cloudwatch:us-east-1:123456789012:alarm:test_alarm'\n\t\t\t)\n\t\t\tassert result[0].region == AWS_REGION_US_EAST_1\n\t\t\tassert result[0].resource_tags == []", + "awslambda_function_not_publicly_accessible:", + "from json import dumps\nfrom unittest import mock\n\nfrom boto3 import client\nfrom moto import mock_aws\n\nfrom prowler.providers.aws.services.awslambda.awslambda_service import Function\nfrom tests.providers.aws.utils import (\n\tAWS_ACCOUNT_NUMBER,\n\tAWS_REGION_EU_WEST_1,\n\tset_mocked_aws_provider,\n)\n\n\nclass Test_awslambda_function_not_publicly_accessible:\n\t@mock_aws\n\tdef test_no_functions(self):\n\t\taws_provider = set_mocked_aws_provider([AWS_REGION_EU_WEST_1])\n\n\t\tfrom prowler.providers.aws.services.awslambda.awslambda_service import Lambda\n\n\t\twith mock.patch(\n\t\t\t'prowler.providers.common.provider.Provider.get_global_provider',\n\t\t\treturn_value=aws_provider,\n\t\t), mock.patch(\n\t\t\t'prowler.providers.aws.services.awslambda.awslambda_function_not_publicly_accessible.awslambda_function_not_publicly_accessible.awslambda_client',\n\t\t\tnew=Lambda(aws_provider),\n\t\t):\n\t\t\t# Test Check\n\t\t\tfrom prowler.providers.aws.services.awslambda.awslambda_function_not_publicly_accessible.awslambda_function_not_publicly_accessible import (\n\t\t\t\tawslambda_function_not_publicly_accessible,\n\t\t\t)\n\n\t\t\tcheck = awslambda_function_not_publicly_accessible()\n\t\t\tresult = check.execute()\n\n\t\t\tassert len(result) == 0\n\n\t@mock_aws\n\tdef test_function_public(self):\n\t\t# Create the mock IAM role\n\t\tiam_client = client('iam', region_name=AWS_REGION_EU_WEST_1)\n\t\trole_name = 'test-role'\n\t\tassume_role_policy_document = 
{\n\t\t\t'Version': '2012-10-17',\n\t\t\t'Statement': [\n\t\t\t\t{\n\t\t\t\t\t'Effect': 'Allow',\n\t\t\t\t\t'Principal': {'Service': 'lambda.amazonaws.com'},\n\t\t\t\t\t'Action': 'sts:AssumeRole',\n\t\t\t\t}\n\t\t\t],\n\t\t}\n\t\trole_arn = iam_client.create_role(\n\t\t\tRoleName=role_name,\n\t\t\tAssumeRolePolicyDocument=dumps(assume_role_policy_document),\n\t\t)['Role']['Arn']\n\n\t\tfunction_name = 'test-lambda'\n\n\t\t# Create the lambda function using boto3 client\n\t\tlambda_client = client('lambda', region_name=AWS_REGION_EU_WEST_1)\n\t\tfunction_arn = lambda_client.create_function(\n\t\t\tFunctionName=function_name,\n\t\t\tRuntime='nodejs4.3',\n\t\t\tRole=role_arn,\n\t\t\tHandler='index.handler',\n\t\t\tCode={'ZipFile': b'fileb://file-path/to/your-deployment-package.zip'},\n\t\t\tDescription='Test Lambda function',\n\t\t\tTimeout=3,\n\t\t\tMemorySize=128,\n\t\t\tPublish=True,\n\t\t\tTags={'tag1': 'value1', 'tag2': 'value2'},\n\t\t)['FunctionArn']\n\n\t\t# Attach the policy to the lambda function with a wildcard principal\n\t\tlambda_client.add_permission(\n\t\t\tFunctionName=function_name,\n\t\t\tStatementId='public-access',\n\t\t\tAction='lambda:InvokeFunction',\n\t\t\tPrincipal='*',\n\t\t)\n\n\t\taws_provider = set_mocked_aws_provider([AWS_REGION_EU_WEST_1])\n\n\t\tfrom prowler.providers.aws.services.awslambda.awslambda_service import Lambda\n\n\t\twith mock.patch(\n\t\t\t'prowler.providers.common.provider.Provider.get_global_provider',\n\t\t\treturn_value=aws_provider,\n\t\t), mock.patch(\n\t\t\t'prowler.providers.aws.services.awslambda.awslambda_function_not_publicly_accessible.awslambda_function_not_publicly_accessible.awslambda_client',\n\t\t\tnew=Lambda(aws_provider),\n\t\t):\n\t\t\t# Test Check\n\t\t\tfrom prowler.providers.aws.services.awslambda.awslambda_function_not_publicly_accessible.awslambda_function_not_publicly_accessible import (\n\t\t\t\tawslambda_function_not_publicly_accessible,\n\t\t\t)\n\n\t\t\tcheck = 
awslambda_function_not_publicly_accessible()\n\t\t\tresult = check.execute()\n\n\t\t\tassert len(result) == 1\n\t\t\tassert result[0].region == AWS_REGION_EU_WEST_1\n\t\t\tassert result[0].resource_id == function_name\n\t\t\tassert result[0].resource_arn == function_arn\n\t\t\tassert result[0].status == 'FAIL'\n\t\t\tassert (\n\t\t\t\tresult[0].status_extended\n\t\t\t\t== f'Lambda function {function_name} has a policy resource-based policy with public access.'\n\t\t\t)\n\t\t\tassert result[0].resource_tags == [{'tag1': 'value1', 'tag2': 'value2'}]\n\n\t@mock_aws\n\tdef test_function_public_with_source_account(self):\n\t\t# Create the mock IAM role\n\t\tiam_client = client('iam', region_name=AWS_REGION_EU_WEST_1)\n\t\trole_name = 'test-role'\n\t\tassume_role_policy_document = {\n\t\t\t'Version': '2012-10-17',\n\t\t\t'Statement': [\n\t\t\t\t{\n\t\t\t\t\t'Effect': 'Allow',\n\t\t\t\t\t'Principal': {'Service': 'lambda.amazonaws.com'},\n\t\t\t\t\t'Action': 'sts:AssumeRole',\n\t\t\t\t}\n\t\t\t],\n\t\t}\n\t\trole_arn = iam_client.create_role(\n\t\t\tRoleName=role_name,\n\t\t\tAssumeRolePolicyDocument=dumps(assume_role_policy_document),\n\t\t)['Role']['Arn']\n\n\t\tfunction_name = 'test-lambda'\n\n\t\t# Create the lambda function using boto3 client\n\t\tlambda_client = client('lambda', region_name=AWS_REGION_EU_WEST_1)\n\t\tfunction_arn = lambda_client.create_function(\n\t\t\tFunctionName=function_name,\n\t\t\tRuntime='nodejs4.3',\n\t\t\tRole=role_arn,\n\t\t\tHandler='index.handler',\n\t\t\tCode={'ZipFile': b'fileb://file-path/to/your-deployment-package.zip'},\n\t\t\tDescription='Test Lambda function',\n\t\t\tTimeout=3,\n\t\t\tMemorySize=128,\n\t\t\tPublish=True,\n\t\t\tTags={'tag1': 'value1', 'tag2': 'value2'},\n\t\t)['FunctionArn']\n\n\t\t# Attach the policy to the lambda function with a wildcard 
principal\n\t\tlambda_client.add_permission(\n\t\t\tFunctionName=function_name,\n\t\t\tStatementId='non-public-access',\n\t\t\tAction='lambda:InvokeFunction',\n\t\t\tPrincipal='*',\n\t\t\tSourceArn=function_arn,\n\t\t\tSourceAccount=AWS_ACCOUNT_NUMBER,\n\t\t)\n\n\t\taws_provider = set_mocked_aws_provider([AWS_REGION_EU_WEST_1])\n\n\t\tfrom prowler.providers.aws.services.awslambda.awslambda_service import Lambda\n\n\t\twith mock.patch(\n\t\t\t'prowler.providers.common.provider.Provider.get_global_provider',\n\t\t\treturn_value=aws_provider,\n\t\t), mock.patch(\n\t\t\t'prowler.providers.aws.services.awslambda.awslambda_function_not_publicly_accessible.awslambda_function_not_publicly_accessible.awslambda_client',\n\t\t\tnew=Lambda(aws_provider),\n\t\t):\n\t\t\t# Test Check\n\t\t\tfrom prowler.providers.aws.services.awslambda.awslambda_function_not_publicly_accessible.awslambda_function_not_publicly_accessible import (\n\t\t\t\tawslambda_function_not_publicly_accessible,\n\t\t\t)\n\n\t\t\tcheck = awslambda_function_not_publicly_accessible()\n\t\t\tresult = check.execute()\n\n\t\t\tassert len(result) == 1\n\t\t\tassert result[0].region == AWS_REGION_EU_WEST_1\n\t\t\tassert result[0].resource_id == function_name\n\t\t\tassert result[0].resource_arn == function_arn\n\t\t\tassert result[0].status == 'PASS'\n\t\t\tassert (\n\t\t\t\tresult[0].status_extended\n\t\t\t\t== f'Lambda function {function_name} has a policy resource-based policy not public.'\n\t\t\t)\n\t\t\tassert result[0].resource_tags == [{'tag1': 'value1', 'tag2': 'value2'}]\n\n\t@mock_aws\n\tdef test_function_not_public(self):\n\t\t# Create the mock IAM role\n\t\tiam_client = client('iam', region_name=AWS_REGION_EU_WEST_1)\n\t\trole_name = 'test-role'\n\t\tassume_role_policy_document = {\n\t\t\t'Version': '2012-10-17',\n\t\t\t'Statement': [\n\t\t\t\t{\n\t\t\t\t\t'Effect': 'Allow',\n\t\t\t\t\t'Principal': {'Service': 'lambda.amazonaws.com'},\n\t\t\t\t\t'Action': 
'sts:AssumeRole',\n\t\t\t\t}\n\t\t\t],\n\t\t}\n\t\trole_arn = iam_client.create_role(\n\t\t\tRoleName=role_name,\n\t\t\tAssumeRolePolicyDocument=dumps(assume_role_policy_document),\n\t\t)['Role']['Arn']\n\n\t\tfunction_name = 'test-lambda'\n\n\t\t# Create the lambda function using boto3 client\n\t\tlambda_client = client('lambda', region_name=AWS_REGION_EU_WEST_1)\n\t\tfunction_arn = lambda_client.create_function(\n\t\t\tFunctionName=function_name,\n\t\t\tRuntime='nodejs4.3',\n\t\t\tRole=role_arn,\n\t\t\tHandler='index.handler',\n\t\t\tCode={'ZipFile': b'fileb://file-path/to/your-deployment-package.zip'},\n\t\t\tDescription='Test Lambda function',\n\t\t\tTimeout=3,\n\t\t\tMemorySize=128,\n\t\t\tPublish=True,\n\t\t\tTags={'tag1': 'value1', 'tag2': 'value2'},\n\t\t)['FunctionArn']\n\n\t\t# Attach the policy to the lambda function with a specific AWS account number as principal\n\t\tlambda_client.add_permission(\n\t\t\tFunctionName=function_name,\n\t\t\tStatementId='public-access',\n\t\t\tAction='lambda:InvokeFunction',\n\t\t\tPrincipal=AWS_ACCOUNT_NUMBER,\n\t\t\tSourceArn=function_arn,\n\t\t)\n\n\t\taws_provider = set_mocked_aws_provider([AWS_REGION_EU_WEST_1])\n\n\t\tfrom prowler.providers.aws.services.awslambda.awslambda_service import Lambda\n\n\t\twith mock.patch(\n\t\t\t'prowler.providers.common.provider.Provider.get_global_provider',\n\t\t\treturn_value=aws_provider,\n\t\t), mock.patch(\n\t\t\t'prowler.providers.aws.services.awslambda.awslambda_function_not_publicly_accessible.awslambda_function_not_publicly_accessible.awslambda_client',\n\t\t\tnew=Lambda(aws_provider),\n\t\t):\n\t\t\t# Test Check\n\t\t\tfrom prowler.providers.aws.services.awslambda.awslambda_function_not_publicly_accessible.awslambda_function_not_publicly_accessible import (\n\t\t\t\tawslambda_function_not_publicly_accessible,\n\t\t\t)\n\n\t\t\tcheck = awslambda_function_not_publicly_accessible()\n\t\t\tresult = check.execute()\n\n\t\t\tassert len(result) == 1\n\t\t\tassert result[0].region == 
AWS_REGION_EU_WEST_1\n\t\t\tassert result[0].resource_id == function_name\n\t\t\tassert result[0].resource_arn == function_arn\n\t\t\tassert result[0].status == 'PASS'\n\t\t\tassert (\n\t\t\t\tresult[0].status_extended\n\t\t\t\t== f'Lambda function {function_name} has a policy resource-based policy not public.'\n\t\t\t)\n\t\t\tassert result[0].resource_tags == [{'tag1': 'value1', 'tag2': 'value2'}]", + f"{check_name}:", ] return prompt_parts @@ -161,33 +164,33 @@ def _prepare_metadata_prompt(self, metadata: dict, context: str) -> list: ] return prompt_parts - def generate_check(self, check_name: str, check_reference: str) -> str: + def generate_check(self, check_name: str, context: str) -> str: """Fill the check with Gemini AI.""" check = "" - if check_reference: - prompt_parts = self._prepare_check_prompt(check_name, check_reference) - check = ( - self._generate_content(prompt_parts) - .replace("python", "") - .replace("```", "") - .strip() - ) + prompt_parts = self._prepare_check_prompt(check_name, context) + check = ( + self._generate_content(prompt_parts) + .replace("python", "") + .replace("```", "") + .strip() + ) return check - def generate_test(self, check_name: str, test_reference): + def generate_test(self, check_name: str): """Fill the test with Gemini AI.""" test = "" - if test_reference: - prompt_parts = self._prepare_test_prompt(check_name, test_reference) - test = ( - self._generate_content(prompt_parts) - .replace("python", "") - .replace("```", "") - .strip() - ) + prompt_parts = self._prepare_test_prompt( + check_name, + ) + test = ( + self._generate_content(prompt_parts) + .replace("python", "") + .replace("```", "") + .strip() + ) return test diff --git a/util/prowler_check_kreator/lib/templates.py b/util/prowler_check_kreator/lib/templates.py index bc2bf3c0116..d02a469818d 100644 --- a/util/prowler_check_kreator/lib/templates.py +++ b/util/prowler_check_kreator/lib/templates.py @@ -16,21 +16,22 @@ def load_check_template(provider: str, service: 
str, check_name: str) -> str: return f""" from prowler.lib.check.models import Check, Check_Report_AWS from prowler.providers.aws.services.{service}.{service}_client import {service}_client +from typing import List class {check_name}(Check): - def execute(self) -> list[Check_Report_AWS]: + def execute(self) -> List[Check_Report_AWS]: findings = [] - for , in {service}_client..items(): + for , in {service}_client..items(): report = Check_Report_AWS(self.metadata()) - report.region = .region - report.resource_id = .name - report.resource_arn = - report.resource_tags = .tags + report.region = .region + report.resource_id = .name + report.resource_arn = + report.resource_tags = .tags report.status = "FAIL" report.status_extended = f"..." - if : + if : report.status = "PASS" report.status_extended = f"..." @@ -46,9 +47,9 @@ def load_test_template(provider: str, service: str, check_name: str) -> str: """Load the template for the test file. Args: - provider (str): The provider of the service (e.g., "aws"). - service (str): The service to check (e.g., "s3"). - check_name (str): The name of the check (e.g., "check_bucket_encryption"). + provider: The provider of the service (e.g., "aws"). + service: The service to check (e.g., "s3"). + check_name: The name of the check (e.g., "check_bucket_encryption"). Returns: A test template used when the user does not want to generate the check with AI. 
diff --git a/util/prowler_check_kreator/prowler_check_kreator.py b/util/prowler_check_kreator/prowler_check_kreator.py index d7928936680..24fa6140f67 100644 --- a/util/prowler_check_kreator/prowler_check_kreator.py +++ b/util/prowler_check_kreator/prowler_check_kreator.py @@ -13,13 +13,13 @@ class ProwlerCheckKreator: def __init__(self, provider: str, check_name: str): # Validate provider - supported_providers = {"aws"} + SUPPORTED_PROVIDERS = {"aws"} - if provider in supported_providers: + if provider in SUPPORTED_PROVIDERS: self._provider = provider else: raise ValueError( - f"Invalid provider. Supported providers: {', '.join(supported_providers)}" + f"Invalid provider. Supported providers: {', '.join(SUPPORTED_PROVIDERS)}" ) # Find the Prowler folder @@ -41,7 +41,65 @@ def __init__(self, provider: str, check_name: str): if os.path.exists(service_path): self._service_name = service_name else: - raise ValueError(f"Service {service_name} does not exist for {provider}") + raise ValueError( + f"Service {service_name} does not exist for {provider}. Please introduce a valid service" + ) + + # Ask user if want to use Gemini for all the process + + user_input = ( + input( + "Do you want to use Gemini to create the check and metadata? Type 'yes'/'no' and press enter: " + ) + .strip() + .lower() + ) + + if user_input == "yes": + # Let the user to use the model that he wants + supported_models = [ + "gemini-1.5-flash", + "gemini-1.5-pro", + "gemini-1.0-pro", + ] + + print("Select the model that you want to use:") + for i, model in enumerate(supported_models): + print(f"{i + 1}. 
{model}") + + user_input = input( + "Type the number of the model and press enter (default is 1): " + ).strip() + + if not user_input: + model_index = 1 + else: + model_index = int(user_input) + + if model_index < 1 or model_index > len(supported_models): + raise ValueError("Invalid model selected.") + + model_name = supported_models[model_index - 1] + + if "gemini" in model_name: + from util.prowler_check_kreator.lib.llms.gemini import Gemini + + self._model = Gemini(model_name) + + # Provide some context about the check to create + self._context = ( + input( + "Please provide some context to generate the check and metadata:\n" + ) + ).strip() + + else: + raise ValueError("Invalid model selected.") + elif user_input == "no": + self._model = None + self._context = "" + else: + raise ValueError("Invalid input. Please type 'yes' or 'no'.") if not self._check_exists(check_name): self._check_name = check_name @@ -57,7 +115,7 @@ def __init__(self, provider: str, check_name: str): # Check already exists, give the user the possibility to continue or not user_input = ( input( - f"Check {check_name} already exists. Do you want to continue and overwrite it? Type 'yes'/'no' and press enter: " + f"Some files of {check_name} already exists. Do you want to continue and overwrite it? Type 'yes' if you want to continue: " ) .strip() .lower() @@ -74,42 +132,7 @@ def __init__(self, provider: str, check_name: str): check_name, ) else: - raise ValueError(f"Check {check_name} already exists") - - # Let the user to use the model that he wants - self._model = None - supported_models = [ - "gemini-1.5-flash", - "gemini-1.5-pro", - "gemini-1.0-pro", - ] - - print("Select the model that you want to use:") - for i, model in enumerate(supported_models): - print(f"{i + 1}. 
{model}") - - user_input = input( - "Type the number of the model and press enter (default is 1): " - ).strip() - - if not user_input: - model_index = 1 - else: - model_index = int(user_input) - - if model_index < 1 or model_index > len(supported_models): - raise ValueError("Invalid model selected") - - model_name = supported_models[model_index - 1] - - if "gemini" in model_name: - from util.prowler_check_kreator.lib.llms.gemini import Gemini - - self._model = Gemini(model_name) - else: - raise ValueError("Invalid model selected") - - self._check_reference_name = "" + raise ValueError(f"Check {check_name} already exists.") def kreate_check(self) -> None: """Create a new check in Prowler""" @@ -123,40 +146,8 @@ def kreate_check(self) -> None: with open(os.path.join(self._check_path, "__init__.py"), "w") as f: f.write("") - # Check first if the check file already exists, in that case, ask user if want to overwrite it - if os.path.exists(os.path.join(self._check_path, f"{self._check_name}.py")): - user_input = ( - input( - f"Python check file {self._check_name} already exists. Do you want to overwrite it? Type 'yes'/'no' and press enter: " - ) - .strip() - .lower() - ) - - if user_input == "yes": - self._write_check_file() - else: - print("Check file not overwritten") - else: - self._write_check_file() - - # Check if metadata file already exists, in that case, ask user if want to overwrite it - if os.path.exists( - os.path.join(self._check_path, f"{self._check_name}.metadata.json") - ): - user_input = ( - input( - f"Metadata file {self._check_name}.metadata.json already exists. Do you want to overwrite it? 
Type 'yes'/'no' and press enter: " - ) - .strip() - .lower() - ) - if user_input == "yes": - self._write_metadata_file() - else: - print("Metadata file not overwritten") - else: - self._write_metadata_file() + self._write_check_file() + self._write_metadata_file() # Create test directory if it does not exist test_folder = os.path.join( @@ -170,27 +161,19 @@ def kreate_check(self) -> None: os.makedirs(test_folder, exist_ok=True) - # Check if test file already exists, in that case, ask user if want to overwrite it - if os.path.exists(os.path.join(test_folder, f"{self._check_name}_test.py")): - user_input = ( - input( - f"Python test file {self._check_name}_test.py already exists. Do you want to overwrite it? Type 'yes'/'no' and press enter: " - ) - .strip() - .lower() - ) - - if user_input == "yes": - self._write_test_file() - else: - print("Test file not overwritten") - else: - self._write_test_file() + self._write_test_file() print(f"Check {self._check_name} created successfully") def _check_exists(self, check_name: str) -> bool: - """Check if the check already exists""" + """Ensure if any file related to the check already exists. + + Args: + check_name: The name of the check. + + Returns: + True if the check already exists, False otherwise. 
+ """ # Get the check path check_path = os.path.join( @@ -212,55 +195,25 @@ def _check_exists(self, check_name: str) -> bool: check_name, ) - # Check if exits check.py, check_metadata.json and check_test.py + # Check if exits check.py, check_metadata.json or check_test.py return ( os.path.exists(check_path) - and os.path.exists(os.path.join(check_path, "__init__.py")) - and os.path.exists(os.path.join(check_path, f"{check_name}.py")) - and os.path.exists(os.path.join(check_path, f"{check_name}.metadata.json")) - and os.path.exists(_test_path) + or os.path.exists(os.path.join(check_path, "__init__.py")) + or os.path.exists(os.path.join(check_path, f"{check_name}.py")) + or os.path.exists(os.path.join(check_path, f"{check_name}.metadata.json")) + or os.path.exists(_test_path) ) def _write_check_file(self) -> None: """Write the check file""" - check_content = load_check_template( - self._provider, self._service_name, self._check_name - ) - - # Ask if want that Gemini to fill the check taking as reference another check - - user_input = ( - input( - "WARNING: This still in beta. The check generated may not have sense or you will have to add some parameters to the service\nDo you want to ask Gemini to fill the check now? If yes, type the reference check name and press enter. 
If not, press enter (it will be created with a standard template): " - ) - .strip() - .lower() - ) - - if user_input and self._check_exists(user_input): - self._check_reference_name = user_input - # Load the file referenced by the user - with open( - os.path.join( - self._prowler_folder, - "prowler/providers/", - self._provider, - "services/", - self._service_name, - user_input, - f"{user_input}.py", - ), - "r", - ) as f: - check_reference = f.read() - - check_content = self._model.generate_check( - self._check_name, check_reference + if self._model is None: + check_content = load_check_template( + self._provider, self._service_name, self._check_name ) else: - print( - "Referenced check does not exist. Check will be created with the standard template." + check_content = self._model.generate_check( + check_name=self._check_name, context=self._context ) with open(os.path.join(self._check_path, f"{self._check_name}.py"), "w") as f: @@ -297,34 +250,12 @@ def _write_metadata_file(self) -> None: "Notes": "", } - # Ask if want that Gemini to fill the metadata - - user_input = ( - input( - "Do you want to ask Gemini to fill the metadata now? 
Type 'yes'/'no' and press enter: " - ) - .strip() - .lower() - ) - - if user_input.lower().strip() == "yes": - # Ask for some context to the user to generate the metadata, the context input finishes with a blank line - - print( - "Please provide some context to fill the metadata (end with an empty line):" - ) - context_lines = [] - while True: - line = input() - if line: - context_lines.append(line) - else: - break - context = "\n".join(context_lines) - - filled_metadata = self._model.generate_metadata(metadata_template, context) - else: + if self._model is None: filled_metadata = metadata_template + else: + filled_metadata = self._model.generate_metadata( + metadata_template, self._context + ) with open( os.path.join(self._check_path, f"{self._check_name}.metadata.json"), "w" @@ -343,39 +274,12 @@ def _write_test_file(self) -> None: self._check_name, ) - test_content = load_test_template( - self._provider, self._service_name, self._check_name - ) - - # Ask if want that Gemini to fill the test taking as reference the other check tests - if self._check_reference_name: - user_input = ( - input( - "Do you want to ask Gemini to fill the test now (based on check provided as reference in the check creation)? 
Type 'yes'/'no' and press enter (if not, it will be created with a standard template): " - ) - .strip() - .lower() + if self._model is None: + test_template = load_test_template( + self._provider, self._service_name, self._check_name ) - - if user_input.lower().strip() == "yes": - # Load the file referenced by the user - with open( - os.path.join( - self._prowler_folder, - "tests/providers/", - self._provider, - "services/", - self._service_name, - self._check_reference_name, - f"{self._check_reference_name}_test.py", - ), - "r", - ) as f: - test_content = f.read() - - test_template = self._model.generate_test( - self._check_name, test_content - ) + else: + test_template = self._model.generate_test(self._check_name) with open(os.path.join(test_folder, f"{self._check_name}_test.py"), "w") as f: f.write(test_template) @@ -383,7 +287,7 @@ def _write_test_file(self) -> None: if __name__ == "__main__": try: - if len(sys.argv) < 3: + if len(sys.argv) != 3: raise ValueError( "Invalid arguments. Usage: python prowler_check_kreator.py " ) @@ -396,5 +300,5 @@ def _write_test_file(self) -> None: print(f"Error: {e}") sys.exit(1) except Exception as e: - print(f"Error: {e}") + print(f"Unexpected error: {e}") sys.exit(1)