From 1191322c5be8598ceb4eb16765a8f74d9492f93a Mon Sep 17 00:00:00 2001 From: Marcos Casado Date: Mon, 1 Jan 2024 21:06:15 +0100 Subject: [PATCH 01/34] Prueba Marcos Front-end --- infohound/infohound_config.sample.py | 1 + infohound/static/infohound/js/index.js | 36 +++++++++++++++------- infohound/tasks.py | 12 ++++++-- infohound/tool/data_sources/google_data.py | 5 +++ infohound/tool/retriever_modules/people.py | 8 +++++ infohound/utils.py | 2 ++ infohound/views.py | 4 +++ 7 files changed, 55 insertions(+), 13 deletions(-) diff --git a/infohound/infohound_config.sample.py b/infohound/infohound_config.sample.py index f202376..09cbd3f 100644 --- a/infohound/infohound_config.sample.py +++ b/infohound/infohound_config.sample.py @@ -14,3 +14,4 @@ LEAK_LOOKUP_KEY = "" GOOGLE_API_KEY = "" GOOGLE_ID = "" +OPENAI_API_KEY = "" diff --git a/infohound/static/infohound/js/index.js b/infohound/static/infohound/js/index.js index 7aca97d..da6c94c 100644 --- a/infohound/static/infohound/js/index.js +++ b/infohound/static/infohound/js/index.js @@ -310,18 +310,14 @@ function loadTasks() { "findEmailsTask", "findEmailsFromURLsTask", "findSocialProfilesByEmailTask"] data.forEach(task => { const card = document.createElement('div'); - card.className = 'card shadow mb-3'; + card.className = 'col-md-4 p-3'; b = ` -
- -
+ `; pb = ""; if(task.state == "PENDING") { b = ` -
- -
+ ` pb = `
@@ -362,17 +358,35 @@ function loadTasks() { ` } + ai_badge = ""; + if(task.ai) { + ai_badge = ` + + AI Powered + + ` + } + card.innerHTML = ` -
+
+
-
+
${h5} + ${ai_badge}

${task.description}

- ${pb}
- ${b} +
+
+
+ ${b} +
+
+
+ ${pb}
+
`; if (task.type == "Retrieve") { taskRetrievalContainer.appendChild(card); diff --git a/infohound/tasks.py b/infohound/tasks.py index dede042..3381aed 100644 --- a/infohound/tasks.py +++ b/infohound/tasks.py @@ -4,6 +4,9 @@ import trio import importlib +# ------------------------------------- # +# ------------- RETRIEVAL ------------- # +# ------------------------------------- # @shared_task(bind=True, name="get_whois_info") def getWhoisInfoTask(self, domain): @@ -50,9 +53,14 @@ def executeDorksTask(self, domain): def findEmailsFromDorksTask(self, domain): emails.findEmailsFromDorks(domain) +@shared_task(bind=True, name="find_people_from_google") +def findPeopleFromGoogle(self, domain): + people.findPeopleFromGoogle(domain) - -# -------------ANALYSIS------------- +# ------------------------------------- # +# ------------- ANALYSIS -------------- # +# ------------------------------------- # + @shared_task(bind=True, name="subdomain_take_over_analysis") def subdomainTakeOverAnalysisTask(self, domain): domain_analysis.subdomainTakeOverAnalysis(domain) diff --git a/infohound/tool/data_sources/google_data.py b/infohound/tool/data_sources/google_data.py index 92f68f4..eba13cd 100644 --- a/infohound/tool/data_sources/google_data.py +++ b/infohound/tool/data_sources/google_data.py @@ -50,6 +50,11 @@ def getUrls(query): #- files #- url +def discoverPeople (domain): + url_base = f"" + # + # TO-DO + # def discoverEmails(domain): emails = [] diff --git a/infohound/tool/retriever_modules/people.py b/infohound/tool/retriever_modules/people.py index 4a7dabc..1e38e68 100644 --- a/infohound/tool/retriever_modules/people.py +++ b/infohound/tool/retriever_modules/people.py @@ -5,6 +5,14 @@ from infohound.tool.data_sources import google_data, bing_data from infohound.models import Domain,People,Emails,Usernames +#ADDED BY M.CASADO - PROOF OF CONCEPT# +def findPeopleFromGoogle(domain_id): + domain = Domain.objects.get(id=domain_id).domain + results = google_data.discoverPeople(domain) + # + # TO-DO + # + def findSocialProfilesByEmail(domain_id): queryset = Emails.objects.filter(people_id__isnull=True, domain_id=domain_id) domain = Domain.objects.get(id=domain_id).domain diff --git a/infohound/utils.py b/infohound/utils.py index 8c6281d..a48e116 100644 --- a/infohound/utils.py +++ b/infohound/utils.py @@ -10,6 +10,7 @@ def load_tasks(domain_id): tasks = [ + # RETRIEVAL {"name_id":"getWhoisInfoTask","name":"Get Whois Info", "description":"Get revelant information from Whois register.", "type":"Retrieve"}, {"name_id":"getDNSRecordsTask","name":"Get DNS Records", "description":"This task queries the DNS.", "type":"Retrieve"}, {"name_id":"getSubdomainsTask","name":"Get Subdomains", "description":"This task uses Alienvault OTX API, CRT.sh and HackerTarget as data sources to discover cached subdomains.", "type":"Retrieve"}, @@ -21,6 +22,7 @@ def load_tasks(domain_id): {"name_id":"findEmailsFromURLsTask","name":"Find Emails From Urls", "description":"Sometimes, the discoverd URLs can contain sentive information. This tasks retrive all the emails from URL paths.", "type":"Retrieve"}, {"name_id":"executeDorksTask","name":"Execute dorks", "description":"It will execute the dorks defined in the dorks folder. Remember to grup the dorks by categories (filename) so you can later understand the objectives of the dorks.", "type":"Retrieve"}, {"name_id":"findEmailsFromDorksTask","name":"Find Emails From Dorks", "description":"By default, InfoHound has some dorks defined in order to discover emails. 
This task will look for them in the results obtained by the execution of the dorks.", "type":"Retrieve"}, + {"name_id":"findPeopleFromGoogle","name":"Find People From Google", "description":"Uses the Google JSON API to find people who work in the company asociated to the domain", "type":"Retrieve"}, # ANALYSIS {"name_id":"subdomainTakeOverAnalysisTask","name":"Check Subdomains Take-Over", "description":"It performes some checks to determine if a subdomain can be taken over.", "type":"Analysis"}, {"name_id":"canBeSpoofedTask","name":"Check If Domain Can Be Spoofed", "description":"It checks if a domain, from the emails InfoHound has discovered, can be spoofed. This could be used by attackers to impersonate a person and send emails as hime/her.", "type":"Analysis"}, diff --git a/infohound/views.py b/infohound/views.py index 5631208..f32ae46 100644 --- a/infohound/views.py +++ b/infohound/views.py @@ -11,11 +11,13 @@ from infohound.models import Domain, People, Files, Emails, Subdomains, URLs, Dorks, Results, Usernames, Tasks import infohound.tasks import infohound.utils +import infohound.infohound_config from django.utils import timezone import infohound.tool.retriever_modules.dorks as dorks from django.db import IntegrityError from django.utils.safestring import mark_safe + def index(request): return render(request, 'index.html') @@ -203,6 +205,8 @@ def get_available_tasks(request): data["description"] = entry.description data["type"] = entry.task_type data["custom"] = entry.custom + # if infohound.infohound_config.OPENAI_API_KEY == "": + data["ai"] = False if entry.last_execution: data["last_execution"] = entry.last_execution.strftime("%d/%m/%y %H:%M") if entry.celery_id: From 17c1fdbab02a713637b4d96f3526f7652bac1799 Mon Sep 17 00:00:00 2001 From: Marcos Casado Date: Wed, 31 Jan 2024 18:41:06 +0100 Subject: [PATCH 02/34] LLM --- docker-compose.yml | 19 ++++++ infohound/models.py | 3 + infohound/static/infohound/js/index.js | 62 ++++++++----------- infohound/tasks.py | 8 ++- infohound/tool/ai_assistant/ollama.py | 15 +++++ .../tool/analysis_modules/people_analisys.py | 17 +++++ infohound/tool/data_sources/google_data.py | 48 ++++++++++++-- infohound/tool/infohound_utils.py | 2 - infohound/tool/retriever_modules/people.py | 14 +++-- infohound/utils.py | 19 +++--- infohound/views.py | 11 ++-- infohound_project/settings.py | 3 + requirements.txt | 1 + 13 files changed, 157 insertions(+), 65 deletions(-) create mode 100644 infohound/tool/ai_assistant/ollama.py create mode 100644 infohound/tool/analysis_modules/people_analisys.py diff --git a/docker-compose.yml b/docker-compose.yml index 39e734d..0c71192 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -12,6 +12,7 @@ services: - db - redis - celery_worker + - ollama environment: - POSTGRES_USER=postgres - POSTGRES_PASSWORD=postgres @@ -19,6 +20,7 @@ services: - REDIS_HOST=redis - REDIS_PORT=6379 command: sh -c "python manage.py makemigrations infohound && python manage.py migrate && python manage.py runserver 0.0.0.0:8000" + celery_worker: build: context: . 
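An illustrative sanity check, not part of the patches themselves: once the ollama service defined further down in this compose file is up, the web container can reach it by its compose service name. This sketch assumes Ollama's default port 11434; requests is already a project dependency (it is used by infohound/tool/data_sources/google_data.py):

    import requests

    # The compose service name resolves on the internal network; a healthy
    # Ollama server answers its root path with "Ollama is running".
    try:
        resp = requests.get("http://ollama:11434", timeout=5)
        print(resp.status_code, resp.text)
    except requests.exceptions.ConnectionError as err:
        print(f"Ollama service is not reachable yet: {err}")
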
@@ -35,10 +37,12 @@ services: - REDIS_HOST=redis - REDIS_PORT=6379 command: sh -c "celery -A infohound_project worker --loglevel=info" + redis: image: redis:latest ports: - '6378:6379' + db: image: postgres:12 volumes: @@ -47,5 +51,20 @@ services: - POSTGRES_USER=postgres - POSTGRES_PASSWORD=postgres - POSTGRES_DB=infohound_db + + ollama: + image: ollama/ollama:latest + ports: + - '11434:11434' + environment: + - gpus=all + deploy: + resources: + reservations: + devices: + - driver: nvidia + count: 1 + capabilities: [gpu] volumes: postgres_data: + diff --git a/infohound/models.py b/infohound/models.py index a9f409f..5eb5f07 100644 --- a/infohound/models.py +++ b/infohound/models.py @@ -12,6 +12,9 @@ class People(models.Model): phones = models.JSONField(default=list, null=True) social_profiles = models.JSONField(default=list) source = models.CharField(max_length=255) + ocupation_summary = models.TextField(default="This profile doesn't have a description yet. You can use the profile analysis task to employ an AI-powered tool that examines the metadata and creates a description for you.") + raw_metadata = models.TextField (default=None) + url_img = models.TextField(default="https://static.thenounproject.com/png/994628-200.png") domain = models.ForeignKey(Domain, on_delete=models.CASCADE) # TO-DO: change spoofable to allow 3 states diff --git a/infohound/static/infohound/js/index.js b/infohound/static/infohound/js/index.js index da6c94c..cbd2b7d 100644 --- a/infohound/static/infohound/js/index.js +++ b/infohound/static/infohound/js/index.js @@ -186,34 +186,36 @@ function loadPeople() { person_name = person.name.length == 0 ? "[Not found]" : person_name card.innerHTML = ` -
-
- - - - +
+
+
+ +
+
+
${person_name}
+
+ + ${person.emails} + + ${person.phones} + + ${person.keys} + + ${person.accounts}
-
-
-
${person_name}
-
- - ${person.emails} - - ${person.phones} - - ${person.keys} - - ${person.accounts} -
-
-
- ${socialIcons} -
-
${person.id}
-
+
+ ${person.ocupation_summary}
+
+
+ +
+
+ ${person.id} +
+
+
`; col.appendChild(card) cardContainer.append(col); @@ -358,22 +360,12 @@ function loadTasks() { ` } - ai_badge = ""; - if(task.ai) { - ai_badge = ` - - AI Powered - - ` - } - card.innerHTML = `
${h5} - ${ai_badge}

${task.description}

diff --git a/infohound/tasks.py b/infohound/tasks.py index 3381aed..ab0773a 100644 --- a/infohound/tasks.py +++ b/infohound/tasks.py @@ -1,5 +1,5 @@ from infohound.tool.retriever_modules import domains,subdomains,urls,files,emails,people,dorks -from infohound.tool.analysis_modules import domain_analysis,email_analysis,files_analysis,usernames_analysis +from infohound.tool.analysis_modules import domain_analysis,email_analysis,files_analysis,usernames_analysis,people_analisys from celery import shared_task import trio import importlib @@ -54,7 +54,7 @@ def findEmailsFromDorksTask(self, domain): emails.findEmailsFromDorks(domain) @shared_task(bind=True, name="find_people_from_google") -def findPeopleFromGoogle(self, domain): +def findPeopleFromGoogleTask(self, domain): people.findPeopleFromGoogle(domain) # ------------------------------------- # @@ -97,6 +97,10 @@ def findRegisteredSitesTask(self, domain): def checkBreachTask(self, domain): email_analysis.checkBreach(domain) +@shared_task(bind=True, name="summarize_profile") +def summarize_profile(self, domain): + people_analisys.summarize_profile(domain) + # --------------CUSTOM-------------- @shared_task(bind=True, name="custom_task") diff --git a/infohound/tool/ai_assistant/ollama.py b/infohound/tool/ai_assistant/ollama.py new file mode 100644 index 0000000..0b7780a --- /dev/null +++ b/infohound/tool/ai_assistant/ollama.py @@ -0,0 +1,15 @@ +from langchain.llms import Ollama +from infohound_project import settings + +def ollama_flexible_prompt(in_prompt): + BASE_URL = "http://172.26.0.3:11434" + MODEL = "llama2" + + ollama = Ollama(base_url=BASE_URL, model=MODEL) + + try: + res = ollama(in_prompt) + return res + except Exception as e: + print(f"Error en la llamada a Ollama: {e}") + return None \ No newline at end of file diff --git a/infohound/tool/analysis_modules/people_analisys.py b/infohound/tool/analysis_modules/people_analisys.py new file mode 100644 index 0000000..121d798 --- /dev/null +++ b/infohound/tool/analysis_modules/people_analisys.py @@ -0,0 +1,17 @@ +import time +from infohound.models import People +from infohound.tool.ai_assistant import ollama + +def summarize_profile(domain_id): + queryset = People.objects.filter(domain_id=domain_id) + + for entry in queryset.iterator(): + try: + summarize_prompt = "Summarize the ocupation of the person in just one pharagraph given the following data: " + raw_data = entry.raw_metadata + print ("Executing AI-Powered Profile Analisis of: " + entry.name) + entry.ocupation_summary = ollama.ollama_flexible_prompt(summarize_prompt + raw_data) + print ("Summary: " +entry.ocupation_summary) + entry.save() + except Exception as e: + print(f"Error inesperado: {str(e)}") \ No newline at end of file diff --git a/infohound/tool/data_sources/google_data.py b/infohound/tool/data_sources/google_data.py index eba13cd..cb9ef4c 100644 --- a/infohound/tool/data_sources/google_data.py +++ b/infohound/tool/data_sources/google_data.py @@ -1,6 +1,7 @@ import requests import json import html +import time import urllib.parse import infohound.tool.infohound_utils as infohound_utils from bs4 import BeautifulSoup @@ -50,11 +51,48 @@ def getUrls(query): #- files #- url -def discoverPeople (domain): - url_base = f"" - # - # TO-DO - # +def discoverPeople (query): + start = 1 + total_results = 0 + total_gathered = 0 + limit = False + results = True + people = [] + + print("Testing query: " + query) + + while results and start < 100 and not limit: + payload = {"key":API_KEY,"cx":ID,"start":start,"q":query} + res = 
requests.get("https://www.googleapis.com/customsearch/v1",params=payload) + data = json.loads(res.text) + if "error" in data: + print(data["error"]["status"]) + limit = True + else: + if start == 1: + total_results = data["searchInformation"]["totalResults"] + if "items" in data: + for item in data["items"]: + try: + url = item["link"] + first_name = item["pagemap"]["metatags"][0]["profile:first_name"] + last_name = item["pagemap"]["metatags"][0]["profile:last_name"] + url_img = item["pagemap"]["cse_image"][0]["src"] + name = f"{first_name} {last_name}" + people.append((name,url,json.dumps(item),url_img)) + print("Added: " + name) + total_gathered = total_gathered + 1 + except KeyError as e: + print(f"Error: La clave '{e.args[0]}' no existe en la estructura de datos.") + except Exception as e: + print(f"Error inesperado: {str(e)}") + else: + results = False + start = start + 10 + time.sleep(1) + + print("Found "+str(total_results)+" and added "+str(total_gathered)) + return (people) def discoverEmails(domain): emails = [] diff --git a/infohound/tool/infohound_utils.py b/infohound/tool/infohound_utils.py index 7f90968..300fe8f 100644 --- a/infohound/tool/infohound_utils.py +++ b/infohound/tool/infohound_utils.py @@ -33,8 +33,6 @@ def extractSocialInfo(text): if t is not None: data.append(t.group(0)) - - # Twitter regex = r"(http(s)?:\/\/)?([\w]+\.)?twitter\.com\/[^&\/?\"\%]*" t = re.search(regex, text) diff --git a/infohound/tool/retriever_modules/people.py b/infohound/tool/retriever_modules/people.py index 1e38e68..4a8e749 100644 --- a/infohound/tool/retriever_modules/people.py +++ b/infohound/tool/retriever_modules/people.py @@ -5,13 +5,17 @@ from infohound.tool.data_sources import google_data, bing_data from infohound.models import Domain,People,Emails,Usernames -#ADDED BY M.CASADO - PROOF OF CONCEPT# + def findPeopleFromGoogle(domain_id): domain = Domain.objects.get(id=domain_id).domain - results = google_data.discoverPeople(domain) - # - # TO-DO - # + company = domain.split(".")[0] + query = f'intitle:"{company}" site:"linkedin.com"' + results = google_data.discoverPeople(query) + for result in results: + try: + People.objects.get_or_create(name=result[0], social_profiles=results[1], raw_metadata=result[2], url_img=result[3], source="Google", domain_id=domain_id) + except IntegrityError as e: + pass def findSocialProfilesByEmail(domain_id): queryset = Emails.objects.filter(people_id__isnull=True, domain_id=domain_id) diff --git a/infohound/utils.py b/infohound/utils.py index a48e116..5688b01 100644 --- a/infohound/utils.py +++ b/infohound/utils.py @@ -11,18 +11,18 @@ def load_tasks(domain_id): tasks = [ # RETRIEVAL - {"name_id":"getWhoisInfoTask","name":"Get Whois Info", "description":"Get revelant information from Whois register.", "type":"Retrieve"}, + {"name_id":"getWhoisInfoTask","name":"Get Whois Information", "description":"Get revelant information from Whois register.", "type":"Retrieve"}, {"name_id":"getDNSRecordsTask","name":"Get DNS Records", "description":"This task queries the DNS.", "type":"Retrieve"}, {"name_id":"getSubdomainsTask","name":"Get Subdomains", "description":"This task uses Alienvault OTX API, CRT.sh and HackerTarget as data sources to discover cached subdomains.", "type":"Retrieve"}, - {"name_id":"getSubdomainsFromURLSTask","name":"Get Subdomains From URLs", "description":"Once some tasks have been performed, the URLs table will have a lot of entries. 
This task will check all the URLS in order to find new subdomains.", "type":"Retrieve"}, + {"name_id":"getSubdomainsFromURLSTask","name":"Get Subdomains from URLs", "description":"Once some tasks have been performed, the URLs table will have a lot of entries. This task will check all the URLS in order to find new subdomains.", "type":"Retrieve"}, {"name_id":"getURLsTask","name":"Get URLs", "description":"It searches all URLs cached by Wayback Machine and saves them into the database. This will later help to discover other data entities like files or subdomains.", "type":"Retrieve"}, - {"name_id":"getFilesFromURLsTask","name":"Get Files from URLs", "description":"It loops through the URLs database table in order to find files and store them to the Files database table to analyse them later. The files that will be retrieved are: doc, docx, ppt, pptx, pps, ppsx, xls, xlsx, odt, ods, odg, odp, sxw, sxc, sxi, pdf, wpd, svg, indd, rdp, ica, zip, rar", "type":"Retrieve"}, - {"name_id":"findEmailsTask","name":"Find Email", "description":"It lookes for emails using queries to Google and Bing.", "type":"Retrieve"}, - {"name_id":"findSocialProfilesByEmailTask","name":"Find people from emails", "description":"Once some emails have been found it can be useful to discover the person behind them. Also, it finds usernames from that people.", "type":"Retrieve"}, - {"name_id":"findEmailsFromURLsTask","name":"Find Emails From Urls", "description":"Sometimes, the discoverd URLs can contain sentive information. This tasks retrive all the emails from URL paths.", "type":"Retrieve"}, - {"name_id":"executeDorksTask","name":"Execute dorks", "description":"It will execute the dorks defined in the dorks folder. Remember to grup the dorks by categories (filename) so you can later understand the objectives of the dorks.", "type":"Retrieve"}, + {"name_id":"getFilesFromURLsTask","name":"Get Files from URLs", "description":"It loops through the URLs database table in order to find files and store them to the Files database. The files that will be retrieved are: doc, docx, ppt, pptx, pps, ppsx, xls, xlsx, odt, ods, odg, odp, sxw, sxc, sxi, pdf, wpd, svg, indd, rdp, ica, zip, rar", "type":"Retrieve"}, + {"name_id":"findEmailsTask","name":"Find Emails", "description":"It lookes for emails using queries to Google and Bing.", "type":"Retrieve"}, + {"name_id":"findSocialProfilesByEmailTask","name":"Find People from Emails", "description":"Once some emails have been found it can be useful to discover the person behind them. Also, it finds usernames from that people.", "type":"Retrieve"}, + {"name_id":"findEmailsFromURLsTask","name":"Find Emails From URLs", "description":"Sometimes, the discoverd URLs can contain sentive information. This tasks retrive all the emails from URL paths.", "type":"Retrieve"}, + {"name_id":"executeDorksTask","name":"Execute Dorks", "description":"It will execute the dorks defined in the dorks folder. Remember to grup the dorks by categories (filename) so you can later understand the objectives of the dorks.", "type":"Retrieve"}, {"name_id":"findEmailsFromDorksTask","name":"Find Emails From Dorks", "description":"By default, InfoHound has some dorks defined in order to discover emails. 
This task will look for them in the results obtained by the execution of the dorks.", "type":"Retrieve"}, - {"name_id":"findPeopleFromGoogle","name":"Find People From Google", "description":"Uses the Google JSON API to find people who work in the company asociated to the domain", "type":"Retrieve"}, + {"name_id":"findPeopleFromGoogleTask","name":"Find People From Google", "description":"Uses the Google JSON API to find people who work in the company asociated to the domain", "type":"Retrieve"}, # ANALYSIS {"name_id":"subdomainTakeOverAnalysisTask","name":"Check Subdomains Take-Over", "description":"It performes some checks to determine if a subdomain can be taken over.", "type":"Analysis"}, {"name_id":"canBeSpoofedTask","name":"Check If Domain Can Be Spoofed", "description":"It checks if a domain, from the emails InfoHound has discovered, can be spoofed. This could be used by attackers to impersonate a person and send emails as hime/her.", "type":"Analysis"}, @@ -32,7 +32,8 @@ def load_tasks(domain_id): {"name_id":"getEmailsFromMetadataTask","name":"Get Emails From Metadata", "description":"As some metadata can contain emails, this will retrive all o them and save it to the database.", "type":"Analysis"}, {"name_id":"getEmailsFromFilesContentTask","name":"Get Emails From Files Content", "description":"Usually emails can be included in corporate files so this task will retrive all the emails from the downloaded files content.", "type":"Analysis"}, {"name_id":"findRegisteredSitesTask","name":"Find Registered Services using emails", "description":"It is possible to find services or social networks where an emaill has been used to create an account. This task will check if an email InfoHound has discovered has an account in: Twitter, Adobe, Facebook, Imgur, Mewe, Parler, Rumble, Snapchat, Wordpress and/or Duolingo", "type":"Analysis"}, - {"name_id":"checkBreachTask","name":"Check Breach", "description":"This task checks Firefox Monitor service to see if an email has been found in a data breach. Although it is a free service, it has a limitation of 10 queries per day. If Leak-Lookup API key is set, it also checks it.", "type":"Analysis"}] + {"name_id":"checkBreachTask","name":"Check Breach", "description":"This task checks Firefox Monitor service to see if an email has been found in a data breach. Although it is a free service, it has a limitation of 10 queries per day. 
If Leak-Lookup API key is set, it also checks it.", "type":"Analysis"}, + {"name_id":"summarize_profile","name":"AI-Powered Profile Analisys", "description":"You can use the profile analysis task to employ an AI-powered tool that examines the metadata and creates a description for you.", "type":"Analysis"}] for task in tasks: try: Tasks.objects.get_or_create(tid=task["name_id"], name=task["name"], description=task["description"], task_type=task["type"], custom=False, domain_id=domain_id) diff --git a/infohound/views.py b/infohound/views.py index f32ae46..bc5dd9e 100644 --- a/infohound/views.py +++ b/infohound/views.py @@ -81,6 +81,8 @@ def people_all(request): p["name"] = entry.name p["phones"] = len(entry.phones) p["accounts"] = 0 + p["ocupation_summary"] = entry.ocupation_summary + p["url_img"] = entry.url_img user = Usernames.objects.filter(people=entry, domain_id=domain_id) emails = Emails.objects.filter(people=entry, domain_id=domain_id) for em in user.iterator(): @@ -194,7 +196,6 @@ def get_emails_stats(request, domain_id): def get_available_tasks(request): infohound.utils.load_tasks(request.GET['domain_id']) infohound.utils.load_custom_tasks(request.GET['domain_id']) - tasks = [] queryset = Tasks.objects.filter(domain_id=request.GET['domain_id']).order_by('id') for entry in queryset.iterator(): @@ -205,12 +206,8 @@ def get_available_tasks(request): data["description"] = entry.description data["type"] = entry.task_type data["custom"] = entry.custom - # if infohound.infohound_config.OPENAI_API_KEY == "": - data["ai"] = False - if entry.last_execution: - data["last_execution"] = entry.last_execution.strftime("%d/%m/%y %H:%M") - if entry.celery_id: - data["state"] = AsyncResult(entry.celery_id).state + data["last_execution"] = entry.last_execution.strftime("%d/%m/%y %H:%M") if (entry.last_execution) else None + data["state"] = AsyncResult(entry.celery_id).state if (entry.celery_id) else None tasks.append(data) return JsonResponse(tasks, safe=False) diff --git a/infohound_project/settings.py b/infohound_project/settings.py index 63a4eca..6e4520e 100644 --- a/infohound_project/settings.py +++ b/infohound_project/settings.py @@ -134,3 +134,6 @@ #------------- CELERY ------------- CELERY_BROKER_URL = 'redis://redis:6379' CELERY_RESULT_BACKEND = 'redis://redis:6379' + +#------------- OLLAMA ------------- +OLLAMA_URL = 'ollama://ollama:11434' diff --git a/requirements.txt b/requirements.txt index ad4a4b8..d1cc55b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -21,3 +21,4 @@ svgwrite==1.4.3 textract==1.6.5 trio==0.22.0 networkx +langchain From 56f0f8ac108651b0f3fe5b164f64990485f7d0c2 Mon Sep 17 00:00:00 2001 From: Marcos Casado Date: Wed, 31 Jan 2024 18:59:45 +0100 Subject: [PATCH 03/34] FIX socialIcons Frontend --- infohound/static/infohound/js/index.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/infohound/static/infohound/js/index.js b/infohound/static/infohound/js/index.js index cbd2b7d..84f9557 100644 --- a/infohound/static/infohound/js/index.js +++ b/infohound/static/infohound/js/index.js @@ -208,7 +208,7 @@ function loadPeople() {

- + ${socialIcons}
${person.id} From 1e796f1b4282e0b5ed87406ba366eeb38d7e06f6 Mon Sep 17 00:00:00 2001 From: Xavier Marrugat Date: Tue, 6 Feb 2024 12:55:56 +0100 Subject: [PATCH 04/34] Delete OPENAI API Key parameter --- infohound/infohound_config.sample.py | 1 - 1 file changed, 1 deletion(-) diff --git a/infohound/infohound_config.sample.py b/infohound/infohound_config.sample.py index 09cbd3f..f202376 100644 --- a/infohound/infohound_config.sample.py +++ b/infohound/infohound_config.sample.py @@ -14,4 +14,3 @@ LEAK_LOOKUP_KEY = "" GOOGLE_API_KEY = "" GOOGLE_ID = "" -OPENAI_API_KEY = "" From baf16c580f84dde462056965fe57abca939517af Mon Sep 17 00:00:00 2001 From: Xavier Marrugat Date: Tue, 6 Feb 2024 13:02:48 +0100 Subject: [PATCH 05/34] Update utils.py --- infohound/utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/infohound/utils.py b/infohound/utils.py index 5688b01..e4bead2 100644 --- a/infohound/utils.py +++ b/infohound/utils.py @@ -11,13 +11,13 @@ def load_tasks(domain_id): tasks = [ # RETRIEVAL - {"name_id":"getWhoisInfoTask","name":"Get Whois Information", "description":"Get revelant information from Whois register.", "type":"Retrieve"}, + {"name_id":"getWhoisInfoTask","name":"Get Whois Information", "description":"Get relevant information from Whois register.", "type":"Retrieve"}, {"name_id":"getDNSRecordsTask","name":"Get DNS Records", "description":"This task queries the DNS.", "type":"Retrieve"}, {"name_id":"getSubdomainsTask","name":"Get Subdomains", "description":"This task uses Alienvault OTX API, CRT.sh and HackerTarget as data sources to discover cached subdomains.", "type":"Retrieve"}, {"name_id":"getSubdomainsFromURLSTask","name":"Get Subdomains from URLs", "description":"Once some tasks have been performed, the URLs table will have a lot of entries. This task will check all the URLS in order to find new subdomains.", "type":"Retrieve"}, {"name_id":"getURLsTask","name":"Get URLs", "description":"It searches all URLs cached by Wayback Machine and saves them into the database. This will later help to discover other data entities like files or subdomains.", "type":"Retrieve"}, {"name_id":"getFilesFromURLsTask","name":"Get Files from URLs", "description":"It loops through the URLs database table in order to find files and store them to the Files database. The files that will be retrieved are: doc, docx, ppt, pptx, pps, ppsx, xls, xlsx, odt, ods, odg, odp, sxw, sxc, sxi, pdf, wpd, svg, indd, rdp, ica, zip, rar", "type":"Retrieve"}, - {"name_id":"findEmailsTask","name":"Find Emails", "description":"It lookes for emails using queries to Google and Bing.", "type":"Retrieve"}, + {"name_id":"findEmailsTask","name":"Find Emails", "description":"It looks for emails using queries to Google and Bing.", "type":"Retrieve"}, {"name_id":"findSocialProfilesByEmailTask","name":"Find People from Emails", "description":"Once some emails have been found it can be useful to discover the person behind them. Also, it finds usernames from that people.", "type":"Retrieve"}, {"name_id":"findEmailsFromURLsTask","name":"Find Emails From URLs", "description":"Sometimes, the discoverd URLs can contain sentive information. This tasks retrive all the emails from URL paths.", "type":"Retrieve"}, {"name_id":"executeDorksTask","name":"Execute Dorks", "description":"It will execute the dorks defined in the dorks folder. 
Remember to grup the dorks by categories (filename) so you can later understand the objectives of the dorks.", "type":"Retrieve"}, From f55dca5756b6d0692d03e0f72cce22737f84d076 Mon Sep 17 00:00:00 2001 From: Xavier Marrugat Date: Tue, 6 Feb 2024 13:11:23 +0100 Subject: [PATCH 06/34] Update ollama.py --- infohound/tool/ai_assistant/ollama.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/infohound/tool/ai_assistant/ollama.py b/infohound/tool/ai_assistant/ollama.py index 0b7780a..c088144 100644 --- a/infohound/tool/ai_assistant/ollama.py +++ b/infohound/tool/ai_assistant/ollama.py @@ -1,4 +1,4 @@ -from langchain.llms import Ollama +from langchain_community.llms import Ollama from infohound_project import settings def ollama_flexible_prompt(in_prompt): @@ -12,4 +12,4 @@ def ollama_flexible_prompt(in_prompt): return res except Exception as e: print(f"Error en la llamada a Ollama: {e}") - return None \ No newline at end of file + return None From 445602ff8d363afd921a7a73e85caf101dd59399 Mon Sep 17 00:00:00 2001 From: Xavier Marrugat Date: Tue, 6 Feb 2024 13:11:44 +0100 Subject: [PATCH 07/34] Update requirements.txt --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index d1cc55b..6e6e132 100644 --- a/requirements.txt +++ b/requirements.txt @@ -21,4 +21,4 @@ svgwrite==1.4.3 textract==1.6.5 trio==0.22.0 networkx -langchain +langchain_community From f28aa2e0e9b1e1bf6406e86c3512102bb828979f Mon Sep 17 00:00:00 2001 From: Xavier Marrugat Date: Tue, 6 Feb 2024 13:18:18 +0100 Subject: [PATCH 08/34] Update views.py --- infohound/views.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/infohound/views.py b/infohound/views.py index bc5dd9e..a51bcbb 100644 --- a/infohound/views.py +++ b/infohound/views.py @@ -11,7 +11,6 @@ from infohound.models import Domain, People, Files, Emails, Subdomains, URLs, Dorks, Results, Usernames, Tasks import infohound.tasks import infohound.utils -import infohound.infohound_config from django.utils import timezone import infohound.tool.retriever_modules.dorks as dorks from django.db import IntegrityError @@ -417,4 +416,4 @@ def export_all_to_CSV(request, domain_id): return JsonResponse(data, status=200) - \ No newline at end of file + From fcc173717c0942d21c23f60bb8e6295d81209848 Mon Sep 17 00:00:00 2001 From: Xavier Marrugat Date: Tue, 6 Feb 2024 13:20:27 +0100 Subject: [PATCH 09/34] Update docker-compose.yml --- docker-compose.yml | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 0c71192..ddaf99f 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -56,15 +56,16 @@ services: image: ollama/ollama:latest ports: - '11434:11434' - environment: - - gpus=all - deploy: - resources: - reservations: - devices: - - driver: nvidia - count: 1 - capabilities: [gpu] + # Uncomment if you want to use GPU. 
More info: https://ollama.ai/blog/ollama-is-now-available-as-an-official-docker-image + #environment: + # - gpus=all + #deploy: + # resources: + # reservations: + # devices: + # - driver: nvidia + # count: 1 + # capabilities: [gpu] volumes: postgres_data: From 0e5261bf8c48fcaad0d9846893c7f7e58f92675a Mon Sep 17 00:00:00 2001 From: Xavier Marrugat Date: Tue, 6 Feb 2024 13:50:46 +0100 Subject: [PATCH 10/34] Update models.py --- infohound/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/infohound/models.py b/infohound/models.py index 5eb5f07..eb115a7 100644 --- a/infohound/models.py +++ b/infohound/models.py @@ -11,10 +11,10 @@ class People(models.Model): name = models.CharField(max_length=255) phones = models.JSONField(default=list, null=True) social_profiles = models.JSONField(default=list) - source = models.CharField(max_length=255) ocupation_summary = models.TextField(default="This profile doesn't have a description yet. You can use the profile analysis task to employ an AI-powered tool that examines the metadata and creates a description for you.") raw_metadata = models.TextField (default=None) url_img = models.TextField(default="https://static.thenounproject.com/png/994628-200.png") + source = models.CharField(max_length=255) domain = models.ForeignKey(Domain, on_delete=models.CASCADE) # TO-DO: change spoofable to allow 3 states From 7297e6c5ce974f8e06b0674d5ff95e6387198d6c Mon Sep 17 00:00:00 2001 From: Xavier Marrugat Date: Tue, 6 Feb 2024 14:12:08 +0100 Subject: [PATCH 11/34] Update people.py --- infohound/tool/retriever_modules/people.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/infohound/tool/retriever_modules/people.py b/infohound/tool/retriever_modules/people.py index 4a8e749..2208ef0 100644 --- a/infohound/tool/retriever_modules/people.py +++ b/infohound/tool/retriever_modules/people.py @@ -13,7 +13,7 @@ def findPeopleFromGoogle(domain_id): results = google_data.discoverPeople(query) for result in results: try: - People.objects.get_or_create(name=result[0], social_profiles=results[1], raw_metadata=result[2], url_img=result[3], source="Google", domain_id=domain_id) + People.objects.get_or_create(name=result[0], social_profiles=result[1], raw_metadata=result[2], url_img=result[3], source="Google", domain_id=domain_id) except IntegrityError as e: pass From 970dd8cc05901aec926cc398b5ed98bfbaa2adea Mon Sep 17 00:00:00 2001 From: Xavier Marrugat Date: Tue, 6 Feb 2024 14:18:57 +0100 Subject: [PATCH 12/34] Update google_data.py --- infohound/tool/data_sources/google_data.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/infohound/tool/data_sources/google_data.py b/infohound/tool/data_sources/google_data.py index cb9ef4c..b914e02 100644 --- a/infohound/tool/data_sources/google_data.py +++ b/infohound/tool/data_sources/google_data.py @@ -83,9 +83,9 @@ def discoverPeople (query): print("Added: " + name) total_gathered = total_gathered + 1 except KeyError as e: - print(f"Error: La clave '{e.args[0]}' no existe en la estructura de datos.") + print(f"Error: The key '{e.args[0]}' is not present in the results.") except Exception as e: - print(f"Error inesperado: {str(e)}") + print(f"Unexpected error: {str(e)}") else: results = False start = start + 10 @@ -222,4 +222,4 @@ def discoverSocialMediaByDorks(domain,email): return data - \ No newline at end of file + From bdc65c9d75fbd1d956ac13d11b397cefdaab58e0 Mon Sep 17 00:00:00 2001 From: Xavier Marrugat Date: Tue, 6 Feb 2024 15:02:54 +0100 Subject: [PATCH 13/34] Update 
index.js --- infohound/static/infohound/js/index.js | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/infohound/static/infohound/js/index.js b/infohound/static/infohound/js/index.js index 84f9557..9bf63b7 100644 --- a/infohound/static/infohound/js/index.js +++ b/infohound/static/infohound/js/index.js @@ -210,9 +210,7 @@ function loadPeople() {
${socialIcons}
-
- ${person.id} -
+
${person.id}
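For context on the people.py fixes around this point: google_data.discoverPeople() (added in PATCH 02) returns a list of 4-tuples, and the indexing of those tuples is what PATCH 11 adjusted and what PATCH 14, next, finishes by wrapping the profile URL in a list. An illustrative element, with hypothetical values; the real ones come from the Google Custom Search response:

    # Hypothetical values, for shape only.
    result = (
        "Jane Doe",                                   # [0] name ("first last")
        "https://www.linkedin.com/in/jane-doe-1234",  # [1] profile URL
        '{"link": "...", "pagemap": {}}',             # [2] raw item, json.dumps(item)
        "https://example.com/jane.png",               # [3] cse_image src
    )
    name, url, raw_metadata, url_img = result
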
From c04ad6f256d2d31c941f2456e1b4aa3808a9d0d9 Mon Sep 17 00:00:00 2001 From: Xavier Marrugat Date: Tue, 6 Feb 2024 15:13:19 +0100 Subject: [PATCH 14/34] Update people.py --- infohound/tool/retriever_modules/people.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/infohound/tool/retriever_modules/people.py b/infohound/tool/retriever_modules/people.py index 2208ef0..959fbbe 100644 --- a/infohound/tool/retriever_modules/people.py +++ b/infohound/tool/retriever_modules/people.py @@ -13,7 +13,7 @@ def findPeopleFromGoogle(domain_id): results = google_data.discoverPeople(query) for result in results: try: - People.objects.get_or_create(name=result[0], social_profiles=result[1], raw_metadata=result[2], url_img=result[3], source="Google", domain_id=domain_id) + People.objects.get_or_create(name=result[0], social_profiles=[result[1]], raw_metadata=result[2], url_img=result[3], source="Google", domain_id=domain_id) except IntegrityError as e: pass From 7c5e22cdf476b2833634e5d5bca8fab73a2e9880 Mon Sep 17 00:00:00 2001 From: Xavier Marrugat Date: Tue, 6 Feb 2024 15:48:41 +0100 Subject: [PATCH 15/34] Update people.py --- infohound/tool/retriever_modules/people.py | 135 ++++++++------------- 1 file changed, 52 insertions(+), 83 deletions(-) diff --git a/infohound/tool/retriever_modules/people.py b/infohound/tool/retriever_modules/people.py index 959fbbe..b810bb0 100644 --- a/infohound/tool/retriever_modules/people.py +++ b/infohound/tool/retriever_modules/people.py @@ -7,88 +7,57 @@ def findPeopleFromGoogle(domain_id): - domain = Domain.objects.get(id=domain_id).domain - company = domain.split(".")[0] - query = f'intitle:"{company}" site:"linkedin.com"' - results = google_data.discoverPeople(query) - for result in results: - try: - People.objects.get_or_create(name=result[0], social_profiles=[result[1]], raw_metadata=result[2], url_img=result[3], source="Google", domain_id=domain_id) - except IntegrityError as e: - pass + domain = Domain.objects.get(id=domain_id).domain + company = domain.split(".")[0] + query = f'intitle:"{company}" site:"linkedin.com"' + results = google_data.discoverPeople(query) + for result in results: + try: + p, created = People.objects.get_or_create(name=result[0], social_profiles=[result[1]], raw_metadata=result[2], url_img=result[3], source="Google", domain_id=domain_id) + if result[1]: + username = result[1].split('/')[4] + username = re.split('-\\d+', username)[0] + u, created = Usernames.objects.get_or_create(people=p, username=username, source="Google", domain_id=domain_id) + except IntegrityError as e: + pass def findSocialProfilesByEmail(domain_id): - queryset = Emails.objects.filter(people_id__isnull=True, domain_id=domain_id) - domain = Domain.objects.get(id=domain_id).domain - for entry in queryset.iterator(): - usernames_data = [] - email = entry.email - print("Testing: " + email) - - # TO-DO: check if Bing works - #for l in bing_data.discoverSocialMedia(domain,email): - # if l not in data: - # data.append(l) - - results = google_data.discoverSocialMediaByDorks(domain,email) - if results["links"] != []: - for link in results["links"]: - try: - username = link.split("/")[-1] - if "linkedin" in link: - username = re.split('-\\d+', username)[0] - u, created = Usernames.objects.get_or_create(username=username, source="Google", domain_id=domain_id) - if created: - usernames_data.append(username) - except IntegrityError as e: - pass - - try: - p, created = People.objects.get_or_create(name=results["name"], social_profiles=results["links"], 
source="Google", domain_id=domain_id) - except IntegrityError as e: - pass - - try: - u, created = Usernames.objects.get_or_create(username=email.split("@")[0], source="Google", domain_id=domain_id) - usernames_data.append(email.split("@")[0]) - except IntegrityError as e: - pass - - Emails.objects.filter(email=email, domain_id=domain_id).update(people=p) - Usernames.objects.filter(username__in=usernames_data, domain_id=domain_id).update(people=p) - - - -# LEGACY FUNCTION -""" -def findSocialProfilesByEmail(domain_id): - queryset = Emails.objects.filter(people_id__isnull=True, domain_id=domain_id) - domain = Domain.objects.get(id=domain_id).domain - for entry in queryset.iterator(): - usernames_data = [] - email = entry.email - print("Testing: " + email) - - # TO-DO: check if Bing works - #for l in bing_data.discoverSocialMedia(domain,email): - # if l not in data: - # data.append(l) - - results = google_data.discoverSocialMedia(domain,email) - if results["links"] != []: - for link in results["links"]: - username = Usernames(username=link.split("/")[-1], source="Google", domain_id=domain_id) - usernames_data.append(username) - try: - p, created = People.objects.get_or_create(name=results["name"], social_profiles=results["links"], source="Google", domain_id=domain_id) - except IntegrityError as e: - pass - - Emails.objects.filter(email=email).update(people=p) - try: - for cred in usernames_data: - cred.people = p - Usernames.objects.bulk_create(usernames_data) - except IntegrityError as e: - pass -""" + queryset = Emails.objects.filter(people_id__isnull=True, domain_id=domain_id) + domain = Domain.objects.get(id=domain_id).domain + for entry in queryset.iterator(): + usernames_data = [] + email = entry.email + print("Testing: " + email) + + # TO-DO: check if Bing works + #for l in bing_data.discoverSocialMedia(domain,email): + # if l not in data: + # data.append(l) + + results = google_data.discoverSocialMediaByDorks(domain,email) + if results["links"] != []: + for link in results["links"]: + try: + username = link.split("/")[-1] + if "linkedin" in link: + username = link.split("/")[4] + username = re.split('-\\d+', username)[0] + u, created = Usernames.objects.get_or_create(username=username, source="Google", domain_id=domain_id) + if created: + usernames_data.append(username) + except IntegrityError as e: + pass + + try: + p, created = People.objects.get_or_create(name=results["name"], social_profiles=results["links"], source="Google", domain_id=domain_id) + except IntegrityError as e: + pass + + try: + u, created = Usernames.objects.get_or_create(username=email.split("@")[0], source="Google", domain_id=domain_id) + usernames_data.append(email.split("@")[0]) + except IntegrityError as e: + pass + + Emails.objects.filter(email=email, domain_id=domain_id).update(people=p) + Usernames.objects.filter(username__in=usernames_data, domain_id=domain_id).update(people=p) From 7f736d389ae8ecc71fa827ab21bcf3eaec5a1503 Mon Sep 17 00:00:00 2001 From: Xavier Marrugat Date: Tue, 6 Feb 2024 16:21:06 +0100 Subject: [PATCH 16/34] Update people_analisys.py --- infohound/tool/analysis_modules/people_analisys.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/infohound/tool/analysis_modules/people_analisys.py b/infohound/tool/analysis_modules/people_analisys.py index 121d798..1d48e6d 100644 --- a/infohound/tool/analysis_modules/people_analisys.py +++ b/infohound/tool/analysis_modules/people_analisys.py @@ -14,4 +14,4 @@ def summarize_profile(domain_id): print ("Summary: " 
+entry.ocupation_summary) entry.save() except Exception as e: - print(f"Error inesperado: {str(e)}") \ No newline at end of file + print(f"Unexpected error: {str(e)}") From 614d15205cfbfdcaab77e7ddd58352ec26e1deaf Mon Sep 17 00:00:00 2001 From: Xavier Marrugat Date: Tue, 6 Feb 2024 16:22:02 +0100 Subject: [PATCH 17/34] Update ollama.py --- infohound/tool/ai_assistant/ollama.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/infohound/tool/ai_assistant/ollama.py b/infohound/tool/ai_assistant/ollama.py index c088144..eeaab85 100644 --- a/infohound/tool/ai_assistant/ollama.py +++ b/infohound/tool/ai_assistant/ollama.py @@ -1,11 +1,8 @@ from langchain_community.llms import Ollama -from infohound_project import settings +from infohound_project.settings import OLLAMA_URL, OLLAMA_MODEL def ollama_flexible_prompt(in_prompt): - BASE_URL = "http://172.26.0.3:11434" - MODEL = "llama2" - - ollama = Ollama(base_url=BASE_URL, model=MODEL) + ollama = Ollama(base_url=OLLAMA_URL, model=OLLAMA_MODEL) try: res = ollama(in_prompt) From fa375aa56d5863a1138866f7efdaf828d6960843 Mon Sep 17 00:00:00 2001 From: Xavier Marrugat Date: Tue, 6 Feb 2024 16:23:00 +0100 Subject: [PATCH 18/34] Update settings.py --- infohound_project/settings.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/infohound_project/settings.py b/infohound_project/settings.py index 6e4520e..89c315d 100644 --- a/infohound_project/settings.py +++ b/infohound_project/settings.py @@ -136,4 +136,5 @@ CELERY_RESULT_BACKEND = 'redis://redis:6379' #------------- OLLAMA ------------- -OLLAMA_URL = 'ollama://ollama:11434' +OLLAMA_URL = 'http://ollama:11434' +OLLAMA_MODEL = 'llama2' From e140edcf95016bebbbda068326ba99a0eaddd861 Mon Sep 17 00:00:00 2001 From: Xavier Marrugat Date: Tue, 6 Feb 2024 16:58:55 +0100 Subject: [PATCH 19/34] Update requirements.txt --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 6e6e132..64d747f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -21,4 +21,4 @@ svgwrite==1.4.3 textract==1.6.5 trio==0.22.0 networkx -langchain_community +ollama From 7123413ec2d0a623906baccb0c085294566c0165 Mon Sep 17 00:00:00 2001 From: Xavier Marrugat Date: Tue, 6 Feb 2024 17:00:42 +0100 Subject: [PATCH 20/34] Update ollama.py --- infohound/tool/ai_assistant/ollama.py | 28 +++++++++++++++++++-------- 1 file changed, 20 insertions(+), 8 deletions(-) diff --git a/infohound/tool/ai_assistant/ollama.py b/infohound/tool/ai_assistant/ollama.py index eeaab85..5994b01 100644 --- a/infohound/tool/ai_assistant/ollama.py +++ b/infohound/tool/ai_assistant/ollama.py @@ -1,12 +1,24 @@ -from langchain_community.llms import Ollama -from infohound_project.settings import OLLAMA_URL, OLLAMA_MODEL +from ollama import Client +from infohound_project.settings import OLLAMA_URL,OLLAMA_MODEL -def ollama_flexible_prompt(in_prompt): - ollama = Ollama(base_url=OLLAMA_URL, model=OLLAMA_MODEL) +def check_or_pull_model(client): + models = client.list() + present = False + for model in models["models"]: + if OLLAMA_MODEL == model["name"].split(":")[0]: + present = True + if not present: + client.pull(OLLAMA_MODEL) +def ollama_flexible_prompt(in_prompt): + client = Client(host=OLLAMA_URL) + check_or_pull_model(client) + desc = None try: - res = ollama(in_prompt) - return res + res = client.generate(model=OLLAMA_MODEL,prompt=in_prompt) except Exception as e: - print(f"Error en la llamada a Ollama: {e}") - return None + print(f"Could not call Ollama 
instance: {e}") + + if "response" in res: + desc = res["response"].strip() + return desc From cfb2029fe0ede25cf610d2932b91dd3ebf1df2da Mon Sep 17 00:00:00 2001 From: Xavier Marrugat Date: Wed, 7 Feb 2024 09:57:45 +0100 Subject: [PATCH 21/34] Update people_analisys.py --- infohound/tool/analysis_modules/people_analisys.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/infohound/tool/analysis_modules/people_analisys.py b/infohound/tool/analysis_modules/people_analisys.py index 1d48e6d..6dc5789 100644 --- a/infohound/tool/analysis_modules/people_analisys.py +++ b/infohound/tool/analysis_modules/people_analisys.py @@ -3,15 +3,15 @@ from infohound.tool.ai_assistant import ollama def summarize_profile(domain_id): - queryset = People.objects.filter(domain_id=domain_id) - + queryset = People.objects.filter(domain_id=domain_id, ocupation_summary__contains="This profile doesn't have a description yet") + for entry in queryset.iterator(): try: - summarize_prompt = "Summarize the ocupation of the person in just one pharagraph given the following data: " + summarize_prompt = "Summarize the ocupation of the person in just 150 words given the following data: " raw_data = entry.raw_metadata print ("Executing AI-Powered Profile Analisis of: " + entry.name) entry.ocupation_summary = ollama.ollama_flexible_prompt(summarize_prompt + raw_data) print ("Summary: " +entry.ocupation_summary) entry.save() except Exception as e: - print(f"Unexpected error: {str(e)}") + print(f"Error inesperado: {str(e)}") From 3adc7e6dedfdf387960441df946f404201119f16 Mon Sep 17 00:00:00 2001 From: Xavier Marrugat Date: Wed, 7 Feb 2024 12:28:26 +0100 Subject: [PATCH 22/34] Update google_data.py --- infohound/tool/data_sources/google_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/infohound/tool/data_sources/google_data.py b/infohound/tool/data_sources/google_data.py index b914e02..54981f7 100644 --- a/infohound/tool/data_sources/google_data.py +++ b/infohound/tool/data_sources/google_data.py @@ -144,7 +144,7 @@ def discoverSocialMedia(domain,email): scope = email.split("@")[1] url = f"https://www.google.com/search?q='{username}' {scope}" - cookies = {"CONSENT": "YES+srp.gws"} + cookies = {"CONSENT": "YES+","SOCS":"CAISHAgCEhJnd3NfMjAyNDAxMzEtMF9SQzQaAmVzIAEaBgiAkIuuBg"} try: user_agent = infohound_utils.getUserAgents() From 1e6ebc4a74b084b92006acca366334f5f0c68ac6 Mon Sep 17 00:00:00 2001 From: Xavier Marrugat Date: Wed, 7 Feb 2024 12:41:23 +0100 Subject: [PATCH 23/34] Update models.py --- infohound/models.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/infohound/models.py b/infohound/models.py index eb115a7..aa5102a 100644 --- a/infohound/models.py +++ b/infohound/models.py @@ -9,10 +9,10 @@ class Domain(models.Model): class People(models.Model): name = models.CharField(max_length=255) - phones = models.JSONField(default=list, null=True) + phones = models.JSONField(null=True,default=list) social_profiles = models.JSONField(default=list) ocupation_summary = models.TextField(default="This profile doesn't have a description yet. 
You can use the profile analysis task to employ an AI-powered tool that examines the metadata and creates a description for you.") - raw_metadata = models.TextField (default=None) + raw_metadata = models.TextField(null=True,default=None) url_img = models.TextField(default="https://static.thenounproject.com/png/994628-200.png") source = models.CharField(max_length=255) domain = models.ForeignKey(Domain, on_delete=models.CASCADE) From 75cba21ed208838f29fb71cbf520ac8b61e169e3 Mon Sep 17 00:00:00 2001 From: Xavier Marrugat Date: Wed, 7 Feb 2024 12:42:42 +0100 Subject: [PATCH 24/34] Update people.py --- infohound/tool/retriever_modules/people.py | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/infohound/tool/retriever_modules/people.py b/infohound/tool/retriever_modules/people.py index b810bb0..d0148f2 100644 --- a/infohound/tool/retriever_modules/people.py +++ b/infohound/tool/retriever_modules/people.py @@ -47,17 +47,15 @@ def findSocialProfilesByEmail(domain_id): usernames_data.append(username) except IntegrityError as e: pass - try: p, created = People.objects.get_or_create(name=results["name"], social_profiles=results["links"], source="Google", domain_id=domain_id) + try: + u, created = Usernames.objects.get_or_create(username=email.split("@")[0], source="Google", domain_id=domain_id) + usernames_data.append(email.split("@")[0]) + + Emails.objects.filter(email=email, domain_id=domain_id).update(people=p) + Usernames.objects.filter(username__in=usernames_data, domain_id=domain_id).update(people=p) + except IntegrityError as e: + pass except IntegrityError as e: pass - - try: - u, created = Usernames.objects.get_or_create(username=email.split("@")[0], source="Google", domain_id=domain_id) - usernames_data.append(email.split("@")[0]) - except IntegrityError as e: - pass - - Emails.objects.filter(email=email, domain_id=domain_id).update(people=p) - Usernames.objects.filter(username__in=usernames_data, domain_id=domain_id).update(people=p) From bfff3ad2a3f32b4de936b450038e923279846323 Mon Sep 17 00:00:00 2001 From: Xavier Marrugat Date: Wed, 7 Feb 2024 13:35:33 +0100 Subject: [PATCH 25/34] Update utils.py --- infohound/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/infohound/utils.py b/infohound/utils.py index e4bead2..35575d8 100644 --- a/infohound/utils.py +++ b/infohound/utils.py @@ -22,7 +22,7 @@ def load_tasks(domain_id): {"name_id":"findEmailsFromURLsTask","name":"Find Emails From URLs", "description":"Sometimes, the discoverd URLs can contain sentive information. This tasks retrive all the emails from URL paths.", "type":"Retrieve"}, {"name_id":"executeDorksTask","name":"Execute Dorks", "description":"It will execute the dorks defined in the dorks folder. Remember to grup the dorks by categories (filename) so you can later understand the objectives of the dorks.", "type":"Retrieve"}, {"name_id":"findEmailsFromDorksTask","name":"Find Emails From Dorks", "description":"By default, InfoHound has some dorks defined in order to discover emails. 
This task will look for them in the results obtained by the execution of the dorks.", "type":"Retrieve"},
-	{"name_id":"findPeopleFromGoogleTask","name":"Find People From Google", "description":"Uses the Google JSON API to find people who work in the company asociated to the domain", "type":"Retrieve"},
+	{"name_id":"findPeopleFromGoogleTask","name":"Find People From Google", "description":"Uses the Google JSON API to find people who work in the company asociated to the domain.", "type":"Retrieve"},
 	# ANALYSIS

From b07a8902749bb842229ee823555896e8dde1e66c Mon Sep 17 00:00:00 2001
From: Xavier Marrugat
Date: Wed, 7 Feb 2024 13:55:52 +0100
Subject: [PATCH 26/34] Delete infohound_diagram.jpg

---
 infohound_diagram.jpg | Bin 36635 -> 0 bytes
 1 file changed, 0 insertions(+), 0 deletions(-)
 delete mode 100644 infohound_diagram.jpg

diff --git a/infohound_diagram.jpg b/infohound_diagram.jpg
deleted file mode 100644
index e16f4fcf6016224611b103223b0857dbf4b7aff2..0000000000000000000000000000000000000000
GIT binary patch
[base85-encoded binary data for the deleted 36,635-byte JPEG omitted]
znK$ZN{-izMdhN=YJLgOF1oKc#zh_~?x17CaB`=gIiYiWDYsu-WXdaLlKZf)sEk7g+ zxYu97={&YDwRYabC3ik2ryZJLo|9GBHnHZ!k#C8m&l=_LpMLV!Z!KJ2yYX@3|Igq- z2i9pPHYu%#8h@WEdN54?Zq|V(gC1_^(xLe9$L!gsTgMxjx(IJ#x3rC$5+f^oi*GL} z{wAArgn)UuWdlWaK(r!mPv9FMRVpYC4xU((D(HH}+jMu$4t3p*y!;KkE6wNMh|7|? zFNci0A%8SzQ&=S~WWgxRYVoNFZ*SL44VwMAwc9yKPjK6t@M)KR^IREL<73PT%V}9O zqT-9#LO$IP@M+QYkyPu==~ovV7{m6w9`vgI*Lj>usZahA{|krz5yH*fDGFUd^{Vsw zhqN&Y!{Jw3rB?jmM)8@O$9nV54kEknw9G$Su>I_ufLXU2NAIgYo%pV;(ujK9??m$c z-0ojGzMAoW2ELfSwDdvhh+~{j#O>8r#dmU%%CK99T9$ijd)#Y?r+~;)Xb)zUF4;ds z_U>WKoOP^M4^Z{Tke~Y7?)ZB(+*m`!=zES@rfr+q{Ncf?f&1obc6D8OV0s6uXaZ(W z{H#?pYuNdMO@;esO(