Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix(graph): de-duplicate dorks and vulnerabilities #188

Merged
merged 4 commits into from
Sep 4, 2024
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
173 changes: 111 additions & 62 deletions web/api/serializers.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
from collections import defaultdict
from dashboard.models import *
from django.contrib.humanize.templatetags.humanize import (naturalday,
naturaltime)
naturaltime)
AnonymousWP marked this conversation as resolved.
Show resolved Hide resolved
from django.db.models import F, JSONField, Value
from recon_note.models import *
from reNgine.common_func import *
Expand Down Expand Up @@ -573,11 +574,14 @@ def get_children(self, history):
many=True,
context={'scan_history': history})

processed_subdomains = self.process_subdomains(subdomain_serializer.data)

email = Email.objects.filter(emails__in=scan_history)
email_serializer = VisualiseEmailSerializer(email, many=True)

dork = Dork.objects.filter(dorks__in=scan_history)
dork_serializer = VisualiseDorkSerializer(dork, many=True)
processed_dorks = self.process_dorks(dork_serializer.data)

employee = Employee.objects.filter(employees__in=scan_history)
employee_serializer = VisualiseEmployeeSerializer(employee, many=True)
Expand All @@ -587,69 +591,68 @@ def get_children(self, history):

return_data = []

if subdomain_serializer.data:
if processed_subdomains:
return_data.append({
'description': 'Subdomains',
'children': subdomain_serializer.data})

if email_serializer.data or employee_serializer.data or dork_serializer.data or metainfo:
osint_data = []
if email_serializer.data:
osint_data.append({
'description': 'Emails',
'children': email_serializer.data})
if employee_serializer.data:
osint_data.append({
'description': 'Employees',
'children': employee_serializer.data})
if dork_serializer.data:
osint_data.append({
'description': 'Dorks',
'children': dork_serializer.data})

if metainfo:
metainfo_data = []
usernames = (
metainfo
.annotate(description=F('author'))
.values('description')
.distinct()
.annotate(children=Value([], output_field=JSONField()))
.filter(author__isnull=False)
)

if usernames:
metainfo_data.append({
'description': 'Usernames',
'children': usernames})

software = (
metainfo
.annotate(description=F('producer'))
.values('description')
.distinct()
.annotate(children=Value([], output_field=JSONField()))
.filter(producer__isnull=False)
)

if software:
metainfo_data.append({
'description': 'Software',
'children': software})

os = (
metainfo
.annotate(description=F('os'))
.values('description')
.distinct()
.annotate(children=Value([], output_field=JSONField()))
.filter(os__isnull=False)
)

if os:
metainfo_data.append({
'description': 'OS',
'children': os})
'children': processed_subdomains})

osint_data = []
if email_serializer.data:
osint_data.append({
'description': 'Emails',
'children': email_serializer.data})
if employee_serializer.data:
osint_data.append({
'description': 'Employees',
'children': employee_serializer.data})
if processed_dorks:
osint_data.append({
'description': 'Dorks',
'children': processed_dorks})

if metainfo:
metainfo_data = []
usernames = (
metainfo
.annotate(description=F('author'))
.values('description')
.distinct()
.annotate(children=Value([], output_field=JSONField()))
.filter(author__isnull=False)
)

if usernames:
metainfo_data.append({
'description': 'Usernames',
'children': usernames})

software = (
metainfo
.annotate(description=F('producer'))
.values('description')
.distinct()
.annotate(children=Value([], output_field=JSONField()))
.filter(producer__isnull=False)
)

if software:
metainfo_data.append({
'description': 'Software',
'children': software})

os = (
metainfo
.annotate(description=F('os'))
.values('description')
.distinct()
.annotate(children=Value([], output_field=JSONField()))
.filter(os__isnull=False)
)

if os:
metainfo_data.append({
'description': 'OS',
'children': os})

if metainfo:
osint_data.append({
Expand All @@ -660,8 +663,54 @@ def get_children(self, history):
'description':'OSINT',
'children': osint_data})

if osint_data:
return_data.append({
'description':'OSINT',
'children': osint_data})

return return_data

def process_subdomains(self, subdomains):
    """De-duplicate the 'Vulnerabilities' subtree of each subdomain node.

    Each subdomain node may carry a child with description
    'Vulnerabilities' whose own children are severity groups, each
    holding vulnerability leaves. The same (description, severity)
    pair can appear multiple times; only the first occurrence is kept.

    Mutates ``subdomains`` in place and also returns it.

    :param subdomains: serialized subdomain tree nodes (list of dicts)
    :return: the same list, with duplicate vulnerabilities removed
    """
    # Fixed display order for rebuilt severity groups.
    severity_order = ['Critical', 'High', 'Medium', 'Low', 'Informational', 'Unknown']

    for subdomain in subdomains:
        if 'children' not in subdomain:
            continue

        # First occurrence wins, keyed by (vuln description, severity).
        # A plain dict is correct here; the previous defaultdict(list)
        # never used its factory and would have produced a wrong-typed
        # value if it ever had.
        unique_vulns = {}
        for child in subdomain['children']:
            if child.get('description') == 'Vulnerabilities':
                for severity_group in child['children']:
                    severity = severity_group['description']
                    for vuln in severity_group['children']:
                        unique_vulns.setdefault((vuln['description'], severity), vuln)

        # Reconstruct the severity groups, skipping empty severities.
        new_vuln_structure = []
        for severity in severity_order:
            severity_vulns = [v for (_, sev), v in unique_vulns.items() if sev == severity]
            if severity_vulns:
                new_vuln_structure.append({
                    'description': severity,
                    'children': severity_vulns
                })

        # Replace the old 'Vulnerabilities' subtree with the de-duplicated one.
        subdomain['children'] = [
            child for child in subdomain['children']
            if child.get('description') != 'Vulnerabilities'
        ]
        if new_vuln_structure:
            subdomain['children'].append({
                'description': 'Vulnerabilities',
                'children': new_vuln_structure
            })

    return subdomains

def process_dorks(self, dorks):
    """Drop duplicate dork entries, keeping the first occurrence.

    Two dorks are considered duplicates when both their description and
    their (optional) dork_type match. Input order is preserved.

    :param dorks: serialized dork dicts
    :return: new list with duplicates removed
    """
    seen = set()
    deduped = []
    for entry in dorks:
        key = (entry['description'], entry.get('dork_type', ''))
        if key in seen:
            continue
        seen.add(key)
        deduped.append(entry)
    return deduped

class SubdomainChangesSerializer(serializers.ModelSerializer):

Expand Down
53 changes: 43 additions & 10 deletions web/api/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import socket
import subprocess
from ipaddress import IPv4Network
from collections import defaultdict

import requests
import validators
Expand Down Expand Up @@ -1407,16 +1408,48 @@ def get(self, request, format=None):


class VisualiseData(APIView):
def get(self, request, format=None):
    """Return the serialized visualisation tree for one scan.

    Expects a ``scan_id`` query parameter; responds with the
    VisualiseDataSerializer output for the matching ScanHistory rows,
    or an empty response when no ``scan_id`` is supplied.
    """
    req = self.request
    scan_id = req.query_params.get('scan_id')
    if scan_id:
        # filter() (not get()) so a missing id yields an empty result
        # rather than raising DoesNotExist.
        mitch_data = ScanHistory.objects.filter(id=scan_id)
        serializer = VisualiseDataSerializer(mitch_data, many=True)
        return Response(serializer.data)
    else:
        return Response()

def get(self, request, format=None):
    """Return the visualisation tree for one scan, duplicates removed.

    Expects a ``scan_id`` query parameter; serializes the matching
    ScanHistory rows and strips duplicate vulnerabilities before
    responding. Responds empty when no ``scan_id`` is supplied.
    """
    scan_id = self.request.query_params.get('scan_id')
    if not scan_id:
        return Response()

    scan_qs = ScanHistory.objects.filter(id=scan_id)
    serialized = VisualiseDataSerializer(scan_qs, many=True).data

    # De-duplicate vulnerabilities before handing data to the client.
    processed_data = self.process_visualisation_data(serialized)

    return Response(processed_data)

def process_visualisation_data(self, data):
    """Remove duplicate vulnerabilities from serialized scan data.

    De-duplicates by (name, severity) per subdomain *name*, so that
    repeated entries for the same subdomain share one de-duplicated
    vulnerability list. The previous implementation de-duplicated each
    entry separately and then ``extend``-ed the groups together, which
    re-introduced duplicates whenever a subdomain name appeared more
    than once.

    Mutates and returns the first scan dict in ``data``.

    :param data: serializer output (list of scan dicts)
    :return: the first scan dict, or [] when data is empty
    """
    if not data:
        return []

    # NOTE(review): assumes only the first scan in data is visualised —
    # confirm against the caller.
    processed_data = data[0]
    subdomains = processed_data.get('subdomains', [])

    # Collect unique vulnerabilities per subdomain name across ALL
    # entries; first occurrence of each (name, severity) pair wins.
    vuln_by_subdomain = defaultdict(dict)
    for subdomain in subdomains:
        bucket = vuln_by_subdomain[subdomain['name']]
        for vuln in subdomain.get('vulnerabilities', []):
            bucket.setdefault((vuln['name'], vuln['severity']), vuln)

    # Write the de-duplicated lists back onto every entry.
    for subdomain in subdomains:
        subdomain['vulnerabilities'] = list(vuln_by_subdomain[subdomain['name']].values())

    return processed_data

class ListTechnology(APIView):
def get(self, request, format=None):
Expand Down
17 changes: 14 additions & 3 deletions web/static/custom/mitch.js
AnonymousWP marked this conversation as resolved.
Show resolved Hide resolved
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,15 @@ function visualise_scan_results(scan_id)
$.getJSON(`/api/queryAllScanResultVisualise/?scan_id=${scan_id}&format=json`, function(data) {
$('#visualisation-loader').empty();
$('#visualisation-filter').show();
var treeData = data[0];

// Vérifier si data est un tableau ou un objet
var treeData = Array.isArray(data) ? data[0] : data;

// Vérifier si treeData existe et a des enfants
if (!treeData || !treeData.children || treeData.children.length === 0) {
$('#visualisation-loader').html('<p>Aucune donnée à visualiser.</p>');
return;
}

// Calculate total nodes, max label length
var totalNodes = 0;
Expand All @@ -55,7 +63,10 @@ function visualise_scan_results(scan_id)
var duration = 750;
var root;

var subdomain_count = data[0]['children'][0]['children'].length;
// Trouver le nœud 'Subdomains' dans les enfants
var subdomainsNode = treeData.children.find(child => child.description === 'Subdomains');
var subdomain_count = subdomainsNode ? subdomainsNode.children.length : 0;

// size of the diagram
var viewerWidth = screen_width - 100;
var viewerHeight = screen_height + 500;
Expand Down Expand Up @@ -518,6 +529,6 @@ function visualise_scan_results(scan_id)

}).fail(function(){
$('#visualisation-loader').empty();
$("#visualisation-loader").append(`<h5 class="text-danger">Sorry, could not visualize.</h5>`);
$("#visualisation-loader").append(`<h5 class="text-danger">Désolé, impossible de visualiser.</h5>`);
});;
}
Loading