-
-
Notifications
You must be signed in to change notification settings - Fork 3
/
Copy pathextractor.py
69 lines (64 loc) · 2.43 KB
/
extractor.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
# Extractor Lite
# This script is for example only
# How to create a local scraper that imports XML and exports CSV
import os
import csv
import uuid
import requests
from datetime import datetime
import xml.etree.ElementTree as ET
# URL for the GHDB XML file
URL = "https://gitlab.com/exploit-database/exploitdb/-/raw/main/ghdb.xml"
# Check if internet connection is available
def is_internet_available():
    """Return True if an outbound HTTP request to GitHub succeeds, else False.

    Used as a cheap connectivity probe before attempting the GHDB download.
    """
    try:
        # Bounded timeout so the probe cannot hang indefinitely on a dead link.
        requests.get("http://github.com/volkansah", timeout=5)
        return True
    except requests.RequestException:
        # Catch only network-related errors; a bare `except:` would also
        # swallow KeyboardInterrupt and SystemExit.
        return False
# Download the GHDB XML file if internet is available
def download_xml():
    """Ensure a ghdb.xml file exists in the working directory.

    Online: download the file from URL and overwrite any local copy.
    Offline: fall back to an existing local ghdb.xml, or interactively offer
    to create an empty skeleton; exits the program if the user declines.
    """
    if is_internet_available():
        print("Internet connection available. Downloading GHDB XML file...")
        # Timeout prevents an unbounded hang on a stalled connection.
        response = requests.get(URL, timeout=30)
        if response.status_code == 200:
            with open("ghdb.xml", "wb") as f:
                f.write(response.content)
        else:
            print(f"Failed to download GHDB XML file. Status code: {response.status_code}")
    else:
        print("Internet connection not available. Looking for local GHDB XML file...")
        if os.path.isfile("ghdb.xml"):
            print("Local GHDB XML file found.")
        else:
            print("Local GHDB XML file not found. Do you want to create a new one? (y/n)")
            answer = input()
            if answer.lower() == "y":
                with open("ghdb.xml", "w") as f:
                    f.write("<ghdb>\n</ghdb>")
                print("New GHDB XML file created.")
            else:
                print("No GHDB XML file created. Exiting...")
                # The `exit()` builtin is provided by the site module and is
                # meant for interactive use; raising SystemExit is the
                # portable way to terminate a script.
                raise SystemExit()
# Extract the relevant data from the GHDB XML file and write it to a CSV file
def extract_data(xml_path="ghdb.xml", csv_path="extractor_data.csv"):
    """Parse GHDB <entry> elements from *xml_path* and write them to *csv_path*.

    Each CSV row gets a fresh random UUID plus the entry's short description,
    its query (with any "inurl:" prefix text stripped), and its date parsed
    from YYYY-MM-DD. Entries missing any of the three fields (or with empty
    text) are skipped instead of crashing the whole export.

    Args:
        xml_path: Path of the GHDB XML file to read (default keeps the
            original hard-coded filename, so existing callers are unaffected).
        csv_path: Path of the CSV file to (over)write.

    Raises:
        FileNotFoundError / ET.ParseError: if *xml_path* is missing or invalid.
        ValueError: if an entry's date text is not in YYYY-MM-DD format.
    """
    tree = ET.parse(xml_path)
    root = tree.getroot()
    with open(csv_path, "w", newline="", encoding="utf-8") as csvfile:
        fieldnames = ["id", "name", "query", "date"]
        writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
        writer.writeheader()
        for entry in root.findall("entry"):
            query_el = entry.find("query")
            name_el = entry.find("shortDescription")
            date_el = entry.find("date")
            # Guard against malformed entries: the original code raised
            # AttributeError on a missing child element.
            if query_el is None or name_el is None or date_el is None:
                continue
            if query_el.text is None or name_el.text is None or date_el.text is None:
                continue
            # Unconditional replace is equivalent to the original
            # "if 'inurl:' in query" guard.
            query = query_el.text.replace("inurl:", "")
            date = datetime.strptime(date_el.text, "%Y-%m-%d")
            # `row_id` instead of `id` to avoid shadowing the builtin.
            row_id = str(uuid.uuid4())
            writer.writerow({"id": row_id, "name": name_el.text, "query": query, "date": date})
# Main function
def main():
    """Entry point: refresh the local GHDB XML file, then export it to CSV."""
    download_xml()
    extract_data()


if __name__ == "__main__":
    main()