From 7cd181804cca8cd5d00760c22a03e0ffb875b17e Mon Sep 17 00:00:00 2001 From: Alexandre ZANNI <16578570+noraj@users.noreply.github.com> Date: Sun, 3 Oct 2021 23:54:08 +0200 Subject: [PATCH 001/262] not exposing postgres publicly --- docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index c0601829..c9e60722 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -10,7 +10,7 @@ services: - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} - POSTGRES_PORT=${POSTGRES_PORT} ports: - - "5432:5432" + - "127.0.0.1:5432:5432" volumes: - postgres_data:/var/lib/postgresql/data/ networks: From ccb6065f710560a6aced30876e091695c9b6b6cb Mon Sep 17 00:00:00 2001 From: Alexandre ZANNI <16578570+noraj@users.noreply.github.com> Date: Mon, 4 Oct 2021 00:00:22 +0200 Subject: [PATCH 002/262] do not expose the internal container either --- docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index c9e60722..8063377b 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -100,7 +100,7 @@ services: - tool_config:/root/.config - static_volume:/usr/src/app/staticfiles/ ports: - - "8000:8000" + - "127.0.0.1:8000:8000" depends_on: - db - celery From 90bbbfbed8ae0f611cef17bee93e821c4a60fc4c Mon Sep 17 00:00:00 2001 From: AnonymousWP <50231698+AnonymousWP@users.noreply.github.com> Date: Tue, 18 Apr 2023 10:39:57 +0200 Subject: [PATCH 003/262] ops(migrations): automate migration process --- Makefile | 3 +++ install.sh | 1 + make.bat | 2 ++ 3 files changed, 6 insertions(+) diff --git a/Makefile b/Makefile index 7e266075..930a7886 100644 --- a/Makefile +++ b/Makefile @@ -27,6 +27,9 @@ build: ## Build all services. username: ## Generate Username (Use only after make up). ${COMPOSE_PREFIX_CMD} docker-compose ${COMPOSE_ALL_FILES} exec web python3 manage.py createsuperuser +migrate: ## Apply migrations + ${COMPOSE_PREFIX_CMD} docker-compose ${COMPOSE_ALL_FILES} exec web python3 manage.py migrate + pull: ## Pull Docker images. docker login docker.pkg.github.com ${COMPOSE_PREFIX_CMD} docker-compose ${COMPOSE_ALL_FILES} pull diff --git a/install.sh b/install.sh index 4fe4be57..55feeff1 100755 --- a/install.sh +++ b/install.sh @@ -123,6 +123,7 @@ if [ "${failed}" -eq 0 ]; then echo "Creating an account" echo "#########################################################################" make username + make migrate tput setaf 2 && printf "\n%s\n" "Thank you for installing reNgine, happy recon!!" else diff --git a/make.bat b/make.bat index 0a99cb6c..a1579788 100644 --- a/make.bat +++ b/make.bat @@ -15,6 +15,8 @@ if "%1" == "up" docker-compose %COMPOSE_ALL_FILES% up -d --build %SERVICES% if "%1" == "build" docker-compose %COMPOSE_ALL_FILES% build %SERVICES% :: Generate Username (Use only after make up). if "%1" == "username" docker-compose %COMPOSE_ALL_FILES% exec web python3 manage.py createsuperuser +:: Apply migrations +if "%1" == "migrate" docker-compose %COMPOSE_ALL_FILES% exec web python3 manage.py migrate :: Pull Docker images. if "%1" == "pull" docker login docker.pkg.github.com & docker-compose %COMPOSE_ALL_FILES% pull :: Down all services. 
From 9c9fdecadcf4e2696e46beeb5f834402c55212bb Mon Sep 17 00:00:00 2001 From: AnonymousWP <50231698+AnonymousWP@users.noreply.github.com> Date: Tue, 18 Apr 2023 11:13:12 +0200 Subject: [PATCH 004/262] fix(migrations): add `sudo` Without sudo, it results in an error --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 930a7886..f0a1a6d1 100644 --- a/Makefile +++ b/Makefile @@ -28,7 +28,7 @@ username: ## Generate Username (Use only after make up). ${COMPOSE_PREFIX_CMD} docker-compose ${COMPOSE_ALL_FILES} exec web python3 manage.py createsuperuser migrate: ## Apply migrations - ${COMPOSE_PREFIX_CMD} docker-compose ${COMPOSE_ALL_FILES} exec web python3 manage.py migrate + ${COMPOSE_PREFIX_CMD} sudo docker-compose ${COMPOSE_ALL_FILES} exec web python3 manage.py migrate pull: ## Pull Docker images. docker login docker.pkg.github.com From 1656539c564a38e3514ffadc1f37b3cbcfcc102e Mon Sep 17 00:00:00 2001 From: AnonymousWP <50231698+AnonymousWP@users.noreply.github.com> Date: Tue, 18 Apr 2023 12:20:46 +0200 Subject: [PATCH 005/262] fix(migrations): revert sudo and use `echo` Using `make migrate` right away did not work, tried multiple things. --- Makefile | 2 +- install.sh | 6 +----- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/Makefile b/Makefile index f0a1a6d1..930a7886 100644 --- a/Makefile +++ b/Makefile @@ -28,7 +28,7 @@ username: ## Generate Username (Use only after make up). ${COMPOSE_PREFIX_CMD} docker-compose ${COMPOSE_ALL_FILES} exec web python3 manage.py createsuperuser migrate: ## Apply migrations - ${COMPOSE_PREFIX_CMD} sudo docker-compose ${COMPOSE_ALL_FILES} exec web python3 manage.py migrate + ${COMPOSE_PREFIX_CMD} docker-compose ${COMPOSE_ALL_FILES} exec web python3 manage.py migrate pull: ## Pull Docker images. docker login docker.pkg.github.com diff --git a/install.sh b/install.sh index 55feeff1..9e7a41c4 100755 --- a/install.sh +++ b/install.sh @@ -60,7 +60,6 @@ else tput setaf 2; echo "Docker installed!!!" fi - echo " " tput setaf 4; echo "#########################################################################" @@ -75,7 +74,6 @@ else tput setaf 2; echo "docker-compose installed!!!" fi - echo " " tput setaf 4; echo "#########################################################################" @@ -105,8 +103,6 @@ else exit 1 fi - - echo " " tput setaf 4; echo "#########################################################################" @@ -123,9 +119,9 @@ if [ "${failed}" -eq 0 ]; then echo "Creating an account" echo "#########################################################################" make username - make migrate tput setaf 2 && printf "\n%s\n" "Thank you for installing reNgine, happy recon!!" + echo "In case you have unapplied migrations (see above in red), run 'make migrate'" else tput setaf 1 && printf "\n%s\n" "reNgine installation failed!!" 
fi From f51c1737a56ab1378321b4b5df03332a87942703 Mon Sep 17 00:00:00 2001 From: Raynald Date: Wed, 18 Oct 2023 14:10:04 +0200 Subject: [PATCH 006/262] Add stack trace into male logs if DEBUG True --- web/reNgine/settings.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/web/reNgine/settings.py b/web/reNgine/settings.py index 4fb209ac..954fc11a 100644 --- a/web/reNgine/settings.py +++ b/web/reNgine/settings.py @@ -201,6 +201,11 @@ 'version': 1, 'disable_existing_loggers': True, 'handlers': { + 'file': { + 'level': 'ERROR', + 'class': 'logging.FileHandler', + 'filename': 'errors.log', + }, 'null': { 'class': 'logging.NullHandler' }, @@ -240,6 +245,11 @@ } }, 'loggers': { + 'django': { + 'handlers': ['file'], + 'level': 'ERROR' if DEBUG else 'CRITICAL', + 'propagate': True, + }, '': { 'handlers': ['brief'], 'level': 'DEBUG' if DEBUG else 'INFO', From 1c9b379cc858b6b6b1cd0bc1df867b69e0dca8d6 Mon Sep 17 00:00:00 2001 From: Raynald Date: Thu, 19 Oct 2023 10:42:55 +0200 Subject: [PATCH 007/262] Update README with debug procedure --- README.md | 46 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 46 insertions(+) diff --git a/README.md b/README.md index 679c8e13..99ce920f 100644 --- a/README.md +++ b/README.md @@ -412,6 +412,52 @@ You can also [join our Discord channel #development](https://discord.gg/JuhHdHTt ![-----------------------------------------------------](https://raw.githubusercontent.com/andreasbm/readme/master/assets/lines/aqua.png) +### Submitting issues + +You can submit issue to the project but you should do it in a way that helps developers to resolve it as quickly as possible. + +For that, you need to add as much valuable informations as possible. + +You can have this valuable informations by doing this steps : + +- Go to the root of the git cloned project +- Edit `web/entrypoint.sh` and add at the top `export DEBUG=1` +This should give you this result + ```python + #!/bin/bash + + export DEBUG=1 + + python3 manage.py migrate + python3 manage.py runserver 0.0.0.0:8000 + + exec "$@" + ``` +- Restart the web container `docker-compose restart web` +- To deactivate set **DEBUG** to **0** and restart web container again + +Then, with **DEBUG** set to **1**, in the `make logs` output you could see the full stack trace to debug reNgine. + +Example with the tool arsenal version check API bug. +``` +web_1 | File "/usr/local/lib/python3.10/dist-packages/celery/app/task.py", line 411, in __call__ +web_1 | return self.run(*args, **kwargs) +web_1 | TypeError: run_command() got an unexpected keyword argument 'echo' +``` +Now you know the real error is `TypeError: run_command() got an unexpected keyword argument 'echo'` + +And you can post the full stack trace to your newly created issue to help developers to track the root cause of the bug and correct the bug easily + +__Activating debug like this also give you the full stack trace in the browser__ instead of a 500 Error without any details. +So don't forget to open the developer console and check for any XHR request with 500 error. +If there's any check the response of this request to get your detailed error. + + + +Happy issuing ;) + +![-----------------------------------------------------](https://raw.githubusercontent.com/andreasbm/readme/master/assets/lines/aqua.png) + ### First-time Open Source contributors Please note that reNgine is beginner friendly. If you have never done open-source before, we encourage you to do so. 
**We will be happy and proud of your first PR ever.** From d197511aa10d1cff35975ac8366a3a403876a095 Mon Sep 17 00:00:00 2001 From: Raynald Date: Thu, 19 Oct 2023 12:59:49 +0200 Subject: [PATCH 008/262] Filter by important subdomains in API call --- web/api/views.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/web/api/views.py b/web/api/views.py index 8fbaaa66..b4a101ba 100644 --- a/web/api/views.py +++ b/web/api/views.py @@ -1803,6 +1803,9 @@ def get_queryset(self): subdomains = Subdomain.objects.filter(target_domain__project__slug=project) + if 'is_important' in req.query_params: + subdomains = subdomains.filter(is_important=True) + if target_id: self.queryset = ( subdomains From 619377b2f9cfbc0a2b9012eebccc56a7a81f5b78 Mon Sep 17 00:00:00 2001 From: Raynald Date: Thu, 19 Oct 2023 16:27:17 +0200 Subject: [PATCH 009/262] Add important subdomains button --- web/templates/base/_items/subdomain_tab_content.html | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/web/templates/base/_items/subdomain_tab_content.html b/web/templates/base/_items/subdomain_tab_content.html index 6b7e5808..3c2bb7a2 100644 --- a/web/templates/base/_items/subdomain_tab_content.html +++ b/web/templates/base/_items/subdomain_tab_content.html @@ -93,7 +93,11 @@ - + + + + + From 795ddb5a7c751843e8b4aa119e0f9b02383da6d6 Mon Sep 17 00:00:00 2001 From: Raynald Date: Fri, 20 Oct 2023 00:45:32 +0200 Subject: [PATCH 010/262] Add listener on is_important button --- web/startScan/templates/startScan/detail_scan.html | 4 ++++ web/startScan/templates/startScan/subdomains.html | 4 ++++ web/targetApp/templates/target/summary.html | 4 ++++ 3 files changed, 12 insertions(+) diff --git a/web/startScan/templates/startScan/detail_scan.html b/web/startScan/templates/startScan/detail_scan.html index 5d12d153..155e14aa 100644 --- a/web/startScan/templates/startScan/detail_scan.html +++ b/web/startScan/templates/startScan/detail_scan.html @@ -2418,6 +2418,10 @@

Date: Tue, 24 Oct 2023 11:00:57 +0530 Subject: [PATCH 011/262] Update README.md Co-authored-by: AnonymousWP <50231698+AnonymousWP@users.noreply.github.com> --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 99ce920f..0f3e4d2b 100644 --- a/README.md +++ b/README.md @@ -414,7 +414,7 @@ You can also [join our Discord channel #development](https://discord.gg/JuhHdHTt ### Submitting issues -You can submit issue to the project but you should do it in a way that helps developers to resolve it as quickly as possible. +You can submit issues related to this project, but you should do it in a way that helps developers to resolve it as quickly as possible. For that, you need to add as much valuable informations as possible. From accc7a7b8d517931a0c22c490eae8f06224a7f48 Mon Sep 17 00:00:00 2001 From: Raynald Date: Tue, 24 Oct 2023 17:01:13 +0200 Subject: [PATCH 012/262] README modifications --- README.md | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index 0f3e4d2b..a8e88831 100644 --- a/README.md +++ b/README.md @@ -416,13 +416,14 @@ You can also [join our Discord channel #development](https://discord.gg/JuhHdHTt You can submit issues related to this project, but you should do it in a way that helps developers to resolve it as quickly as possible. -For that, you need to add as much valuable informations as possible. +For that, you need to add as much valuable information as possible. -You can have this valuable informations by doing this steps : +You can have this valuable information by following these steps: - Go to the root of the git cloned project -- Edit `web/entrypoint.sh` and add at the top `export DEBUG=1` +- Edit `web/entrypoint.sh` and add `export DEBUG=1` at the top This should give you this result + ```python #!/bin/bash @@ -433,12 +434,13 @@ This should give you this result exec "$@" ``` -- Restart the web container `docker-compose restart web` -- To deactivate set **DEBUG** to **0** and restart web container again +- Restart the web container: `docker-compose restart web` +- To deactivate, set **DEBUG** to **0** and restart the web container again Then, with **DEBUG** set to **1**, in the `make logs` output you could see the full stack trace to debug reNgine. Example with the tool arsenal version check API bug. + ``` web_1 | File "/usr/local/lib/python3.10/dist-packages/celery/app/task.py", line 411, in __call__ web_1 | return self.run(*args, **kwargs) @@ -448,9 +450,9 @@ Now you know the real error is `TypeError: run_command() got an unexpected keywo And you can post the full stack trace to your newly created issue to help developers to track the root cause of the bug and correct the bug easily -__Activating debug like this also give you the full stack trace in the browser__ instead of a 500 Error without any details. -So don't forget to open the developer console and check for any XHR request with 500 error. -If there's any check the response of this request to get your detailed error. +**Activating debug like this also give you the full stack trace in the browser** instead of an error 500 without any details. +So don't forget to open the developer console and check for any XHR request with error 500. +If there's any, check the response of this request to get your detailed error. 
From 0db2d2e6c7b83640d9e7a7ee91924b23d3d1a2b8 Mon Sep 17 00:00:00 2001 From: Raynald Date: Sat, 18 Nov 2023 15:50:42 +0100 Subject: [PATCH 013/262] Refactor CSS and add missing div --- web/templates/base/_items/top_nav.html | 1 - web/templates/base/base.html | 328 +++++++++++++------------ 2 files changed, 165 insertions(+), 164 deletions(-) diff --git a/web/templates/base/_items/top_nav.html b/web/templates/base/_items/top_nav.html index 76b59a53..dff468eb 100644 --- a/web/templates/base/_items/top_nav.html +++ b/web/templates/base/_items/top_nav.html @@ -1,6 +1,5 @@ {% load permission_tags %} -
From d6921028cb29bc7cc2f37898d4bc0226c62aa89c Mon Sep 17 00:00:00 2001 From: Anonymoussaurus <50231698+AnonymousWP@users.noreply.github.com> Date: Fri, 26 Jan 2024 13:27:40 +0100 Subject: [PATCH 071/262] chore: update version number to 2.0.3 --- docker-compose.dev.yml | 2 +- docker-compose.yml | 2 +- web/dashboard/templates/dashboard/index.html | 2 +- web/templates/base/_items/top_bar.html | 8 ++++---- web/templates/base/login.html | 2 +- 5 files changed, 8 insertions(+), 8 deletions(-) diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index 57fa28e0..4fbe8e53 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -98,7 +98,7 @@ services: - POSTGRES_HOST=${POSTGRES_HOST} # THIS IS A MUST FOR CHECKING UPDATE, EVERYTIME A COMMIT IS MERGED INTO # MASTER, UPDATE THIS!!! MAJOR.MINOR.PATCH https://semver.org/ - - RENGINE_CURRENT_VERSION='2.0.2' + - RENGINE_CURRENT_VERSION='2.0.3' volumes: - ./web:/usr/src/app - github_repos:/usr/src/github diff --git a/docker-compose.yml b/docker-compose.yml index af374d23..22046955 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -99,7 +99,7 @@ services: - DJANGO_SUPERUSER_PASSWORD=${DJANGO_SUPERUSER_PASSWORD} # THIS IS A MUST FOR CHECKING UPDATE, EVERYTIME A COMMIT IS MERGED INTO # MASTER, UPDATE THIS!!! MAJOR.MINOR.PATCH https://semver.org/ - - RENGINE_CURRENT_VERSION='2.0.2' + - RENGINE_CURRENT_VERSION='2.0.3' volumes: - ./web:/usr/src/app - github_repos:/usr/src/github diff --git a/web/dashboard/templates/dashboard/index.html b/web/dashboard/templates/dashboard/index.html index 6e5e98bd..caa3f3a2 100644 --- a/web/dashboard/templates/dashboard/index.html +++ b/web/dashboard/templates/dashboard/index.html @@ -17,7 +17,7 @@ {% endblock custom_js_css_link %} {% block breadcrumb_title %} -reNgine 2.0.2 +reNgine 2.0.3 {% endblock breadcrumb_title %} {% block main_content %} diff --git a/web/templates/base/_items/top_bar.html b/web/templates/base/_items/top_bar.html index f2addbc4..12c62531 100644 --- a/web/templates/base/_items/top_bar.html +++ b/web/templates/base/_items/top_bar.html @@ -170,18 +170,18 @@
Welcome {{user.get_username}}!
diff --git a/web/templates/base/login.html b/web/templates/base/login.html index 0e999dbe..a1931574 100644 --- a/web/templates/base/login.html +++ b/web/templates/base/login.html @@ -58,7 +58,7 @@

Login to reNgine

-

Current release: v2.0.2

+

Current release: v2.0.3

From f757da33fed77119c400eb1da8616467584fd3be Mon Sep 17 00:00:00 2001 From: Anonymoussaurus <50231698+AnonymousWP@users.noreply.github.com> Date: Fri, 26 Jan 2024 13:27:40 +0100 Subject: [PATCH 078/262] chore: update version number to 2.0.3 --- docker-compose.dev.yml | 2 +- docker-compose.yml | 2 +- web/dashboard/templates/dashboard/index.html | 2 +- web/templates/base/_items/top_bar.html | 8 ++++---- web/templates/base/login.html | 2 +- 5 files changed, 8 insertions(+), 8 deletions(-) diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index 57fa28e0..4fbe8e53 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -98,7 +98,7 @@ services: - POSTGRES_HOST=${POSTGRES_HOST} # THIS IS A MUST FOR CHECKING UPDATE, EVERYTIME A COMMIT IS MERGED INTO # MASTER, UPDATE THIS!!! MAJOR.MINOR.PATCH https://semver.org/ - - RENGINE_CURRENT_VERSION='2.0.2' + - RENGINE_CURRENT_VERSION='2.0.3' volumes: - ./web:/usr/src/app - github_repos:/usr/src/github diff --git a/docker-compose.yml b/docker-compose.yml index af374d23..22046955 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -99,7 +99,7 @@ services: - DJANGO_SUPERUSER_PASSWORD=${DJANGO_SUPERUSER_PASSWORD} # THIS IS A MUST FOR CHECKING UPDATE, EVERYTIME A COMMIT IS MERGED INTO # MASTER, UPDATE THIS!!! MAJOR.MINOR.PATCH https://semver.org/ - - RENGINE_CURRENT_VERSION='2.0.2' + - RENGINE_CURRENT_VERSION='2.0.3' volumes: - ./web:/usr/src/app - github_repos:/usr/src/github diff --git a/web/dashboard/templates/dashboard/index.html b/web/dashboard/templates/dashboard/index.html index 6e5e98bd..caa3f3a2 100644 --- a/web/dashboard/templates/dashboard/index.html +++ b/web/dashboard/templates/dashboard/index.html @@ -17,7 +17,7 @@ {% endblock custom_js_css_link %} {% block breadcrumb_title %} -reNgine 2.0.2 +reNgine 2.0.3 {% endblock breadcrumb_title %} {% block main_content %} diff --git a/web/templates/base/_items/top_bar.html b/web/templates/base/_items/top_bar.html index f2addbc4..12c62531 100644 --- a/web/templates/base/_items/top_bar.html +++ b/web/templates/base/_items/top_bar.html @@ -170,18 +170,18 @@
Welcome {{user.get_username}}!
diff --git a/web/templates/base/login.html b/web/templates/base/login.html index 0e999dbe..a1931574 100644 --- a/web/templates/base/login.html +++ b/web/templates/base/login.html @@ -58,7 +58,7 @@

Login to reNgine

-

Current release: v2.0.2

+

Current release: v2.0.3

From 7cff813c432182f815d024800277f1ba1f3098d4 Mon Sep 17 00:00:00 2001 From: Anonymoussaurus <50231698+AnonymousWP@users.noreply.github.com> Date: Fri, 26 Jan 2024 13:27:40 +0100 Subject: [PATCH 085/262] chore: update version number to 2.0.3 --- docker-compose.dev.yml | 2 +- docker-compose.yml | 2 +- web/dashboard/templates/dashboard/index.html | 2 +- web/templates/base/_items/top_bar.html | 8 ++++---- web/templates/base/login.html | 2 +- 5 files changed, 8 insertions(+), 8 deletions(-) diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index 57fa28e0..4fbe8e53 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -98,7 +98,7 @@ services: - POSTGRES_HOST=${POSTGRES_HOST} # THIS IS A MUST FOR CHECKING UPDATE, EVERYTIME A COMMIT IS MERGED INTO # MASTER, UPDATE THIS!!! MAJOR.MINOR.PATCH https://semver.org/ - - RENGINE_CURRENT_VERSION='2.0.2' + - RENGINE_CURRENT_VERSION='2.0.3' volumes: - ./web:/usr/src/app - github_repos:/usr/src/github diff --git a/docker-compose.yml b/docker-compose.yml index af374d23..22046955 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -99,7 +99,7 @@ services: - DJANGO_SUPERUSER_PASSWORD=${DJANGO_SUPERUSER_PASSWORD} # THIS IS A MUST FOR CHECKING UPDATE, EVERYTIME A COMMIT IS MERGED INTO # MASTER, UPDATE THIS!!! MAJOR.MINOR.PATCH https://semver.org/ - - RENGINE_CURRENT_VERSION='2.0.2' + - RENGINE_CURRENT_VERSION='2.0.3' volumes: - ./web:/usr/src/app - github_repos:/usr/src/github diff --git a/web/dashboard/templates/dashboard/index.html b/web/dashboard/templates/dashboard/index.html index 6e5e98bd..caa3f3a2 100644 --- a/web/dashboard/templates/dashboard/index.html +++ b/web/dashboard/templates/dashboard/index.html @@ -17,7 +17,7 @@ {% endblock custom_js_css_link %} {% block breadcrumb_title %} -reNgine 2.0.2 +reNgine 2.0.3 {% endblock breadcrumb_title %} {% block main_content %} diff --git a/web/templates/base/_items/top_bar.html b/web/templates/base/_items/top_bar.html index f2addbc4..12c62531 100644 --- a/web/templates/base/_items/top_bar.html +++ b/web/templates/base/_items/top_bar.html @@ -170,18 +170,18 @@
Welcome {{user.get_username}}!
diff --git a/web/templates/base/login.html b/web/templates/base/login.html index 0e999dbe..a1931574 100644 --- a/web/templates/base/login.html +++ b/web/templates/base/login.html @@ -58,7 +58,7 @@

Login to reNgine

-

Current release: v2.0.2

+

Current release: v2.0.3

@@ -55,7 +55,7 @@

- {{model.modified_at}} Modified at + Modified {{model.modified_at|naturaltime}}
@@ -67,6 +67,7 @@

{{model.details.family}} Family

+ @@ -81,4 +82,38 @@

{% block page_level_script %} + {% endblock page_level_script %} diff --git a/web/scanEngine/views.py b/web/scanEngine/views.py index ec2f2b57..e1932442 100644 --- a/web/scanEngine/views.py +++ b/web/scanEngine/views.py @@ -4,6 +4,7 @@ import shutil import subprocess +from datetime import datetime from django import http from django.contrib import messages from django.shortcuts import get_object_or_404, render @@ -11,7 +12,6 @@ from rolepermissions.decorators import has_permission_decorator from reNgine.common_func import * -from reNgine.llm import CustomOllamaClient from reNgine.tasks import (run_command, send_discord_message, send_slack_message, send_telegram_message) from scanEngine.forms import * from scanEngine.forms import ConfigurationForm @@ -460,11 +460,17 @@ def tool_arsenal_section(request, slug): @has_permission_decorator(PERM_MODIFY_SYSTEM_CONFIGURATIONS, redirect_url=FOUR_OH_FOUR_URL) def llm_toolkit_section(request, slug): context = {} - ollama = CustomOllamaClient() - res = ollama.connect() - if not res.get('status'): - pass - context['installed_models'] = ollama.list_models() + list_all_models_url = f'{OLLAMA_INSTANCE}/api/tags' + response = requests.get(list_all_models_url) + if response.status_code == 200: + models = response.json() + models = models.get('models') + date_format = "%Y-%m-%dT%H:%M:%S" + models = [{**model, 'modified_at': datetime.strptime(model['modified_at'].split('.')[0], date_format)} for model in models] + context['installed_models'] = models + print(context['installed_models']) + else: + context['installed_models'] = [] return render(request, 'scanEngine/settings/llm_toolkit.html', context) From b8e99205b3ef33aaf937d03ddb13f168c8bbd402 Mon Sep 17 00:00:00 2001 From: Yogesh Ojha Date: Sun, 21 Apr 2024 08:57:55 +0530 Subject: [PATCH 191/262] added add model and delete model ui and endpoints --- .../scanEngine/settings/llm_toolkit.html | 101 +++++++++++++++++- 1 file changed, 99 insertions(+), 2 deletions(-) diff --git a/web/scanEngine/templates/scanEngine/settings/llm_toolkit.html b/web/scanEngine/templates/scanEngine/settings/llm_toolkit.html index ef82a728..702ee047 100644 --- a/web/scanEngine/templates/scanEngine/settings/llm_toolkit.html +++ b/web/scanEngine/templates/scanEngine/settings/llm_toolkit.html @@ -31,7 +31,7 @@
{{installed_models|length}} Installed LLM Models
@@ -105,7 +105,26 @@

}).then(function(response) { return response.json(); }).then(function(data) { - return location.reload(); + if (data.status){ + swal.insertQueueStep({ + icon: 'error', + title: 'Oops! Unable to delete the model!' + }) + } + else{ + swal.queue([{ + title: 'Model Successfully deleted!', + icon: 'success', + showCancelButton: false, + confirmButtonText: 'Okay', + padding: '2em', + showLoaderOnConfirm: true, + preConfirm: function() { + location.reload(); + } + }]); + } + //return location.reload(); }).catch(function() { swal.insertQueueStep({ icon: 'error', @@ -115,5 +134,83 @@

} }]) } + + function showAddNewModelModal(){ + $('#modal_title').html('Add new LLM Model'); + $('#modal-content').empty(); + $('#modal-content').append(` +

You can find the list of supported models in Ollama Library

+

We recommend using llama2-uncensored model for better results.

+
+ + +
+
+ +
+ `); + $('#modal_dialog').modal('show'); + } + + function download_model(){ + var model_name = $('#model_name').val(); + if (model_name == ""){ + Swal.fire({ + title: 'Oops!', + text: 'Model name is required', + icon: 'error' + }); + return; + } + var url = "/api/tool/ollama/?model=" + model_name; + swal.queue([{ + title: 'Are you sure you want to download this model?', + text: "Downloading models can take a long time, sometimes a few minutes. Please be patient.", + icon: 'info', + showCancelButton: true, + confirmButtonText: 'Download', + padding: '2em', + showLoaderOnConfirm: true, + preConfirm: function() { + return fetch(url, { + method: 'GET', + credentials: "same-origin", + headers: { + "X-CSRFToken": getCookie("csrftoken") + } + }).then(function(response) { + return response.json(); + }).then(function(data) { + if (!data.status){ + swal.insertQueueStep({ + icon: 'error', + title: 'Oops! Unable to download the model, model does not exist!' + }) + } + else{ + swal.queue([{ + title: 'Model Successfully downloaded!', + icon: 'success', + showCancelButton: false, + confirmButtonText: 'Okay', + padding: '2em', + showLoaderOnConfirm: true, + preConfirm: function() { + location.reload(); + } + }]); + } + //return location.reload(); + }).catch(function() { + swal.insertQueueStep({ + icon: 'error', + title: 'Oops! Unable to download the model!' + }) + }) + } + }]) + } + + {% endblock page_level_script %} From 1f35de39203c929ba1f529c5091f9b6a1add1af1 Mon Sep 17 00:00:00 2001 From: Yogesh Ojha Date: Sun, 21 Apr 2024 10:31:00 +0530 Subject: [PATCH 192/262] added put method to select model --- web/api/views.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/web/api/views.py b/web/api/views.py index 5f1f0def..fe6cd6cf 100644 --- a/web/api/views.py +++ b/web/api/views.py @@ -84,6 +84,22 @@ def delete(self, request): except Exception as e: response['error'] = str(e) return Response(response) + + def put(self, request): + req = self.request + model_name = req.query_params.get('model') + response = { + 'status': False + } + try: + OllamaSettings.objects.update_or_create( + defaults={'selected_model': model_name}, + id=1 + ) + response['status'] = True + except Exception as e: + response['error'] = str(e) + return Response(response) class GPTAttackSuggestion(APIView): From 5ab92fcda9f7f3779122c59012a8316a9bd904a7 Mon Sep 17 00:00:00 2001 From: Yogesh Ojha Date: Sun, 21 Apr 2024 10:31:31 +0530 Subject: [PATCH 193/262] Added ollama settings to store selected models --- .../migrations/0010_ollamasettings.py | 20 +++++ web/dashboard/models.py | 8 ++ web/reNgine/definitions.py | 40 ++++++++++ .../scanEngine/settings/llm_toolkit.html | 73 +++++++++++++++++-- web/scanEngine/views.py | 23 ++++-- 5 files changed, 153 insertions(+), 11 deletions(-) create mode 100644 web/dashboard/migrations/0010_ollamasettings.py diff --git a/web/dashboard/migrations/0010_ollamasettings.py b/web/dashboard/migrations/0010_ollamasettings.py new file mode 100644 index 00000000..1e47e5d0 --- /dev/null +++ b/web/dashboard/migrations/0010_ollamasettings.py @@ -0,0 +1,20 @@ +# Generated by Django 3.2.4 on 2024-04-21 04:35 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('dashboard', '0009_delete_openaikeys'), + ] + + operations = [ + migrations.CreateModel( + name='OllamaSettings', + fields=[ + ('id', models.AutoField(primary_key=True, serialize=False)), + ('selected_model', models.CharField(max_length=500)), + ], + ), + ] diff --git 
a/web/dashboard/models.py b/web/dashboard/models.py index 3d600174..58bb2183 100644 --- a/web/dashboard/models.py +++ b/web/dashboard/models.py @@ -24,6 +24,14 @@ class OpenAiAPIKey(models.Model): def __str__(self): return self.key + + +class OllamaSettings(models.Model): + id = models.AutoField(primary_key=True) + selected_model = models.CharField(max_length=500) + + def __str__(self): + return self.selected_model class NetlasAPIKey(models.Model): diff --git a/web/reNgine/definitions.py b/web/reNgine/definitions.py index 5abb5ebf..d4abe300 100644 --- a/web/reNgine/definitions.py +++ b/web/reNgine/definitions.py @@ -441,6 +441,46 @@ ############################################################################### OLLAMA_INSTANCE = 'http://ollama:11434' +DEFAULT_GPT_MODELS = [ + { + 'name': 'gpt-3', + 'model': 'gpt-3', + 'modified_at': '', + 'details': { + 'family': 'GPT', + 'parameter_size': '~175B', + } + }, + { + 'name': 'gpt-3.5-turbo', + 'model': 'gpt-3.5-turbo', + 'modified_at': '', + 'details': { + 'family': 'GPT', + 'parameter_size': '~7B', + } + }, + { + 'name': 'gpt-4', + 'model': 'gpt-4', + 'modified_at': '', + 'details': { + 'family': 'GPT', + 'parameter_size': '~1.7T', + } + }, + { + 'name': 'gpt-4-turbo', + 'model': 'gpt-4', + 'modified_at': '', + 'details': { + 'family': 'GPT', + 'parameter_size': '~1.7T', + } + } +] + + # GPT Vulnerability Report Generator VULNERABILITY_DESCRIPTION_SYSTEM_MESSAGE = """ diff --git a/web/scanEngine/templates/scanEngine/settings/llm_toolkit.html b/web/scanEngine/templates/scanEngine/settings/llm_toolkit.html index 702ee047..b6c87c7e 100644 --- a/web/scanEngine/templates/scanEngine/settings/llm_toolkit.html +++ b/web/scanEngine/templates/scanEngine/settings/llm_toolkit.html @@ -34,7 +34,7 @@ Add new model -
{{installed_models|length}} Installed LLM Models
+
{{installed_models|length}} available Models
{% for model in installed_models %}
@@ -45,17 +45,31 @@
{{installed_models|length}} Installed LLM Models

- {{model.name}} + {{model.name}} {% if model.selected %}Selected Model{% endif %}

- Modified {{model.modified_at|naturaltime}} + Modified {% if model.modified_at %}{{model.modified_at|naturaltime}} {% else %} NA{% endif %} + +
+ + + {% if model.is_local %} + Locally installed model + {% else %} + Open AI Model + {% endif %}
@@ -67,7 +81,6 @@

{{model.details.family}} Family

-

@@ -210,7 +223,55 @@

} }]) } - + function selectModel(model_name){ + var url = "/api/tool/ollama/?model=" + model_name; + swal.queue([{ + title: 'Are you sure you want to select this model?', + text: "This model will be used to generate Scan Reports and Attack Suggestions.", + icon: 'info', + showCancelButton: true, + confirmButtonText: 'Select', + padding: '2em', + showLoaderOnConfirm: true, + preConfirm: function() { + return fetch(url, { + method: 'PUT', + credentials: "same-origin", + headers: { + "X-CSRFToken": getCookie("csrftoken") + } + }).then(function(response) { + return response.json(); + }).then(function(data) { + if (!data.status){ + swal.insertQueueStep({ + icon: 'error', + title: 'Oops! Unable to select the model!' + }) + } + else{ + swal.queue([{ + title: 'Model Successfully selected!', + icon: 'success', + showCancelButton: false, + confirmButtonText: 'Okay', + padding: '2em', + showLoaderOnConfirm: true, + preConfirm: function() { + location.reload(); + } + }]); + } + //return location.reload(); + }).catch(function() { + swal.insertQueueStep({ + icon: 'error', + title: 'Oops! Unable to select the model!' + }) + }) + } + }]) + } {% endblock page_level_script %} diff --git a/web/scanEngine/views.py b/web/scanEngine/views.py index e1932442..b4f19574 100644 --- a/web/scanEngine/views.py +++ b/web/scanEngine/views.py @@ -462,15 +462,28 @@ def llm_toolkit_section(request, slug): context = {} list_all_models_url = f'{OLLAMA_INSTANCE}/api/tags' response = requests.get(list_all_models_url) + all_models = [] + all_models = DEFAULT_GPT_MODELS.copy() if response.status_code == 200: models = response.json() - models = models.get('models') + ollama_models = models.get('models') date_format = "%Y-%m-%dT%H:%M:%S" - models = [{**model, 'modified_at': datetime.strptime(model['modified_at'].split('.')[0], date_format)} for model in models] - context['installed_models'] = models - print(context['installed_models']) + for model in ollama_models: + all_models.append({**model, + 'modified_at': datetime.strptime(model['modified_at'].split('.')[0], date_format), + 'is_local': True + }) + # find selected model name from db + selected_model = OllamaSettings.objects.first() + if selected_model: + selected_model = {'selected_model': selected_model.selected_model} else: - context['installed_models'] = [] + # use gpt3.5-turbo as default + selected_model = {'selected_model': 'gpt-3.5-turbo'} + for models in all_models: + if models['name'] == selected_model['selected_model']: + models['selected'] = True + context['installed_models'] = all_models return render(request, 'scanEngine/settings/llm_toolkit.html', context) From 75cadb9ad709f19ce42a40454cc68cb982fbaeaf Mon Sep 17 00:00:00 2001 From: Yogesh Ojha Date: Sun, 21 Apr 2024 11:39:49 +0530 Subject: [PATCH 194/262] fix using ollama vs openai --- web/api/views.py | 9 +++++- .../0011_ollamasettings_is_ollama.py | 18 +++++++++++ ...name_is_ollama_ollamasettings_is_openai.py | 18 +++++++++++ .../migrations/0013_auto_20240421_0507.py | 22 +++++++++++++ ...ame_is_ollama_ollamasettings_use_ollama.py | 18 +++++++++++ web/dashboard/models.py | 1 + web/reNgine/gpt.py | 32 +++++++++++++------ web/scanEngine/views.py | 9 +++--- 8 files changed, 113 insertions(+), 14 deletions(-) create mode 100644 web/dashboard/migrations/0011_ollamasettings_is_ollama.py create mode 100644 web/dashboard/migrations/0012_rename_is_ollama_ollamasettings_is_openai.py create mode 100644 web/dashboard/migrations/0013_auto_20240421_0507.py create mode 100644 
web/dashboard/migrations/0014_rename_is_ollama_ollamasettings_use_ollama.py diff --git a/web/api/views.py b/web/api/views.py index fe6cd6cf..45fe5df4 100644 --- a/web/api/views.py +++ b/web/api/views.py @@ -88,12 +88,19 @@ def delete(self, request): def put(self, request): req = self.request model_name = req.query_params.get('model') + # check if model_name is in DEFAULT_GPT_MODELS response = { 'status': False } + use_ollama = True + if any(model['name'] == model_name for model in DEFAULT_GPT_MODELS): + use_ollama = False try: OllamaSettings.objects.update_or_create( - defaults={'selected_model': model_name}, + defaults={ + 'selected_model': model_name, + 'use_ollama': use_ollama + }, id=1 ) response['status'] = True diff --git a/web/dashboard/migrations/0011_ollamasettings_is_ollama.py b/web/dashboard/migrations/0011_ollamasettings_is_ollama.py new file mode 100644 index 00000000..aefdef37 --- /dev/null +++ b/web/dashboard/migrations/0011_ollamasettings_is_ollama.py @@ -0,0 +1,18 @@ +# Generated by Django 3.2.4 on 2024-04-21 05:06 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('dashboard', '0010_ollamasettings'), + ] + + operations = [ + migrations.AddField( + model_name='ollamasettings', + name='is_ollama', + field=models.BooleanField(default=False), + ), + ] diff --git a/web/dashboard/migrations/0012_rename_is_ollama_ollamasettings_is_openai.py b/web/dashboard/migrations/0012_rename_is_ollama_ollamasettings_is_openai.py new file mode 100644 index 00000000..52bdf6ff --- /dev/null +++ b/web/dashboard/migrations/0012_rename_is_ollama_ollamasettings_is_openai.py @@ -0,0 +1,18 @@ +# Generated by Django 3.2.4 on 2024-04-21 05:06 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('dashboard', '0011_ollamasettings_is_ollama'), + ] + + operations = [ + migrations.RenameField( + model_name='ollamasettings', + old_name='is_ollama', + new_name='is_openai', + ), + ] diff --git a/web/dashboard/migrations/0013_auto_20240421_0507.py b/web/dashboard/migrations/0013_auto_20240421_0507.py new file mode 100644 index 00000000..11ab6594 --- /dev/null +++ b/web/dashboard/migrations/0013_auto_20240421_0507.py @@ -0,0 +1,22 @@ +# Generated by Django 3.2.4 on 2024-04-21 05:07 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('dashboard', '0012_rename_is_ollama_ollamasettings_is_openai'), + ] + + operations = [ + migrations.RemoveField( + model_name='ollamasettings', + name='is_openai', + ), + migrations.AddField( + model_name='ollamasettings', + name='is_ollama', + field=models.BooleanField(default=True), + ), + ] diff --git a/web/dashboard/migrations/0014_rename_is_ollama_ollamasettings_use_ollama.py b/web/dashboard/migrations/0014_rename_is_ollama_ollamasettings_use_ollama.py new file mode 100644 index 00000000..a201df3a --- /dev/null +++ b/web/dashboard/migrations/0014_rename_is_ollama_ollamasettings_use_ollama.py @@ -0,0 +1,18 @@ +# Generated by Django 3.2.4 on 2024-04-21 05:08 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('dashboard', '0013_auto_20240421_0507'), + ] + + operations = [ + migrations.RenameField( + model_name='ollamasettings', + old_name='is_ollama', + new_name='use_ollama', + ), + ] diff --git a/web/dashboard/models.py b/web/dashboard/models.py index 58bb2183..8ed77dd4 100644 --- a/web/dashboard/models.py +++ b/web/dashboard/models.py @@ -29,6 +29,7 @@ def 
__str__(self): class OllamaSettings(models.Model): id = models.AutoField(primary_key=True) selected_model = models.CharField(max_length=500) + use_ollama = models.BooleanField(default=True) def __str__(self): return self.selected_model diff --git a/web/reNgine/gpt.py b/web/reNgine/gpt.py index 2e6682ff..389da1fc 100644 --- a/web/reNgine/gpt.py +++ b/web/reNgine/gpt.py @@ -1,16 +1,19 @@ import openai import re from reNgine.common_func import get_open_ai_key, extract_between -from reNgine.definitions import VULNERABILITY_DESCRIPTION_SYSTEM_MESSAGE, ATTACK_SUGGESTION_GPT_SYSTEM_PROMPT -from langchain.llms import Ollama +from reNgine.definitions import VULNERABILITY_DESCRIPTION_SYSTEM_MESSAGE, ATTACK_SUGGESTION_GPT_SYSTEM_PROMPT, OLLAMA_INSTANCE +from langchain_community.llms import Ollama + +from dashboard.models import OllamaSettings class GPTVulnerabilityReportGenerator: def __init__(self): - self.api_key = get_open_ai_key() - self.model_name = 'gpt-3.5-turbo' - if not self.api_key: - self.ollama = Ollama(base_url='http://ollama:11434', model="llama2-uncensored") + selected_model = OllamaSettings.objects.first() + self.model_name = selected_model.selected_model if selected_model else 'gpt-3.5-turbo' + self.use_ollama = selected_model.use_ollama if selected_model else False + self.openai_api_key = None + self.ollama = None def get_vulnerability_description(self, description): """Generate Vulnerability Description using GPT. @@ -26,12 +29,23 @@ def get_vulnerability_description(self, description): 'references': (list) of urls } """ - if not self.api_key: - prompt = ATTACK_SUGGESTION_GPT_SYSTEM_PROMPT + "\nUser: " + input + print(f"Generating Vulnerability Description for: {description}") + if self.use_ollama: + prompt = VULNERABILITY_DESCRIPTION_SYSTEM_MESSAGE + "\nUser: " + description + self.ollama = Ollama( + base_url=OLLAMA_INSTANCE, + model=self.model_name + ) response_content = self.ollama(prompt) else: - openai.api_key = self.api_key + openai_api_key = get_open_ai_key() + if not openai_api_key: + return { + 'status': False, + 'error': 'OpenAI API Key not set' + } try: + openai.api_key = openai_api_key gpt_response = openai.ChatCompletion.create( model=self.model_name, messages=[ diff --git a/web/scanEngine/views.py b/web/scanEngine/views.py index b4f19574..d1eb2ef5 100644 --- a/web/scanEngine/views.py +++ b/web/scanEngine/views.py @@ -463,6 +463,7 @@ def llm_toolkit_section(request, slug): list_all_models_url = f'{OLLAMA_INSTANCE}/api/tags' response = requests.get(list_all_models_url) all_models = [] + selected_model = None all_models = DEFAULT_GPT_MODELS.copy() if response.status_code == 200: models = response.json() @@ -471,7 +472,7 @@ def llm_toolkit_section(request, slug): for model in ollama_models: all_models.append({**model, 'modified_at': datetime.strptime(model['modified_at'].split('.')[0], date_format), - 'is_local': True + 'is_local': True, }) # find selected model name from db selected_model = OllamaSettings.objects.first() @@ -480,9 +481,9 @@ def llm_toolkit_section(request, slug): else: # use gpt3.5-turbo as default selected_model = {'selected_model': 'gpt-3.5-turbo'} - for models in all_models: - if models['name'] == selected_model['selected_model']: - models['selected'] = True + for model in all_models: + if model['name'] == selected_model['selected_model']: + model['selected'] = True context['installed_models'] = all_models return render(request, 'scanEngine/settings/llm_toolkit.html', context) From bbf3ef8aa16602ac1307ba6200b25972c9c80e2f Mon Sep 17 00:00:00 2001 
From: Yogesh Ojha Date: Sun, 12 May 2024 18:16:57 +0530 Subject: [PATCH 195/262] fix requirements --- web/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/web/requirements.txt b/web/requirements.txt index e402d590..fd548693 100644 --- a/web/requirements.txt +++ b/web/requirements.txt @@ -17,6 +17,7 @@ drf-yasg==1.21.3 gunicorn==22.0.0 gevent==24.2.1 humanize==4.3.0 +langchain==0.1.0 Markdown==3.3.4 metafinder==1.2 netaddr==0.8.0 @@ -37,4 +38,3 @@ whatportis weasyprint==53.3 wafw00f==2.2.0 xmltodict==0.13.0 -langchain==0.0.343 From 95f5384d11dd6962aaebfee9a3eb40823a02c96d Mon Sep 17 00:00:00 2001 From: Yogesh Ojha Date: Sun, 12 May 2024 18:17:15 +0530 Subject: [PATCH 196/262] show error message when openai key is not set --- .../templates/scanEngine/settings/llm_toolkit.html | 13 +++++++++++-- web/scanEngine/views.py | 4 ++++ 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/web/scanEngine/templates/scanEngine/settings/llm_toolkit.html b/web/scanEngine/templates/scanEngine/settings/llm_toolkit.html index b6c87c7e..fa6d304e 100644 --- a/web/scanEngine/templates/scanEngine/settings/llm_toolkit.html +++ b/web/scanEngine/templates/scanEngine/settings/llm_toolkit.html @@ -14,7 +14,7 @@ {% endblock breadcrumb_title %} {% block page_title %} -LLM Toolkit +LLM Toolkit (Beta) {% endblock page_title %} {% block main_content %} @@ -22,9 +22,13 @@
+ LLM Toolkit includes the ability to download new LLMs, view available models, and delete models no longer needed, and also choose between various models. +

reNgine makes use of various LLMs to enhance the reporting process. Using various LLM AI models, penetration testers will be able to generate detailed, insightful penetration testing reports.
- LLM Toolkit includes the ability to download new LLMs, view available models, and delete models no longer needed, and also choose between various models. + If you are using custom LLM models, it is expected that response time are much slower in CPU. We recommend using GPU for better performance. Models such as llama2, or llama3 requires significant computation and GPU are required. Having only CPU will result in slow response time. +
+ OpenAI GPT models do not run locally, hence the requirement of GPU is not necessary.

@@ -35,6 +39,11 @@
{{installed_models|length}} available Models
+ {% if openai_key_error %} + + {% endif %}
{% for model in installed_models %}
diff --git a/web/scanEngine/views.py b/web/scanEngine/views.py index d1eb2ef5..0404e8c4 100644 --- a/web/scanEngine/views.py +++ b/web/scanEngine/views.py @@ -485,6 +485,10 @@ def llm_toolkit_section(request, slug): if model['name'] == selected_model['selected_model']: model['selected'] = True context['installed_models'] = all_models + # show error message for openai key, if any gpt is selected + openai_key = get_open_ai_key() + if not openai_key and 'gpt' in selected_model['selected_model']: + context['openai_key_error'] = True return render(request, 'scanEngine/settings/llm_toolkit.html', context) From ac722a653ae56b696407160c216afd670bc9afde Mon Sep 17 00:00:00 2001 From: psyray Date: Tue, 21 May 2024 22:57:58 +0200 Subject: [PATCH 197/262] Add arm64 support and reduce image size --- web/Dockerfile | 127 ++++++++++++++++++++++++++++--------------------- 1 file changed, 73 insertions(+), 54 deletions(-) diff --git a/web/Dockerfile b/web/Dockerfile index 72b13433..c0624a53 100644 --- a/web/Dockerfile +++ b/web/Dockerfile @@ -1,11 +1,19 @@ # Base image -FROM --platform=linux/amd64 ubuntu:22.04 +FROM ubuntu:22.04 + +ARG version=0.33.0 +ARG geckodriver_amd=geckodriver-v${version}-linux64.tar.gz +ARG geckodriver_arm=geckodriver-v${version}-linux-aarch64.tar.gz + +ARG go_version=1.21.5 +ARG go_amd=go${go_version}.linux-amd64.tar.gz +ARG go_arm=go${go_version}.linux-arm64.tar.gz # Labels and Credits LABEL \ - name="reNgine" \ - author="Yogesh Ojha " \ - description="reNgine is a automated pipeline of recon process, useful for information gathering during web application penetration testing." + name="reNgine NG" \ + author="Security-Tools-Alliance (https://github.com/Security-Tools-Alliance) & Yogesh Ojha " \ + description="reNGine NG is a automated pipeline of recon process, useful for information gathering during web application penetration testing." 
# Environment Variables ENV DEBIAN_FRONTEND="noninteractive" \ @@ -17,15 +25,14 @@ ENV GOPATH=$HOME/go ENV PATH="${PATH}:${GOROOT}/bin:${GOPATH}/bin" # Install Python -RUN apt update -y && \ - apt update -y && \ +RUN apt update && \ apt install -y \ python3.10 \ python3-dev \ - python3-pip + python3-pip # Install essential packages -RUN apt install -y --no-install-recommends \ +RUN apt install -y --no-install-recommends \ build-essential \ cmake \ geoip-bin \ @@ -47,17 +54,35 @@ RUN apt install -y --no-install-recommends \ RUN add-apt-repository ppa:mozillateam/ppa -# Download and install go 1.20 -RUN wget https://golang.org/dl/go1.21.4.linux-amd64.tar.gz -RUN tar -xvf go1.21.4.linux-amd64.tar.gz -RUN rm go1.21.4.linux-amd64.tar.gz -RUN mv go /usr/local - -# Download geckodriver -RUN wget https://github.com/mozilla/geckodriver/releases/download/v0.32.0/geckodriver-v0.32.0-linux64.tar.gz -RUN tar -xvf geckodriver-v0.32.0-linux64.tar.gz -RUN rm geckodriver-v0.32.0-linux64.tar.gz -RUN mv geckodriver /usr/bin +# Download and install go +RUN ARCH=$(dpkg --print-architecture) && \ + if [ "${ARCH}" = "arm64" ]; then \ + wget https://go.dev/dl/${go_arm} && \ + tar -xvf ${go_arm} -C /usr/local/ && \ + rm ${go_arm}; \ + elif [ "${ARCH}" = "amd64" ]; then \ + wget https://go.dev/dl/${go_amd} && \ + tar -xvf ${go_amd} -C /usr/local/ && \ + rm ${go_amd}; \ + else \ + echo "Unknown architecture: $ARCH" ; \ + exit 1; \ + fi + +# Download and install geckodriver +RUN ARCH=$(dpkg --print-architecture) && \ + if [ "${ARCH}" = "arm64" ]; then \ + wget https://github.com/mozilla/geckodriver/releases/download/v${version}/${geckodriver_arm} && \ + tar -xvf ${geckodriver_arm} -C /usr/bin/ && \ + rm ${geckodriver_arm}; \ + elif [ "${ARCH}" = "amd64" ]; then \ + wget https://github.com/mozilla/geckodriver/releases/download/v${version}/${geckodriver_amd} && \ + tar -xvf ${geckodriver_amd} -C /usr/bin/ && \ + rm ${geckodriver_amd}; \ + else \ + echo "Unknown architecture: $ARCH" ; \ + exit 1; \ + fi # Make directory for app WORKDIR /usr/src/app @@ -67,44 +92,41 @@ ENV PYTHONDONTWRITEBYTECODE 1 ENV PYTHONUNBUFFERED 1 # Download Go packages -RUN go install -v github.com/jaeles-project/gospider@latest -RUN go install -v github.com/tomnomnom/gf@latest -RUN go install -v github.com/tomnomnom/unfurl@latest -RUN go install -v github.com/tomnomnom/waybackurls@latest -RUN go install -v github.com/projectdiscovery/httpx/cmd/httpx@latest -RUN go install -v github.com/projectdiscovery/subfinder/v2/cmd/subfinder@latest -RUN go install -v github.com/projectdiscovery/nuclei/v3/cmd/nuclei@latest -RUN go install -v github.com/projectdiscovery/naabu/v2/cmd/naabu@latest -RUN go install -v github.com/hakluke/hakrawler@latest -RUN go install -v github.com/lc/gau/v2/cmd/gau@latest -RUN go install -v github.com/owasp-amass/amass/v3/...@latest -RUN go install -v github.com/ffuf/ffuf@latest -RUN go install -v github.com/projectdiscovery/tlsx/cmd/tlsx@latest -RUN go install -v github.com/hahwul/dalfox/v2@latest -RUN go install -v github.com/projectdiscovery/katana/cmd/katana@latest -RUN go install -v github.com/dwisiswant0/crlfuzz/cmd/crlfuzz@latest -RUN go install -v github.com/sa7mon/s3scanner@latest - -# Update Nuclei and Nuclei-Templates -RUN nuclei -update +ENV GO111MODULE=on +RUN ARCH=$(dpkg --print-architecture) \ + && if [ "$ARCH" = "arm64" ] || [ "$ARCH" = "amd64" ]; then \ + GOARCH=$ARCH go install -v github.com/jaeles-project/gospider@latest \ + && GOARCH=$ARCH go install -v github.com/tomnomnom/gf@latest \ + && GOARCH=$ARCH go 
install -v github.com/tomnomnom/unfurl@latest \ + && GOARCH=$ARCH go install -v github.com/tomnomnom/waybackurls@latest \ + && GOARCH=$ARCH go install -v github.com/projectdiscovery/httpx/cmd/httpx@latest \ + && GOARCH=$ARCH go install -v github.com/projectdiscovery/subfinder/v2/cmd/subfinder@latest \ + && GOARCH=$ARCH go install -v github.com/projectdiscovery/nuclei/v3/cmd/nuclei@latest \ + && GOARCH=$ARCH go install -v github.com/projectdiscovery/naabu/v2/cmd/naabu@latest \ + && GOARCH=$ARCH go install -v github.com/lc/gau/v2/cmd/gau@latest \ + && GOARCH=$ARCH go install -v github.com/owasp-amass/amass/v3/...@latest \ + && GOARCH=$ARCH go install -v github.com/ffuf/ffuf@latest \ + && GOARCH=$ARCH go install -v github.com/projectdiscovery/tlsx/cmd/tlsx@latest \ + && GOARCH=$ARCH go install -v github.com/hahwul/dalfox/v2@latest \ + && GOARCH=$ARCH go install -v github.com/projectdiscovery/katana/cmd/katana@latest \ + && GOARCH=$ARCH go install -v github.com/dwisiswant0/crlfuzz/cmd/crlfuzz@latest \ + && GOARCH=$ARCH go install -v github.com/sa7mon/s3scanner@latest \ + && rm -rf /go/pkg/* && rm -rf /root/.cache/go-build; \ + else \ + echo "Unknown architecture: $ARCH" ; \ + exit 1; \ + fi + +# Update Nuclei-Templates RUN nuclei -update-templates -# Update project discovery tools -RUN httpx -up -RUN naabu -up -RUN subfinder -up -RUN tlsx -up -RUN katana -up - # Copy requirements COPY ./requirements.txt /tmp/requirements.txt RUN pip3 install --upgrade setuptools pip && \ - pip3 install -r /tmp/requirements.txt + pip3 install -r /tmp/requirements.txt --no-cache-dir - -# install eyewitness - -RUN python3 -m pip install fuzzywuzzy \ +# install fuzzywuzzy +RUN python3 -m pip install --no-cache-dir fuzzywuzzy \ selenium==4.9.1 \ python-Levenshtein \ pyvirtualdisplay \ @@ -112,6 +134,3 @@ RUN python3 -m pip install fuzzywuzzy \ # Copy source code COPY . /usr/src/app/ - -# httpx seems to have issue, use alias instead!!! -RUN echo 'alias httpx="/go/bin/httpx"' >> ~/.bashrc From 2be3626b839ec896f6ebde7026a127b9401874e3 Mon Sep 17 00:00:00 2001 From: psyray Date: Tue, 21 May 2024 23:00:58 +0200 Subject: [PATCH 198/262] Split certs generation and docker starts and add notes --- install.sh | 40 +++++++++++++++++++++++++++++----------- 1 file changed, 29 insertions(+), 11 deletions(-) diff --git a/install.sh b/install.sh index 1e2ae5bd..e666db38 100755 --- a/install.sh +++ b/install.sh @@ -3,10 +3,21 @@ tput setaf 2; cat web/art/reNgine.txt +echo " " tput setaf 1; echo "Before running this script, please make sure Docker is running and you have made changes to .env file." -tput setaf 2; echo "Changing the postgres username & password from .env is highly recommended." +echo " " +tput setaf 1; echo "Changing the postgres username & password from .env is highly recommended." + +echo " " +tput setaf 3; +echo "#########################################################################" +echo "Please note that, this installation script is only intended for Linux" +echo "x86_64 and arm64 platform (Apple Mx series) are supported" +echo "Raspbery Pi is not recommended, all install tests have failed" +echo "#########################################################################" tput setaf 4; +echo " " read -p "Are you sure, you made changes to .env file (y/n)? 
" answer case ${answer:0:1} in y|Y|yes|YES|Yes ) @@ -23,13 +34,6 @@ case ${answer:0:1} in ;; esac -echo " " -tput setaf 3; -echo "#########################################################################" -echo "Please note that, this installation script is only intended for Linux" -echo "For Mac and Windows, refer to the official guide https://rengine.wiki" -echo "#########################################################################" - echo " " tput setaf 4; echo "Installing reNgine and it's dependencies" @@ -97,7 +101,7 @@ echo "#########################################################################" echo "Checking Docker status" echo "#########################################################################" if docker info >/dev/null 2>&1; then - tput setaf 4; + tput setaf 2; echo "Docker is running." else tput setaf 1; @@ -109,9 +113,23 @@ fi echo " " tput setaf 4; echo "#########################################################################" -echo "Installing reNgine" +echo "Installing reNgine, please be patient it could take a while" echo "#########################################################################" -make certs && make build && make up && tput setaf 2 && echo "reNgine is installed!!!" && failed=0 || failed=1 + + +echo " " +tput setaf 5; +echo "=========================================================================" +echo "Generating certificates and building docker images" +echo "=========================================================================" +make certs && make build && failed=0 || failed=1 + +echo " " +tput setaf 5; +echo "=========================================================================" +echo "Docker containers starting, please wait celery container could be long" +echo "=========================================================================" +make up && tput setaf 2 && echo "reNgine is installed!!!" && failed=0 || failed=1 if [ "${failed}" -eq 0 ]; then sleep 3 From 8bfda54996895f8238d2effd6a66318ea6479b3b Mon Sep 17 00:00:00 2001 From: psyray Date: Wed, 22 May 2024 15:51:15 +0200 Subject: [PATCH 199/262] Bump release version to 2.1.0 --- docker-compose.dev.yml | 2 +- docker-compose.yml | 2 +- web/art/reNgine.txt | 2 +- web/dashboard/templates/dashboard/index.html | 2 +- web/templates/base/_items/top_bar.html | 8 ++++---- web/templates/base/login.html | 2 +- 6 files changed, 9 insertions(+), 9 deletions(-) diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index ffd733e3..48fec819 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -98,7 +98,7 @@ services: - POSTGRES_HOST=${POSTGRES_HOST} # THIS IS A MUST FOR CHECKING UPDATE, EVERYTIME A COMMIT IS MERGED INTO # MASTER, UPDATE THIS!!! MAJOR.MINOR.PATCH https://semver.org/ - - RENGINE_CURRENT_VERSION='2.0.6' + - RENGINE_CURRENT_VERSION='2.1.0' volumes: - ./web:/usr/src/app - github_repos:/usr/src/github diff --git a/docker-compose.yml b/docker-compose.yml index 3d2dc052..3e836f6a 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -99,7 +99,7 @@ services: - DJANGO_SUPERUSER_PASSWORD=${DJANGO_SUPERUSER_PASSWORD} # THIS IS A MUST FOR CHECKING UPDATE, EVERYTIME A COMMIT IS MERGED INTO # MASTER, UPDATE THIS!!! 
MAJOR.MINOR.PATCH https://semver.org/ - - RENGINE_CURRENT_VERSION='2.0.6' + - RENGINE_CURRENT_VERSION='2.1.0' volumes: - ./web:/usr/src/app - github_repos:/usr/src/github diff --git a/web/art/reNgine.txt b/web/art/reNgine.txt index 822ca95b..a6898d56 100644 --- a/web/art/reNgine.txt +++ b/web/art/reNgine.txt @@ -3,6 +3,6 @@ _ __ ___| \| | __ _ _ _ __ ___ | '__/ _ \ . ` |/ _` | | '_ \ / _ \ | | | __/ |\ | (_| | | | | | __/ - |_| \___|_| \_|\__, |_|_| |_|\___| v2.0.6-jasper + |_| \___|_| \_|\__, |_|_| |_|\___| v2.1.0-jasper __/ | |___/ diff --git a/web/dashboard/templates/dashboard/index.html b/web/dashboard/templates/dashboard/index.html index 3f5f8356..91620f7d 100644 --- a/web/dashboard/templates/dashboard/index.html +++ b/web/dashboard/templates/dashboard/index.html @@ -17,7 +17,7 @@ {% endblock custom_js_css_link %} {% block breadcrumb_title %} -reNgine 2.0.6 +reNgine 2.1.0 {% endblock breadcrumb_title %} {% block main_content %} diff --git a/web/templates/base/_items/top_bar.html b/web/templates/base/_items/top_bar.html index d061b56d..39703c35 100644 --- a/web/templates/base/_items/top_bar.html +++ b/web/templates/base/_items/top_bar.html @@ -170,18 +170,18 @@
Welcome {{user.get_username}}!
diff --git a/web/templates/base/login.html b/web/templates/base/login.html index 47ae02c0..5088860a 100644 --- a/web/templates/base/login.html +++ b/web/templates/base/login.html @@ -58,7 +58,7 @@

Login to reNgine

-

Current release: v2.0.6

+

Current release: v2.1.0

From 30869d82a7b273c485b098edbc00acda4b26fe50 Mon Sep 17 00:00:00 2001 From: psyray Date: Sat, 17 Aug 2024 01:43:57 +0200 Subject: [PATCH 219/262] fix(ui): fix some forgotten reNgine-ng names --- web/dashboard/templates/dashboard/onboarding.html | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/web/dashboard/templates/dashboard/onboarding.html b/web/dashboard/templates/dashboard/onboarding.html index e171f52e..e34b8dd8 100644 --- a/web/dashboard/templates/dashboard/onboarding.html +++ b/web/dashboard/templates/dashboard/onboarding.html @@ -3,7 +3,7 @@ - reNgine Onboarding + reNgine-ng Onboarding @@ -17,8 +17,8 @@ {% csrf_token %}
-

Hey {{user.username}}! Welcome to reNgine

-

You will need to create your first project before you start using reNgine. Projects are now a part of reNgine 2.0! Learn more about projects.

+

Hey {{user.username}}! Welcome to reNgine-ng

+

You will need to create your first project before you start using reNgine-ng. Projects are now a part of reNgine-ng 2.0! Learn more about projects.

{% if error %}
{{error}} From 5e0d68d9627432a3d2c6e6528fcad8830652f2f9 Mon Sep 17 00:00:00 2001 From: psyray Date: Sat, 17 Aug 2024 19:09:09 +0200 Subject: [PATCH 220/262] feat(update): improve update script due to prebuilt image change --- scripts/update.sh | 117 ++++++++++++++++++++++++++++++++++++++++++++++ update.sh | 38 --------------- 2 files changed, 117 insertions(+), 38 deletions(-) create mode 100755 scripts/update.sh delete mode 100755 update.sh diff --git a/scripts/update.sh b/scripts/update.sh new file mode 100755 index 00000000..f3c55d25 --- /dev/null +++ b/scripts/update.sh @@ -0,0 +1,117 @@ +#!/bin/bash + +# Define color codes. +# Using `tput setaf` at some places because the variable only works with log/echo + +COLOR_BLACK=0 +COLOR_RED=1 +COLOR_GREEN=2 +COLOR_YELLOW=3 +COLOR_BLUE=4 +COLOR_MAGENTA=5 +COLOR_CYAN=6 +COLOR_WHITE=7 +COLOR_DEFAULT=$COLOR_WHITE # Use white as default for clarity + +# Log messages in different colors +log() { + local color=${2:-$COLOR_DEFAULT} # Use default color if $2 is not set + if [ "$color" -ne $COLOR_DEFAULT ]; then + tput setaf "$color" + fi + printf "$1\r\n" + tput sgr0 # Reset text color +} + +# Function to compare version strings +version_compare() { + if [[ $1 == $2 ]] + then + return 0 + fi + local IFS=. + local i ver1=($1) ver2=($2) + for ((i=${#ver1[@]}; i<${#ver2[@]}; i++)) + do + ver1[i]=0 + done + for ((i=0; i<${#ver1[@]}; i++)) + do + if [[ -z ${ver2[i]} ]] + then + ver2[i]=0 + fi + if ((10#${ver1[i]} > 10#${ver2[i]})) + then + return 1 + fi + if ((10#${ver1[i]} < 10#${ver2[i]})) + then + return 2 + fi + done + return 0 +} + +# Get current version +CURRENT_VERSION=$(cat ../web/reNgine/version.txt) + +# Get latest release version from GitHub +LATEST_VERSION=$(curl -s https://api.github.com/repos/Security-Tools-Alliance/rengine-ng/releases/latest | grep '"tag_name":' | sed -E 's/.*"([^"]+)".*/\1/' | sed 's/v//') + +cat ../web/art/reNgine.txt + +log "\n" $COLOR_DEFAULT +log "Current version: $CURRENT_VERSION" $COLOR_CYAN +log "Latest version: $LATEST_VERSION" $COLOR_CYAN +log "\n" $COLOR_DEFAULT + +# Compare versions +version_compare $CURRENT_VERSION $LATEST_VERSION +case $? in + 0) log "You are already on the latest version." $COLOR_GREEN + #exit 0 + ;; + 1) log "Your version is newer than the latest release. No update needed." $COLOR_YELLOW + #exit 0 + ;; + 2) log "An update is available." $COLOR_CYAN + ;; +esac + +read -p "Do you want to update to the latest version? (y/n) " answer + +if [[ $answer == "y" ]]; then + read -p "Do you want to update from prebuilt images or build from source? (prebuilt/source) " install_type + read -p "Do you want to apply your local changes after updating? (y/n) " apply_changes + + cd .. + if [[ $apply_changes == "y" ]]; then + make down && git stash save && git pull && git stash apply + if [[ $install_type == "prebuilt" ]]; then + make pull_up + elif [[ $install_type == "source" ]]; then + make build_up + else + log "Invalid installation type. Update cancelled." $COLOR_RED + exit 1 + fi + log "Successfully updated to version $LATEST_VERSION and local changes have been reapplied" $COLOR_GREEN + elif [[ $apply_changes == "n" ]]; then + make down && git stash && git stash drop && git pull + if [[ $install_type == "prebuilt" ]]; then + make pull_up + elif [[ $install_type == "source" ]]; then + make build_up + else + log "Invalid installation type. Update cancelled." $COLOR_RED + exit 1 + fi + log "Successfully updated to version $LATEST_VERSION" $COLOR_GREEN + else + log "Invalid input. Update cancelled." 
$COLOR_RED + exit 1 + fi +else + log "Update cancelled." $COLOR_YELLOW +fi \ No newline at end of file diff --git a/update.sh b/update.sh deleted file mode 100755 index 32d95744..00000000 --- a/update.sh +++ /dev/null @@ -1,38 +0,0 @@ -#!/bin/bash - -# Define color codes. -# Using `tput setaf` at some places because the variable only works with log/echo - -COLOR_BLACK=0 -COLOR_RED=1 -COLOR_GREEN=2 -COLOR_YELLOW=3 -COLOR_BLUE=4 -COLOR_MAGENTA=5 -COLOR_CYAN=6 -COLOR_WHITE=7 -COLOR_DEFAULT=$COLOR_WHITE # Use white as default for clarity - -# Log messages in different colors -log() { - local color=${2:-$COLOR_DEFAULT} # Use default color if $2 is not set - if [ "$color" -ne $COLOR_DEFAULT ]; then - tput setaf "$color" - fi - printf "$1\r\n" - tput sgr0 # Reset text color -} - -read -p "Do you want to apply your local changes after updating? (y/n) " answer - -if [[ $answer == "y" ]]; then - make down && git stash save && git pull && git stash apply && make build && make up - tput setaf 2; - echo "Successfully updated" -elif [[ $answer == "n" ]]; then - make down && git stash && git stash drop && git pull && make build && make up - tput setaf 2; - echo "Successfully updated" -else - echo "Invalid input. Please enter 'y' or 'n'." -fi From ebab28388c21115fb0c6dd97bc180d07fae0f347 Mon Sep 17 00:00:00 2001 From: psyray Date: Sun, 18 Aug 2024 21:55:00 +0200 Subject: [PATCH 221/262] fix(cidr): add CIDR import --- web/api/views.py | 27 +++++++++++--------- web/targetApp/templates/target/add.html | 28 ++++++++++++--------- web/targetApp/views.py | 33 +++++++++++++++++++++++-- 3 files changed, 62 insertions(+), 26 deletions(-) diff --git a/web/api/views.py b/web/api/views.py index 6746d878..2cc724d4 100644 --- a/web/api/views.py +++ b/web/api/views.py @@ -4,6 +4,7 @@ from pathlib import Path import socket import subprocess +from ipaddress import IPv4Network import requests import validators @@ -1214,20 +1215,22 @@ def get(self, request): }) try: logger.info(f'Resolving IP address {ip_address} ...') - domain, domains, ips = socket.gethostbyaddr(ip_address) - response = { - 'status': True, - 'ip_address': ip_address, - 'domains': domains or [domain], - 'resolves_to': domain - } - except socket.herror: # ip does not have a PTR record - logger.info(f'No PTR record for {ip_address}') + resolved_ips = [] + for ip in IPv4Network(ip_address, False): + domains = [] + ips = [] + try: + (domain, domains, ips) = socket.gethostbyaddr(str(ip)) + except socket.herror: + logger.info(f'No PTR record for {ip_address}') + domain = str(ip) + if domain not in domains: + domains.append(domain) + resolved_ips.append({'ip': str(ip),'domain': domain, 'domains': domains, 'ips': ips}) response = { 'status': True, - 'ip_address': ip_address, - 'domains': [ip_address], - 'resolves_to': ip_address + 'orig': ip_address, + 'ip_address': resolved_ips, } except Exception as e: logger.exception(e) diff --git a/web/targetApp/templates/target/add.html b/web/targetApp/templates/target/add.html index 87747fd8..ad7c336c 100644 --- a/web/targetApp/templates/target/add.html +++ b/web/targetApp/templates/target/add.html @@ -261,18 +261,22 @@
if (json_data['status']) { // #resolved_domains_div $("#all_domains_checkbox").show(); - $('#resolved_domains_div').append(`${json_data['domains'].length} domains associated with IP Address ${ip_address.value}
`); - $('#resolved_domains_div').append(`Please select the domains to import.`); - $('#resolved_domains_div').append(`
`); - for (var domain in json_data['domains']) { - $('#domains_checkbox').append(` -
-
- - -
-
` - ); + if(Array.isArray(json_data['ip_address'])) { + $('#resolved_domains_div').append(`${json_data['ip_address'].length} domains associated with IP Address ${json_data['orig']}
`); + $('#resolved_domains_div').append(`Please select the domains to import.`); + $('#resolved_domains_div').append(`
`); + json_data['ip_address'].forEach((ip_info, index, array) => { + for (var domain in ip_info['domains']) { + $('#domains_checkbox').append(` +
+
+ + +
+
` + ); + } + }) } swal.close(); // resolved_ip_domains if any is checked, then only enable add button diff --git a/web/targetApp/views.py b/web/targetApp/views.py index ac951202..09067991 100644 --- a/web/targetApp/views.py +++ b/web/targetApp/views.py @@ -93,8 +93,8 @@ def add_target(request, slug): domains.append(target) elif is_range: - ips = get_ips_from_cidr_range(target) - for ip_address in ips: + _ips = get_ips_from_cidr_range(target) + for ip_address in _ips: ips.append(ip_address) domains.append(ip_address) else: @@ -230,6 +230,35 @@ def add_target(request, slug): project=project, insert_date=timezone.now()) organization.domains.add(domain_obj) + elif ip_target: + # add ip's from "resolve and add ip address" tab + resolved_ips = [ip.rstrip() for ip in request.POST.getlist('resolved_ip_domains') if ip] + for ip in resolved_ips: + is_domain = bool(validators.domain(ip)) + is_ip = bool(validators.ipv4(ip)) or bool(validators.ipv6(ip)) + description = request.POST.get('targetDescription', '') + h1_team_handle = request.POST.get('targetH1TeamHandle') + if not Domain.objects.filter(name=ip).exists(): + domain, created = Domain.objects.get_or_create( + name=ip, + description=description, + h1_team_handle=h1_team_handle, + project=project, + ip_address_cidr=ip if is_ip else None) + domain.insert_date = timezone.now() + domain.save() + added_target_count += 1 + if created: + logger.info(f'Added new domain {domain.name}') + if is_ip: + ip_data = get_ip_info(ip) + ip, created = IpAddress.objects.get_or_create(address=ip) + ip.reverse_pointer = ip_data.reverse_pointer + ip.is_private = ip_data.is_private + ip.version = ip_data.version + ip.save() + if created: + logger.info(f'Added new IP {ip}') except Exception as e: logger.exception(e) From 467524175757e1fc36ee002f415ee20ef0275e51 Mon Sep 17 00:00:00 2001 From: Psyray Date: Mon, 19 Aug 2024 19:15:27 +0200 Subject: [PATCH 222/262] Apply text suggestions from code review Co-authored-by: Anonymoussaurus <50231698+AnonymousWP@users.noreply.github.com> --- .env-dist | 7 ++++--- docker/celery/Dockerfile | 2 +- docker/docker-compose.yml | 2 +- install.sh | 4 ++-- 4 files changed, 8 insertions(+), 7 deletions(-) diff --git a/.env-dist b/.env-dist index ef622684..d138cb0a 100644 --- a/.env-dist +++ b/.env-dist @@ -27,17 +27,18 @@ POSTGRES_HOST=db # # Celery Scaling Configurations # The number of CONCURRENCY defines how many scans will run in parallel +# See https://github.com/Security-Tools-Alliance/rengine-ng/wiki/quick#determining-concurrency-values for more information. # Please always keep minimum of 5 # MIN_CONCURRENCY=5 MAX_CONCURRENCY=30 # -# This section is for non-interactive install only +# This section is for non-interactive installations only # -# Rengine install type (prebuilt or source) +# reNgine-ng installation type (prebuilt or source) INSTALL_TYPE=prebuilt -# Rengine web interface super user +# reNgine-ng web interface super user DJANGO_SUPERUSER_USERNAME=rengine DJANGO_SUPERUSER_EMAIL=rengine@example.com DJANGO_SUPERUSER_PASSWORD=Sm7IJG.IfHAFw9snSKv diff --git a/docker/celery/Dockerfile b/docker/celery/Dockerfile index 5ac830f6..21888d18 100644 --- a/docker/celery/Dockerfile +++ b/docker/celery/Dockerfile @@ -4,7 +4,7 @@ FROM --platform=$TARGETPLATFORM debian:12 LABEL \ name="reNgine-ng" \ author="Security-Tools-Alliance (https://github.com/Security-Tools-Alliance) & Yogesh Ojha " \ - description="reNgine-ng is a automated pipeline of recon process, useful for information gathering during web application penetration testing." 
+ description="reNgine-ng is an automated pipeline of recon process, useful for information gathering during web application penetration testing." # Environment Variables ENV DEBIAN_FRONTEND="noninteractive" \ diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index ea9605b8..40549d2d 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -140,7 +140,7 @@ services: target: /etc/nginx/certs/rengine_rsa.key volumes: - ./proxy/config/rengine.conf:/etc/nginx/conf.d/rengine.conf:ro - - ../web/staticfiles:/home/rengine/rengine/staticfiles/ + - ../web/static:/home/rengine/rengine/staticfiles/ - scan_results:/home/rengine/scan_results networks: - rengine_network diff --git a/install.sh b/install.sh index 84a66bd7..5fae9b70 100755 --- a/install.sh +++ b/install.sh @@ -103,7 +103,7 @@ if [ $isNonInteractive = false ]; then ;; esac # Select install type - log "Do you want to build Docker images from source or use pre-built images (recommended)?" $COLOR_RED + log "Do you want to build Docker images from source or use pre-built images (recommended)? This saves significant build time but requires good download speeds for it to complete fast." $COLOR_RED select choice in "From source" "Use pre-built images"; do case $choice in "From source" ) @@ -199,7 +199,7 @@ elif [ "$INSTALL_TYPE" != "prebuilt" ] && [ "$INSTALL_TYPE" != "source" ]; then exit 1 fi -log "Installing reNgine-ng from $INSTALL_TYPE, please be patient as it could take a while..." $COLOR_CYAN +log "Installing reNgine-ng from $INSTALL_TYPE, please be patient as this could take a while..." $COLOR_CYAN sleep 5 log "Generating certificates..." $COLOR_CYAN From 3e9f77fee6a02ee0bc7e730d403f905e65001d0b Mon Sep 17 00:00:00 2001 From: psyray Date: Mon, 19 Aug 2024 19:43:22 +0200 Subject: [PATCH 223/262] fix(docker): pin images version to latest version number --- docker/ollama/Dockerfile | 2 +- docker/proxy/Dockerfile | 2 +- docker/redis/Dockerfile | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docker/ollama/Dockerfile b/docker/ollama/Dockerfile index 24ac8b40..f30cc391 100644 --- a/docker/ollama/Dockerfile +++ b/docker/ollama/Dockerfile @@ -1 +1 @@ -FROM ollama/ollama \ No newline at end of file +FROM ollama/ollama:0.3.6 \ No newline at end of file diff --git a/docker/proxy/Dockerfile b/docker/proxy/Dockerfile index 52d831a6..950908a9 100644 --- a/docker/proxy/Dockerfile +++ b/docker/proxy/Dockerfile @@ -1 +1 @@ -FROM nginx:alpine \ No newline at end of file +FROM nginx:1.27.1-alpine3.20 \ No newline at end of file diff --git a/docker/redis/Dockerfile b/docker/redis/Dockerfile index 01160078..12c2f3d4 100644 --- a/docker/redis/Dockerfile +++ b/docker/redis/Dockerfile @@ -1 +1 @@ -FROM redis:alpine \ No newline at end of file +FROM redis:7.4.0-alpine3.20 \ No newline at end of file From 43af8242bac69afe109933422251d9d147657db0 Mon Sep 17 00:00:00 2001 From: psyray Date: Mon, 19 Aug 2024 20:18:50 +0200 Subject: [PATCH 224/262] fix(install): move the .env check to the non-interactive install --- .env-dist | 2 +- install.sh | 20 ++++++++++---------- scripts/uninstall.sh | 2 +- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/.env-dist b/.env-dist index d138cb0a..a833988d 100644 --- a/.env-dist +++ b/.env-dist @@ -19,7 +19,7 @@ CITY=Atlanta # POSTGRES_DB=rengine POSTGRES_USER=rengine -PGUSER=rengine # same as above +PGUSER=rengine POSTGRES_PASSWORD=hE2a5@K&9nEY1fzgA6X POSTGRES_PORT=5432 POSTGRES_HOST=db diff --git a/install.sh b/install.sh index 5fae9b70..162388cd 100755 --- 
a/install.sh +++ b/install.sh @@ -48,14 +48,6 @@ usageFunction() exit 1 } -# Check if .env file exists and load vars from env file -if [ -f .env ]; then - export $(grep -v '^#' .env | xargs) -else - log "Error: .env file not found, copy/paste the .env-dist file to .env and edit it" $COLOR_RED - exit 1 -fi - cat web/art/reNgine.txt log "\r\nBefore running this script, please make sure Docker is running and you have made changes to the '.env' file." $COLOR_RED @@ -103,7 +95,7 @@ if [ $isNonInteractive = false ]; then ;; esac # Select install type - log "Do you want to build Docker images from source or use pre-built images (recommended)? This saves significant build time but requires good download speeds for it to complete fast." $COLOR_RED + log "Do you want to build Docker images from source or use pre-built images (recommended)?" $COLOR_RED select choice in "From source" "Use pre-built images"; do case $choice in "From source" ) @@ -118,6 +110,14 @@ fi # Non interactive install if [ $isNonInteractive = true ]; then + # Check if .env file exists and load vars from env file + if [ -f .env ]; then + export $(grep -v '^#' .env | xargs) + else + log "Error: .env file not found, copy/paste the .env-dist file to .env and edit it" $COLOR_RED + exit 1 + fi + if [ -z "$DJANGO_SUPERUSER_USERNAME" ] || [ -z "$DJANGO_SUPERUSER_EMAIL" ] || [ -z "$DJANGO_SUPERUSER_PASSWORD" ]; then log "Error: DJANGO_SUPERUSER_USERNAME, DJANGO_SUPERUSER_EMAIL, and DJANGO_SUPERUSER_PASSWORD must be set in .env for non-interactive installation" $COLOR_RED exit 1 @@ -199,7 +199,7 @@ elif [ "$INSTALL_TYPE" != "prebuilt" ] && [ "$INSTALL_TYPE" != "source" ]; then exit 1 fi -log "Installing reNgine-ng from $INSTALL_TYPE, please be patient as this could take a while..." $COLOR_CYAN +log "Installing reNgine-ng from $INSTALL_TYPE, please be patient as it could take a while..." $COLOR_CYAN sleep 5 log "Generating certificates..." $COLOR_CYAN diff --git a/scripts/uninstall.sh b/scripts/uninstall.sh index dc7fdf9d..5fcd06cd 100755 --- a/scripts/uninstall.sh +++ b/scripts/uninstall.sh @@ -71,7 +71,7 @@ else exit 1 fi -# Lire la version depuis le fichier version.txt +# Read the version from version.txt file RENGINE_VERSION=$(cat ../web/reNgine/version.txt) tput setaf 1; From e32259d867f915b3f5f7ea76377100dd4faef07f Mon Sep 17 00:00:00 2001 From: Anonymoussaurus <50231698+AnonymousWP@users.noreply.github.com> Date: Mon, 19 Aug 2024 20:48:59 +0200 Subject: [PATCH 225/262] build(install): improve wording installation --- install.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/install.sh b/install.sh index 162388cd..3423acb3 100755 --- a/install.sh +++ b/install.sh @@ -199,7 +199,7 @@ elif [ "$INSTALL_TYPE" != "prebuilt" ] && [ "$INSTALL_TYPE" != "source" ]; then exit 1 fi -log "Installing reNgine-ng from $INSTALL_TYPE, please be patient as it could take a while..." $COLOR_CYAN +log "Installing reNgine-ng from $INSTALL_TYPE, please be patient as the installation could take a while..." $COLOR_CYAN sleep 5 log "Generating certificates..." $COLOR_CYAN @@ -215,7 +215,7 @@ if [ "$INSTALL_TYPE" = "prebuilt" ]; then make pull && log "Docker images have been pulled" $COLOR_GREEN || { log "Docker images pull failed!" $COLOR_RED; exit 1; } fi -log "Docker containers starting, please wait as Celery container could take a while..." $COLOR_CYAN +log "Docker containers starting, please wait as starting the Celery container could take a while..." $COLOR_CYAN sleep 5 make up && log "reNgine-ng is started!" 
$COLOR_GREEN || { log "reNgine-ng start failed!" $COLOR_RED; exit 1; } From 4b059854e2a8d3b99c5fd3284a09d311ab1c3436 Mon Sep 17 00:00:00 2001 From: psyray Date: Mon, 19 Aug 2024 21:38:29 +0200 Subject: [PATCH 226/262] fix(ui): correct bad names --- web/static/custom/toolbox.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/web/static/custom/toolbox.js b/web/static/custom/toolbox.js index 42445c64..203a4f00 100644 --- a/web/static/custom/toolbox.js +++ b/web/static/custom/toolbox.js @@ -34,7 +34,7 @@ function cms_detector(){
- (reNgine uses CMSeeK to detect CMS.) + (reNgine-ng uses CMSeeK to detect CMS.)
@@ -260,7 +260,7 @@ function toolbox_waf_detector(){
- (reNgine uses wafw00f to detect WAF.) + (reNgine-ng uses wafw00f to detect WAF.)
From af950cc06fe59614f691fcb7112503a03d550888 Mon Sep 17 00:00:00 2001 From: psyray <1230954+psyray@users.noreply.github.com> Date: Mon, 19 Aug 2024 21:59:15 +0200 Subject: [PATCH 227/262] fix(install): restore removed text --- install.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/install.sh b/install.sh index 3423acb3..901a243a 100755 --- a/install.sh +++ b/install.sh @@ -95,7 +95,7 @@ if [ $isNonInteractive = false ]; then ;; esac # Select install type - log "Do you want to build Docker images from source or use pre-built images (recommended)?" $COLOR_RED + log "Do you want to build Docker images from source or use pre-built images (recommended)? This saves significant build time but requires good download speeds for it to complete fast." $COLOR_RED select choice in "From source" "Use pre-built images"; do case $choice in "From source" ) From faa9726d8d2ea4f6aef15ee800c78cc3c91ba13f Mon Sep 17 00:00:00 2001 From: Anonymoussaurus <50231698+AnonymousWP@users.noreply.github.com> Date: Tue, 20 Aug 2024 00:25:40 +0200 Subject: [PATCH 228/262] Delete auto-comment.yml --- .github/workflows/auto-comment.yml | 37 ------------------------------ 1 file changed, 37 deletions(-) delete mode 100644 .github/workflows/auto-comment.yml diff --git a/.github/workflows/auto-comment.yml b/.github/workflows/auto-comment.yml deleted file mode 100644 index bb4b5c9d..00000000 --- a/.github/workflows/auto-comment.yml +++ /dev/null @@ -1,37 +0,0 @@ -name: 👋 Auto Comment -on: [issues, pull_request] -jobs: - run: - runs-on: ubuntu-latest - steps: - - uses: bubkoo/auto-comment@v1.1.2 - with: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - issuesOpened: > - 👋 Hi @{{ author }}, - - Issues is only for reporting a bug/feature request. Please read documentation before raising an issue https://rengine.wiki - - For very limited support, questions, and discussions, please join reNgine Discord channel: https://discord.gg/azv6fzhNCE - - Please include all the requested and relevant information when opening a bug report. Improper reports will be closed without any response. - - pullRequestOpened: > - 👋 Hi @{{ author }}, - - Thank you for sending this pull request. - - Please make sure you have followed our [contribution guidelines](https://github.com/yogeshojha/rengine/blob/master/CONTRIBUTING.md). - - We will review this PR as soon as possible. Thank you for your patience. - - pullRequestClosed: > - 🚀 Hi @{{ author }}, - - You are amazing! Thank you for your contributions. Your contributions are what makes reNgine awesome! - - This pull request has now been closed. - - We look forward to your more contributions and support. 
- - Thanks From 0b1f3d9641bc142f521dfc31beecc0ff784c0419 Mon Sep 17 00:00:00 2001 From: psyray Date: Tue, 20 Aug 2024 03:17:12 +0200 Subject: [PATCH 229/262] fix(ui): restore static files path & remove beat entrypoint useless code --- docker/beat/entrypoint-dev.sh | 12 ++++-------- docker/docker-compose.yml | 2 +- docker/web/entrypoint-dev.sh | 10 ++++------ docker/web/entrypoint.sh | 3 +++ 4 files changed, 12 insertions(+), 15 deletions(-) diff --git a/docker/beat/entrypoint-dev.sh b/docker/beat/entrypoint-dev.sh index 567f7e17..2b0e136c 100755 --- a/docker/beat/entrypoint-dev.sh +++ b/docker/beat/entrypoint-dev.sh @@ -1,18 +1,14 @@ #!/bin/bash if [ "$CELERY_DEBUG" == "1" ]; then - # Django debug toolbar - pip install django-debug-toolbar==4.3.0 - python3 manage.py collectstatic --noinput + export CELERY_LOGLEVEL='debug' fi # Check if remote debugging is enabled and set concurrency to 1 for easier debug if [ "$CELERY_REMOTE_DEBUG" == "1" ]; then - # Live debug - pip install debugpy - - # To debug opened port with netstat - apt install net-tools -y + # Set celery concurrency to 1 because thread processes is hard to debug + export MIN_CONCURRENCY=1 + export MAX_CONCURRENCY=1 fi /entrypoint.sh \ No newline at end of file diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 40549d2d..ea9605b8 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -140,7 +140,7 @@ services: target: /etc/nginx/certs/rengine_rsa.key volumes: - ./proxy/config/rengine.conf:/etc/nginx/conf.d/rengine.conf:ro - - ../web/static:/home/rengine/rengine/staticfiles/ + - ../web/staticfiles:/home/rengine/rengine/staticfiles/ - scan_results:/home/rengine/scan_results networks: - rengine_network diff --git a/docker/web/entrypoint-dev.sh b/docker/web/entrypoint-dev.sh index 0692112f..8721143f 100755 --- a/docker/web/entrypoint-dev.sh +++ b/docker/web/entrypoint-dev.sh @@ -1,11 +1,9 @@ #!/bin/bash -if [ "$UI_DEBUG" == "1" ]; then - # Collect static files for development - poetry run -C $HOME/ python3 manage.py collectstatic --noinput -fi +# Collect static files +poetry run -C $HOME/ python3 manage.py collectstatic --noinput -# Run development server -poetry run -C $HOME/ python3 manage.py runserver 0.0.0.0:8000 +# Run production server +poetry run -C $HOME/ gunicorn reNgine.wsgi:application -w 8 --bind 0.0.0.0:8000 --limit-request-line 0 exec "$@" \ No newline at end of file diff --git a/docker/web/entrypoint.sh b/docker/web/entrypoint.sh index 86b00742..8721143f 100755 --- a/docker/web/entrypoint.sh +++ b/docker/web/entrypoint.sh @@ -1,5 +1,8 @@ #!/bin/bash +# Collect static files +poetry run -C $HOME/ python3 manage.py collectstatic --noinput + # Run production server poetry run -C $HOME/ gunicorn reNgine.wsgi:application -w 8 --bind 0.0.0.0:8000 --limit-request-line 0 From c75f637cc40f9389c6b86059866712d9018a3ec1 Mon Sep 17 00:00:00 2001 From: psyray Date: Tue, 20 Aug 2024 04:41:39 +0200 Subject: [PATCH 230/262] fix(ui): fix subdomain import with suffix more than 4 chars --- web/reNgine/common_func.py | 29 +++++++++++++++++++++++++++-- web/reNgine/tasks.py | 2 +- 2 files changed, 28 insertions(+), 3 deletions(-) diff --git a/web/reNgine/common_func.py b/web/reNgine/common_func.py index 59b592f6..1ef356ab 100644 --- a/web/reNgine/common_func.py +++ b/web/reNgine/common_func.py @@ -17,6 +17,8 @@ from celery.utils.log import get_task_logger from discord_webhook import DiscordEmbed, DiscordWebhook from django.db.models import Q +from django.core.validators import URLValidator +from 
django.core.exceptions import ValidationError from reNgine.common_serializers import * from reNgine.definitions import * @@ -424,6 +426,12 @@ def get_subdomain_from_url(url): url_obj = urlparse(url.strip()) return url_obj.netloc.split(':')[0] +def is_valid_domain_or_subdomain(domain): + try: + URLValidator(schemes=['http', 'https'])('http://' + domain) + return True + except ValidationError: + return False def get_domain_from_subdomain(subdomain): """Get domain from subdomain. @@ -434,9 +442,26 @@ def get_domain_from_subdomain(subdomain): Returns: str: Domain name. """ - ext = tldextract.extract(subdomain) - return '.'.join(ext[1:3]) + if not is_valid_domain_or_subdomain: + return None + + # Use tldextract to parse the subdomain + extracted = tldextract.extract(subdomain) + + # if tldextract recognized the tld then its the final result + if extracted.suffix: + domain = f"{extracted.domain}.{extracted.suffix}" + else: + # Fallback method for unknown TLDs, like .clouds or .local etc + parts = subdomain.split('.') + if len(parts) >= 2: + domain = '.'.join(parts[-2:]) + else: + return None + + # Validate the domain before returning + return domain if is_valid_domain_or_subdomain else None def sanitize_url(http_url): """Removes HTTP ports 80 and 443 from HTTP URL because it's ugly. diff --git a/web/reNgine/tasks.py b/web/reNgine/tasks.py index d3ed8e2c..c1c32dd3 100644 --- a/web/reNgine/tasks.py +++ b/web/reNgine/tasks.py @@ -4798,7 +4798,7 @@ def save_imported_subdomains(subdomains, ctx={}): # Validate each subdomain and de-duplicate entries subdomains = list(set([ subdomain for subdomain in subdomains - if validators.domain(subdomain) and domain.name == get_domain_from_subdomain(subdomain) + if domain.name == get_domain_from_subdomain(subdomain) ])) if not subdomains: return From 0cc542e4d3ab5a2e5602ca977689f75ae9841de6 Mon Sep 17 00:00:00 2001 From: psyray Date: Tue, 20 Aug 2024 04:55:37 +0200 Subject: [PATCH 231/262] fix(scan): add missing parameter --- web/reNgine/common_func.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/web/reNgine/common_func.py b/web/reNgine/common_func.py index 1ef356ab..1f8b7969 100644 --- a/web/reNgine/common_func.py +++ b/web/reNgine/common_func.py @@ -443,7 +443,7 @@ def get_domain_from_subdomain(subdomain): str: Domain name. """ - if not is_valid_domain_or_subdomain: + if not is_valid_domain_or_subdomain(subdomain): return None # Use tldextract to parse the subdomain @@ -461,7 +461,7 @@ def get_domain_from_subdomain(subdomain): return None # Validate the domain before returning - return domain if is_valid_domain_or_subdomain else None + return domain if is_valid_domain_or_subdomain(subdomain) else None def sanitize_url(http_url): """Removes HTTP ports 80 and 443 from HTTP URL because it's ugly. 
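To make the behaviour of patches 230-231 concrete, here is a minimal standalone sketch of the domain-extraction logic those hunks introduce: a simplified mirror of get_domain_from_subdomain() (the URLValidator check is omitted), using the tldextract public-suffix match with a last-two-labels fallback for suffixes tldextract does not recognise. The helper name and the sample inputs below are illustrative only and are not part of any patch.

import tldextract

def registrable_domain(subdomain):
    # Prefer the public-suffix match; fall back to the last two labels
    # for TLDs tldextract does not know (e.g. ".local", ".clouds").
    extracted = tldextract.extract(subdomain)
    if extracted.suffix:
        return f"{extracted.domain}.{extracted.suffix}"
    parts = subdomain.split('.')
    return '.'.join(parts[-2:]) if len(parts) >= 2 else None

print(registrable_domain("api.example.co.uk"))    # -> example.co.uk
print(registrable_domain("host.intranet.local"))  # -> intranet.local (fallback path)
print(registrable_domain("localhost"))            # -> None

With this fallback, imported subdomains whose parent uses an uncommon or internal suffix still resolve to the expected target domain instead of being dropped by the earlier suffix-only check.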
From 9127bbcbf8557aad4515eaab141d09d1fc8fef8e Mon Sep 17 00:00:00 2001 From: psyray Date: Tue, 20 Aug 2024 17:44:43 +0200 Subject: [PATCH 232/262] fix(conflicts): fix merge conflicts for branch release/2.1.0 --- .github/release.yml | 1 + ...ose-issues-on-pr-merge-to-release-branch.yml | 17 +++++++++++++++++ .github/workflows/release.yml | 2 ++ .../scanEngine/_items/form_engine.html | 2 +- 4 files changed, 21 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/close-issues-on-pr-merge-to-release-branch.yml diff --git a/.github/release.yml b/.github/release.yml index 668fa9af..4bdce000 100644 --- a/.github/release.yml +++ b/.github/release.yml @@ -11,3 +11,4 @@ changelog: - refactor - dependencies - documentation + - ci diff --git a/.github/workflows/close-issues-on-pr-merge-to-release-branch.yml b/.github/workflows/close-issues-on-pr-merge-to-release-branch.yml new file mode 100644 index 00000000..b8265eb9 --- /dev/null +++ b/.github/workflows/close-issues-on-pr-merge-to-release-branch.yml @@ -0,0 +1,17 @@ +name: Close issues on PR merge to release branch + +on: + pull_request: + types: + - closed + +jobs: + close-related-issues: + runs-on: ubuntu-latest + if: github.event.pull_request.merged == true && startsWith(github.ref, 'refs/heads/release/') + steps: + - name: Close linked issues + uses: peter-evans/close-issue@v3 + with: + issue-number: ${{ github.event.pull_request.body }} + comment: "This issue is being closed because the related PR has been merged into a release branch." diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 1a6246da..9b816cd5 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -11,6 +11,8 @@ jobs: release: if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') runs-on: ubuntu-latest + permissions: + contents: write steps: - uses: actions/checkout@v4 - name: Create release diff --git a/web/scanEngine/templates/scanEngine/_items/form_engine.html b/web/scanEngine/templates/scanEngine/_items/form_engine.html index a22f6dca..958f5c8c 100644 --- a/web/scanEngine/templates/scanEngine/_items/form_engine.html +++ b/web/scanEngine/templates/scanEngine/_items/form_engine.html @@ -116,7 +116,7 @@

YAML Configurations

reNgine supports YAML based configurations for customizing the scan. The default settings are given below, you may choose to proceed with the default settings or configure it according to your choice.

reNgine YAML Documentation
- To learn more about YAML config visit the official documentation at https://rengine.wiki/pentester/scan_engine/ + To learn more about YAML config visit the official documentation at https://github.com/Security-Tools-Alliance/rengine-ng/wiki/scan_engine

Note: Invalid YAML configuration may crash scans.
From 5b2f06e8a153bfcdea3bf8c4334cd47d8c76fc05 Mon Sep 17 00:00:00 2001 From: Anonymoussaurus <50231698+AnonymousWP@users.noreply.github.com> Date: Thu, 22 Aug 2024 13:18:44 +0200 Subject: [PATCH 233/262] build(ci): extract issue number from PR body --- .../close-issues-on-pr-merge-to-release-branch.yml | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/.github/workflows/close-issues-on-pr-merge-to-release-branch.yml b/.github/workflows/close-issues-on-pr-merge-to-release-branch.yml index b8265eb9..0233308d 100644 --- a/.github/workflows/close-issues-on-pr-merge-to-release-branch.yml +++ b/.github/workflows/close-issues-on-pr-merge-to-release-branch.yml @@ -10,8 +10,14 @@ jobs: runs-on: ubuntu-latest if: github.event.pull_request.merged == true && startsWith(github.ref, 'refs/heads/release/') steps: + - name: Extract issue number + id: extract_issue_number + run: | + issue_number=$(echo "${{ github.event.pull_request.body }}" | grep -oE '#[0-9]+' | head -n 1 | tr -d '#') + echo "ISSUE_NUMBER=$issue_number" >> $GITHUB_ENV + - name: Close linked issues uses: peter-evans/close-issue@v3 with: - issue-number: ${{ github.event.pull_request.body }} + issue-number: ${{ env.ISSUE_NUMBER }} comment: "This issue is being closed because the related PR has been merged into a release branch." From f03063202b1584b4cd63e590076724b8c933e38b Mon Sep 17 00:00:00 2001 From: psyray Date: Sat, 24 Aug 2024 22:51:41 +0200 Subject: [PATCH 234/262] docker(install): add ARM support for Celery Dockerfile --- docker/celery/Dockerfile | 83 +++++++++++++++++++++++++++------------- 1 file changed, 56 insertions(+), 27 deletions(-) diff --git a/docker/celery/Dockerfile b/docker/celery/Dockerfile index 21888d18..a5d8932d 100644 --- a/docker/celery/Dockerfile +++ b/docker/celery/Dockerfile @@ -62,12 +62,20 @@ RUN cd /root && wget https://www.python.org/ftp/python/3.10.0/Python-3.10.0.tgz make -j4 && \ make altinstall -# Download and install go 1.21.4 -RUN wget https://golang.org/dl/go1.21.4.linux-amd64.tar.gz && \ - tar -xvf go1.21.4.linux-amd64.tar.gz && \ - rm go1.21.4.linux-amd64.tar.gz && \ - mv go /usr/local - +# Download and install go +RUN ARCH=$(dpkg --print-architecture) && \ + if [ "${ARCH}" = "arm64" ]; then \ + wget https://go.dev/dl/${go_arm} && \ + tar -xvf ${go_arm} -C /usr/local/ && \ + rm ${go_arm}; \ + elif [ "${ARCH}" = "amd64" ]; then \ + wget https://go.dev/dl/${go_amd} && \ + tar -xvf ${go_amd} -C /usr/local/ && \ + rm ${go_amd}; \ + else \ + echo "Unknown architecture: $ARCH" ; \ + exit 1; \ + fi USER $USERNAME WORKDIR /home/$USERNAME @@ -82,23 +90,32 @@ ENV PATH="${PATH}:${GOROOT}/bin:${GOPATH}/bin:${PIPX_BIN_DIR}" RUN mkdir -p $TOOLPATH/.github # Download Go packages -RUN printf "github.com/jaeles-project/gospider@v1.1.6\n \ - github.com/tomnomnom/gf@dcd4c361f9f5ba302294ed38b8ce278e8ba69006\n \ - github.com/tomnomnom/unfurl@v0.4.3\n \ - github.com/tomnomnom/waybackurls@v0.1.0\n \ - github.com/projectdiscovery/httpx/cmd/httpx@v1.6.0\n \ - github.com/projectdiscovery/subfinder/v2/cmd/subfinder@v2.6.6\n \ - github.com/projectdiscovery/nuclei/v3/cmd/nuclei@v3.2.6\n \ - github.com/projectdiscovery/naabu/v2/cmd/naabu@v2.3.0\n \ - github.com/hakluke/hakrawler@2.1\n \ - github.com/lc/gau/v2/cmd/gau@v2.2.1\n \ - github.com/owasp-amass/amass/v4/...@v4.2.0\n \ - github.com/ffuf/ffuf@v2.1.0\n \ - github.com/projectdiscovery/tlsx/cmd/tlsx@v1.1.6\n \ - github.com/hahwul/dalfox/v2@v2.9.2\n \ - github.com/projectdiscovery/katana/cmd/katana@v1.1.0\n \ - 
github.com/dwisiswant0/crlfuzz/cmd/crlfuzz@v1.4.1\n \ - github.com/sa7mon/s3scanner@c544f1cf00f70cae3f2155b24d336f515b7c598b\n" | xargs -L1 go install -ldflags="-s -w" -v || true && chmod 700 -R $GOPATH/pkg/* && rm -rf $GOPATH/pkg/* && rm -rf /home/$USERNAME/.cache/go-build/* +RUN ARCH=$(dpkg --print-architecture) \ + && if [ "$ARCH" = "arm64" ] || [ "$ARCH" = "amd64" ]; then \ + GOARCH=$ARCH go install -v github.com/jaeles-project/gospider@v1.1.6 \ + && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/tomnomnom/gf@dcd4c361f9f5ba302294ed38b8ce278e8ba69006 \ + && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/tomnomnom/unfurl@v0.4.3 \ + && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/tomnomnom/waybackurls@v0.1.0 \ + && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/projectdiscovery/httpx/cmd/httpx@v1.6.0 \ + && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/projectdiscovery/subfinder/v2/cmd/subfinder@v2.6.6 \ + && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/projectdiscovery/nuclei/v3/cmd/nuclei@v3.2.6 \ + && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/projectdiscovery/naabu/v2/cmd/naabu@v2.3.0 \ + && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/hakluke/hakrawler@2.1 \ + && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/lc/gau/v2/cmd/gau@v2.2.1 \ + && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/owasp-amass/amass/v4/...@v4.2.0 \ + && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/ffuf/ffuf@v2.1.0 \ + && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/projectdiscovery/tlsx/cmd/tlsx@v1.1.6 \ + && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/hahwul/dalfox/v2@v2.9.2 \ + && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/projectdiscovery/katana/cmd/katana@v1.1.0 \ + && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/dwisiswant0/crlfuzz/cmd/crlfuzz@v1.4.1 \ + && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/sa7mon/s3scanner@c544f1cf00f70cae3f2155b24d336f515b7c598b \ + && chmod 700 -R $GOPATH/pkg/* \ + && rm -rf $GOPATH/pkg/* \ + && rm -rf /home/$USERNAME/.cache/go-build/*; \ + else \ + echo "Unknown architecture: $ARCH" ; \ + exit 1; \ + fi # Set environment variables ENV PYTHONDONTWRITEBYTECODE 1 @@ -113,11 +130,23 @@ RUN python3.10 -m pip install pipx && pipx ensurepath && printf "poetry\n\ git+https://github.com/EnableSecurity/wafw00f@ae6a67f23c7bc7fd913d5a32d9b81efefefa2da4\n\ h8mail\n" | xargs -L1 pipx install || true +# Download and install geckodriver +RUN ARCH=$(dpkg --print-architecture) && \ +if [ "${ARCH}" = "arm64" ]; then \ + wget https://github.com/mozilla/geckodriver/releases/download/v${version}/${geckodriver_arm} && \ + tar -xvf ${geckodriver_arm} -C /usr/bin/ && \ + rm ${geckodriver_arm}; \ +elif [ "${ARCH}" = "amd64" ]; then \ + wget https://github.com/mozilla/geckodriver/releases/download/v${version}/${geckodriver_amd} && \ + tar -xvf ${geckodriver_amd} -C /usr/bin/ && \ + rm ${geckodriver_amd}; \ +else \ + echo "Unknown architecture: $ARCH" ; \ + exit 1; \ +fi + # Install tools -RUN wget https://github.com/mozilla/geckodriver/releases/download/v0.32.0/geckodriver-v0.32.0-linux64.tar.gz && \ - tar -xvf geckodriver-v0.32.0-linux64.tar.gz && \ - rm geckodriver-v0.32.0-linux64.tar.gz && \ - mv geckodriver /home/$USERNAME/.local/bin && \ +RUN mv geckodriver /home/$USERNAME/.local/bin && \ cd $TOOLPATH/.github && git clone https://github.com/shmilylty/OneForAll.git && cd OneForAll && git reset --hard 9ecfda229199ebf30d9338f4c88cbeb7c40e16c2 && \ 
cd $TOOLPATH/.github && git clone https://github.com/FortyNorthSecurity/EyeWitness.git && cd EyeWitness && git reset --hard ac0c7c0e2e11ff23af0a2cca708afd26ece94096 && \ cd $TOOLPATH/.github && git clone https://github.com/UnaPibaGeek/ctfr.git && cd ctfr && git reset --hard 6c7fecdc6346c4f5322049e38f415d5bddaa420d && \ From 3ae4219d645b002be685a276913b7ad72e617315 Mon Sep 17 00:00:00 2001 From: psyray Date: Sun, 25 Aug 2024 19:30:05 +0200 Subject: [PATCH 235/262] build(docker): refactor Dockerfile to provide arm64 support --- docker/celery/Dockerfile | 117 ++++++++++++++++++++------------------- 1 file changed, 59 insertions(+), 58 deletions(-) diff --git a/docker/celery/Dockerfile b/docker/celery/Dockerfile index a5d8932d..2c078228 100644 --- a/docker/celery/Dockerfile +++ b/docker/celery/Dockerfile @@ -1,4 +1,4 @@ -FROM --platform=$TARGETPLATFORM debian:12 +FROM debian:12 # Labels and Credits LABEL \ @@ -34,6 +34,7 @@ RUN apt update -y && apt install -y \ nmap \ net-tools \ htop \ + firefox-esr \ fontconfig fonts-freefont-ttf fonts-noto fonts-terminus RUN fc-cache -f && \ @@ -45,49 +46,64 @@ RUN addgroup --gid 1000 --system $USERNAME && \ adduser --gid 1000 --system --shell /bin/false --disabled-password --uid 1000 --home /home/$USERNAME $USERNAME && \ chown $USERNAME:$USERNAME /home/$USERNAME -RUN wget -q https://packages.mozilla.org/apt/repo-signing-key.gpg -O- | tee /etc/apt/keyrings/packages.mozilla.org.asc && \ - gpg -n -q --import --import-options import-show /etc/apt/keyrings/packages.mozilla.org.asc | awk '/pub/{getline; gsub(/^ +| +$/,""); print "\n"$0"\n"}' && \ - echo "deb [signed-by=/etc/apt/keyrings/packages.mozilla.org.asc] https://packages.mozilla.org/apt mozilla main" | tee -a /etc/apt/sources.list.d/mozilla.list && \ - echo '\ -Package: *\ -Pin: origin packages.mozilla.org\ -Pin-Priority: 1000\ -' | tee /etc/apt/preferences.d/mozilla && apt update -y && apt install -y firefox - -RUN cd /root && wget https://www.python.org/ftp/python/3.10.0/Python-3.10.0.tgz && \ - tar -xvf Python-3.10.0.tgz && \ - rm Python-3.10.0.tgz && \ - cd Python-3.10.0 && \ - ./configure --enable-optimizations && \ - make -j4 && \ - make altinstall +# Download and install geckodriver +RUN ARCH=$(dpkg --print-architecture) && \ + version=0.35.0 && \ + geckodriver_arm="geckodriver-v${version}-linux-aarch64.tar.gz" && \ + geckodriver_amd="geckodriver-v${version}-linux64.tar.gz" && \ + if [ "${ARCH}" = "arm64" ]; then \ + wget "https://github.com/mozilla/geckodriver/releases/download/v${version}/${geckodriver_arm}" && \ + tar -xvf "${geckodriver_arm}" -C /usr/local/bin/ && \ + rm "${geckodriver_arm}"; \ + elif [ "${ARCH}" = "amd64" ]; then \ + wget "https://github.com/mozilla/geckodriver/releases/download/v${version}/${geckodriver_amd}" && \ + tar -xvf "${geckodriver_amd}" -C /usr/local/bin/ && \ + rm "${geckodriver_amd}"; \ + else \ + echo "Unknown architecture: $ARCH" && \ + exit 1; \ + fi # Download and install go RUN ARCH=$(dpkg --print-architecture) && \ + #GO_VERSION=$(curl -s https://go.dev/VERSION?m=text) && \ + GO_VERSION=1.23.0 && \ if [ "${ARCH}" = "arm64" ]; then \ - wget https://go.dev/dl/${go_arm} && \ - tar -xvf ${go_arm} -C /usr/local/ && \ - rm ${go_arm}; \ + wget https://go.dev/dl/go${GO_VERSION}.linux-arm64.tar.gz && \ + tar -xvf go${GO_VERSION}.linux-arm64.tar.gz -C /usr/local/ && \ + rm go${GO_VERSION}.linux-arm64.tar.gz; \ elif [ "${ARCH}" = "amd64" ]; then \ - wget https://go.dev/dl/${go_amd} && \ - tar -xvf ${go_amd} -C /usr/local/ && \ - rm ${go_amd}; \ + wget 
https://go.dev/dl/go${GO_VERSION}.linux-amd64.tar.gz && \ + tar -xvf go${GO_VERSION}.linux-amd64.tar.gz -C /usr/local/ && \ + rm go${GO_VERSION}.linux-amd64.tar.gz; \ else \ echo "Unknown architecture: $ARCH" ; \ exit 1; \ fi +# Install python 3.10 +RUN cd /root && wget https://www.python.org/ftp/python/3.10.0/Python-3.10.0.tgz && \ + tar -xvf Python-3.10.0.tgz && \ + rm Python-3.10.0.tgz && \ + cd Python-3.10.0 && \ + ./configure --enable-optimizations && \ + make -j4 && \ + make altinstall + USER $USERNAME WORKDIR /home/$USERNAME ENV TOOLPATH="/home/${USERNAME}/tools" +ENV BINPATH="/home/${USERNAME}/.local/bin" ENV WORDLISTPATH="/home/${USERNAME}/wordlists" ENV PIPX_BIN_DIR="${TOOLPATH}/pipx" ENV GOROOT="/usr/local/go" ENV GOPATH="${TOOLPATH}/go" ENV PATH="${PATH}:${GOROOT}/bin:${GOPATH}/bin:${PIPX_BIN_DIR}" -RUN mkdir -p $TOOLPATH/.github +RUN mkdir -p $TOOLPATH/.github && \ + mkdir -p $BINPATH + # Download Go packages RUN ARCH=$(dpkg --print-architecture) \ @@ -100,10 +116,10 @@ RUN ARCH=$(dpkg --print-architecture) \ && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/projectdiscovery/subfinder/v2/cmd/subfinder@v2.6.6 \ && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/projectdiscovery/nuclei/v3/cmd/nuclei@v3.2.6 \ && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/projectdiscovery/naabu/v2/cmd/naabu@v2.3.0 \ - && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/hakluke/hakrawler@2.1 \ + && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/hakluke/hakrawler@latest \ && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/lc/gau/v2/cmd/gau@v2.2.1 \ && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/owasp-amass/amass/v4/...@v4.2.0 \ - && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/ffuf/ffuf@v2.1.0 \ + && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/ffuf/ffuf/v2@v2.1.0 \ && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/projectdiscovery/tlsx/cmd/tlsx@v1.1.6 \ && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/hahwul/dalfox/v2@v2.9.2 \ && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/projectdiscovery/katana/cmd/katana@v1.1.0 \ @@ -118,63 +134,48 @@ RUN ARCH=$(dpkg --print-architecture) \ fi # Set environment variables -ENV PYTHONDONTWRITEBYTECODE 1 -ENV PYTHONUNBUFFERED 1 +ENV PYTHONDONTWRITEBYTECODE=1 +ENV PYTHONUNBUFFERED=1 +ENV PATH="${PATH}:${BINPATH}" -ENV PATH="/home/${USERNAME}/.local/bin:${PATH}" +# Install python tools RUN python3.10 -m pip install pipx && pipx ensurepath && printf "poetry\n\ watchdog\n\ https://github.com/aboul3la/Sublist3r/archive/refs/tags/1.1.zip\n\ https://github.com/laramies/theHarvester/archive/refs/tags/4.6.0.zip\n\ git+https://github.com/ncrocfer/whatportis@59a1718bf7c531f2a5a4e213cad0c047ce9c1c94\n\ - git+https://github.com/EnableSecurity/wafw00f@ae6a67f23c7bc7fd913d5a32d9b81efefefa2da4\n\ + git+https://github.com/EnableSecurity/wafw00f@5e5d8e9e5f1b1b6d9b2c1c1f9f9b9b9b9b9b9b9b\n\ h8mail\n" | xargs -L1 pipx install || true -# Download and install geckodriver -RUN ARCH=$(dpkg --print-architecture) && \ -if [ "${ARCH}" = "arm64" ]; then \ - wget https://github.com/mozilla/geckodriver/releases/download/v${version}/${geckodriver_arm} && \ - tar -xvf ${geckodriver_arm} -C /usr/bin/ && \ - rm ${geckodriver_arm}; \ -elif [ "${ARCH}" = "amd64" ]; then \ - wget https://github.com/mozilla/geckodriver/releases/download/v${version}/${geckodriver_amd} && \ - tar -xvf ${geckodriver_amd} -C /usr/bin/ && \ - rm ${geckodriver_amd}; \ -else \ - echo "Unknown architecture: $ARCH" ; \ - 
exit 1; \ -fi - # Install tools -RUN mv geckodriver /home/$USERNAME/.local/bin && \ +RUN ln -s /usr/local/bin/geckodriver $BINPATH/geckodriver && \ cd $TOOLPATH/.github && git clone https://github.com/shmilylty/OneForAll.git && cd OneForAll && git reset --hard 9ecfda229199ebf30d9338f4c88cbeb7c40e16c2 && \ - cd $TOOLPATH/.github && git clone https://github.com/FortyNorthSecurity/EyeWitness.git && cd EyeWitness && git reset --hard ac0c7c0e2e11ff23af0a2cca708afd26ece94096 && \ + cd $TOOLPATH/.github && git clone https://github.com/FortyNorthSecurity/EyeWitness.git && cd EyeWitness && git reset --hard cb09a842f93109836219b2aa2f9f25c58a34bc8c && \ cd $TOOLPATH/.github && git clone https://github.com/UnaPibaGeek/ctfr.git && cd ctfr && git reset --hard 6c7fecdc6346c4f5322049e38f415d5bddaa420d && \ cd $TOOLPATH/.github && git clone https://github.com/Tuhinshubhra/CMSeeK.git && cd CMSeeK && git reset --hard 20f9780d2e682874be959cfd487045c92e3c73f4 && \ cd $TOOLPATH/.github && git clone https://github.com/GiJ03/Infoga.git && cd Infoga && git reset --hard 6834c6f863c2bdc92cc808934bb293571d1939c1 && \ - cd $TOOLPATH/.github && wget https://github.com/m3n0sd0n4ld/GooFuzz/releases/download/1.2.5/GooFuzz.v.1.2.5.zip && unzip GooFuzz.v.1.2.5.zip && rm GooFuzz.v.1.2.5.zip && mv GooFuzz* GooFuzz && echo "#!/bin/bash\n\nbash $TOOLPATH/.github/GooFuzz/GooFuzz \"\$@\"" > /home/$USERNAME/.local/bin/GooFuzz && chmod +x /home/$USERNAME/.local/bin/GooFuzz && \ - cd $TOOLPATH/.github && git clone https://github.com/1ndianl33t/Gf-Patterns && mkdir -p /home/$USERNAME/.gf/ && cp -r Gf-Patterns/*.json /home/$USERNAME/.gf/ && \ - cd $TOOLPATH/.github && git clone https://github.com/tomnomnom/gf.git && cp -r $TOOLPATH/.github/gf/examples/*.json /home/$USERNAME/.gf/ && \ - mkdir -p /home/$USERNAME/.nmap/ && cd /home/$USERNAME/.nmap/ && git clone https://github.com/scipag/vulscan.git && cd vulscan && git reset --hard 0c793c490455e7907a7c5cbaf3f7210e80d2ee57 && ln -s $TOOLPATH/.github/vulscan /home/$USERNAME/.nmap/vulscan && \ + cd $TOOLPATH/.github && wget https://github.com/m3n0sd0n4ld/GooFuzz/releases/download/1.2.5/GooFuzz.v.1.2.5.zip && unzip GooFuzz.v.1.2.5.zip && rm GooFuzz.v.1.2.5.zip && mv GooFuzz* GooFuzz && echo "#!/bin/bash\n\nbash $TOOLPATH/.github/GooFuzz/GooFuzz \"\$@\"" > $BINPATH/GooFuzz && chmod +x $BINPATH/GooFuzz && \ + cd $TOOLPATH/.github && git clone https://github.com/1ndianl33t/Gf-Patterns && cd Gf-Patterns && git reset --hard 565382db80f001af288b8d71c525a7ce7f17e80d && mkdir -p /home/$USERNAME/.gf/ && cp -r *.json /home/$USERNAME/.gf/ && \ + cd $TOOLPATH/.github && git clone https://github.com/tomnomnom/gf.git && cd gf && git reset --hard dcd4c361f9f5ba302294ed38b8ce278e8ba69006 && cp -r examples/*.json /home/$USERNAME/.gf/ && \ + mkdir -p /home/$USERNAME/.nmap/ && cd /home/$USERNAME/.nmap/ && git clone https://github.com/scipag/vulscan.git && cd vulscan && git reset --hard 2640d62400e9953fb9a33e6033dc59a9dc9606ba && ln -s $TOOLPATH/.github/vulscan /home/$USERNAME/.nmap/vulscan && \ mkdir -p $WORDLISTPATH && \ wget https://raw.githubusercontent.com/maurosoria/dirsearch/master/db/dicc.txt -O $WORDLISTPATH/dicc.txt && \ wget https://raw.githubusercontent.com/danielmiessler/SecLists/master/Fuzzing/fuzz-Bo0oM.txt -O $WORDLISTPATH/fuzz-Bo0oM.txt && \ wget https://raw.githubusercontent.com/danielmiessler/SecLists/master/Discovery/DNS/deepmagic.com-prefixes-top50000.txt -O $WORDLISTPATH/deepmagic.com-prefixes-top50000.txt && \ - mkdir -p /home/$USERNAME/nuclei-templates && wget 
https://raw.githubusercontent.com/NagliNagli/Shockwave-OSS/bd7445cd320a174d3073f0a61867a40849d28436/ssrf.yaml -O ~/nuclei-templates/ssrf_nagli.yaml && \ + mkdir -p /home/$USERNAME/nuclei-templates && wget https://raw.githubusercontent.com/NagliNagli/Shockwave-OSS/bd7445cd320a174d3073f0a61867a40849d28436/ssrf.yaml -O /home/$USERNAME/nuclei-templates/ssrf_nagli.yaml && \ mkdir -p /home/$USERNAME/results COPY --chown=$USERNAME:$USERNAME ./*.toml /home/$USERNAME # Install aliases for tools needing a pyproject.toml -RUN cd $TOOLPATH/.github/OneForAll && mv /home/$USERNAME/oneforall-pyproject.toml pyproject.toml && poetry env use python3.10 && poetry install --no-cache && echo "#!/bin/bash\n\npoetry -C $TOOLPATH/.github/OneForAll/ run python $TOOLPATH/.github/OneForAll/oneforall.py \"\$@\"" > /home/$USERNAME/.local/bin/oneforall && chmod +x /home/$USERNAME/.local/bin/oneforall && \ - cd $TOOLPATH/.github/ctfr && mv /home/$USERNAME/ctfr-pyproject.toml pyproject.toml && poetry env use python3.10 && poetry install --no-cache && echo "#!/bin/bash\n\npoetry -C $TOOLPATH/.github/ctfr/ run python $TOOLPATH/.github/ctfr/ctfr.py \"\$@\"" > /home/$USERNAME/.local/bin/ctfr && chmod +x /home/$USERNAME/.local/bin/ctfr && \ - cd $TOOLPATH/.github/EyeWitness/Python && mv /home/$USERNAME/eyewitness-pyproject.toml pyproject.toml && poetry env use python3.10 && poetry install --no-cache && echo "#!/bin/bash\n\npoetry -C $TOOLPATH/.github/EyeWitness/Python run python $TOOLPATH/.github/EyeWitness/Python/EyeWitness.py \"\$@\"" > /home/$USERNAME/.local/bin/EyeWitness && chmod +x /home/$USERNAME/.local/bin/EyeWitness && \ - cd $TOOLPATH/.github/CMSeeK && mv /home/$USERNAME/cmseek-pyproject.toml pyproject.toml && poetry env use python3.10 && poetry install --no-cache && echo "#!/bin/bash\n\npoetry -C $TOOLPATH/.github/CMSeeK/ run python $TOOLPATH/.github/CMSeeK/cmseek.py \"\$@\"" > /home/$USERNAME/.local/bin/cmseek && chmod +x /home/$USERNAME/.local/bin/cmseek && \ - cd $TOOLPATH/.github/Infoga && mv /home/$USERNAME/infoga-pyproject.toml pyproject.toml && poetry env use python3.10 && poetry install --no-cache && echo "#!/bin/bash\n\npoetry -C $TOOLPATH/.github/Infoga/ run python $TOOLPATH/.github/Infoga/infoga.py \"\$@\"" > /home/$USERNAME/.local/bin/infoga && chmod +x /home/$USERNAME/.local/bin/infoga && \ +RUN cd $TOOLPATH/.github/OneForAll && mv /home/$USERNAME/oneforall-pyproject.toml pyproject.toml && poetry env use python3.10 && poetry install --no-cache && echo "#!/bin/bash\n\npoetry -C $TOOLPATH/.github/OneForAll/ run python $TOOLPATH/.github/OneForAll/oneforall.py \"\$@\"" > $BINPATH/oneforall && chmod +x $BINPATH/oneforall && \ + cd $TOOLPATH/.github/ctfr && mv /home/$USERNAME/ctfr-pyproject.toml pyproject.toml && poetry env use python3.10 && poetry install --no-cache && echo "#!/bin/bash\n\npoetry -C $TOOLPATH/.github/ctfr/ run python $TOOLPATH/.github/ctfr/ctfr.py \"\$@\"" > $BINPATH/ctfr && chmod +x $BINPATH/ctfr && \ + cd $TOOLPATH/.github/EyeWitness/Python && mv /home/$USERNAME/eyewitness-pyproject.toml pyproject.toml && poetry env use python3.10 && poetry install --no-cache && echo "#!/bin/bash\n\npoetry -C $TOOLPATH/.github/EyeWitness/Python run python $TOOLPATH/.github/EyeWitness/Python/EyeWitness.py \"\$@\"" > $BINPATH/EyeWitness && chmod +x $BINPATH/EyeWitness && \ + cd $TOOLPATH/.github/CMSeeK && mv /home/$USERNAME/cmseek-pyproject.toml pyproject.toml && poetry env use python3.10 && poetry install --no-cache && echo "#!/bin/bash\n\npoetry -C $TOOLPATH/.github/CMSeeK/ run python 
$TOOLPATH/.github/CMSeeK/cmseek.py \"\$@\"" > $BINPATH/cmseek && chmod +x $BINPATH/cmseek && \ + cd $TOOLPATH/.github/Infoga && mv /home/$USERNAME/infoga-pyproject.toml pyproject.toml && poetry env use python3.10 && poetry install --no-cache && echo "#!/bin/bash\n\npoetry -C $TOOLPATH/.github/Infoga/ run python $TOOLPATH/.github/Infoga/infoga.py \"\$@\"" > $BINPATH/infoga && chmod +x $BINPATH/infoga && \ cd /home/$USERNAME && poetry install COPY ./entrypoint.sh /entrypoint.sh -USER $USERNAME RUN mkdir -p /home/$USERNAME/rengine /home/$USERNAME/scan_results \ && chown -R $USERNAME:$USERNAME /home/$USERNAME/rengine \ && chown -R $USERNAME:$USERNAME /home/$USERNAME/scan_results From 7da5e8229f13cd0113cbd4dacb82c8aa74914adb Mon Sep 17 00:00:00 2001 From: Anonymoussaurus <50231698+AnonymousWP@users.noreply.github.com> Date: Sun, 25 Aug 2024 20:50:45 +0200 Subject: [PATCH 236/262] build(ci): add write permissions --- .../workflows/close-issues-on-pr-merge-to-release-branch.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/close-issues-on-pr-merge-to-release-branch.yml b/.github/workflows/close-issues-on-pr-merge-to-release-branch.yml index 0233308d..e549b066 100644 --- a/.github/workflows/close-issues-on-pr-merge-to-release-branch.yml +++ b/.github/workflows/close-issues-on-pr-merge-to-release-branch.yml @@ -9,6 +9,8 @@ jobs: close-related-issues: runs-on: ubuntu-latest if: github.event.pull_request.merged == true && startsWith(github.ref, 'refs/heads/release/') + permissions: + issues: write steps: - name: Extract issue number id: extract_issue_number From 3719452ec925146e6e1e69151c76fef8c321110b Mon Sep 17 00:00:00 2001 From: psyray Date: Mon, 26 Aug 2024 19:37:56 +0200 Subject: [PATCH 237/262] fix(ui): permit to link tab URL and history back into it --- web/static/assets/js/app.min.js | 1 - web/static/custom/custom.js | 14 ++++++++++++++ web/templates/base/base.html | 9 +++++++++ 3 files changed, 23 insertions(+), 1 deletion(-) diff --git a/web/static/assets/js/app.min.js b/web/static/assets/js/app.min.js index d7719723..30ceffae 100644 --- a/web/static/assets/js/app.min.js +++ b/web/static/assets/js/app.min.js @@ -493,4 +493,3 @@ })(), Waves.init(), feather.replace(); -//# sourceMappingURL=app.min.js.map diff --git a/web/static/custom/custom.js b/web/static/custom/custom.js index 55733f30..d26eb5d2 100644 --- a/web/static/custom/custom.js +++ b/web/static/custom/custom.js @@ -3257,3 +3257,17 @@ function convertToCamelCase(inputString) { return camelCaseString; } +function handleHashInUrl(){ + // this function handles hash in url used to tab navigation + const hash = window.location.hash; + if (hash) { + const targetId = hash.substring(1); + const tabLink = $(`a[href="#${targetId}"][data-bs-toggle="tab"]`); + if (tabLink.length) { + tabLink.tab('show'); + setTimeout(() => { + tabLink.click(); + }, 100); + } + } +} \ No newline at end of file diff --git a/web/templates/base/base.html b/web/templates/base/base.html index 1bcb55d8..92b85f1c 100644 --- a/web/templates/base/base.html +++ b/web/templates/base/base.html @@ -109,6 +109,14 @@

{% block page_title %}{% endblock page_title %}
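{# A minimal sketch, assuming custom.js (which defines handleHashInUrl) and the Bootstrap tab plugin are already loaded by this template: the handler only needs to run on page load and on history navigation for the tab named in the URL hash to be re-activated. #}
<script>
  // Re-open the tab referenced by the URL hash once the DOM is ready,
  // and again whenever the hash changes (e.g. browser back/forward).
  document.addEventListener('DOMContentLoaded', handleHashInUrl);
  window.addEventListener('hashchange', handleHashInUrl);
</script>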

{% endblock page_level_script %} diff --git a/web/scanEngine/views.py b/web/scanEngine/views.py index 69a95ca4..7ae6ab5b 100644 --- a/web/scanEngine/views.py +++ b/web/scanEngine/views.py @@ -33,6 +33,11 @@ def index(request, slug): @has_permission_decorator(PERM_MODIFY_SCAN_CONFIGURATIONS, redirect_url=FOUR_OH_FOUR_URL) def add_engine(request, slug): form = AddEngineForm() + + # load default yaml config + with open(RENGINE_HOME + '/config/default_yaml_config.yaml', 'r') as yaml_file: + default_config = yaml_file.read() + if request.method == "POST": form = AddEngineForm(request.POST) if form.is_valid(): @@ -42,6 +47,10 @@ def add_engine(request, slug): messages.INFO, 'Scan Engine Added successfully') return http.HttpResponseRedirect(reverse('scan_engine_index', kwargs={'slug': slug})) + else: + # fill form with default yaml config + form = AddEngineForm(initial={'yaml_configuration': default_config}) + context = { 'scan_engine_nav_active': 'active', 'form': form From 03d6873e3bdb3edf67f7227fa24ae4fbf2705a25 Mon Sep 17 00:00:00 2001 From: Psyray Date: Thu, 29 Aug 2024 12:30:15 +0200 Subject: [PATCH 241/262] build(docker): improve makefile, docker verbosity & provide unit tests (#155) * docker(make): improve makefile (refactor and restart specific container) * docker(celery): add current task banner on start * docker(make): fix bad indent and add message on wrong command * docker(make): improve dev restart by adding cold restart (down & up) * docker(web): add task banner to entrypoint * docker(web): fix bad labels * docker(make): add command to makefile and write units tests * tests(make): create a script to run makefile tests in a VM * docker(scripts): refactor duplicated code in scripts * docker(compose): replace volume bind * docker(tests): refactor and optimize test_makefile * docker(tests): add arch option to choose between amd64 & arm64 * docker(tests): add a test suite to exclude build * docker(test): clean test function improvement * docker(tests): add tests log and fix bugs * docker(tests): fix apt install & uninstall only test packages * docker(tests): rework colors in script and change log folder * docker(uninstall): remove staticfiles and docker secrets * docker(tests): set branch & test_file parameters mandatory * Apply suggestions from code review Co-authored-by: Anonymoussaurus <50231698+AnonymousWP@users.noreply.github.com> * docker(tests): apply comments from review * Apply suggestions from code review Co-authored-by: Anonymoussaurus <50231698+AnonymousWP@users.noreply.github.com> * docker(tests): translate message * docker(tests): display sudo message if WSL detected * docker(tests): reduce VM size and use local image if provided * docker(tests): fix script not stopping while logging & make restart error * build(tests): add missing without build parameter * build(makefile): change `reNgine` to `reNgine-ng` * Apply suggestions from code review Co-authored-by: Anonymoussaurus <50231698+AnonymousWP@users.noreply.github.com> * docker(tests): add sudo for qemu related commands --------- Co-authored-by: Anonymoussaurus <50231698+AnonymousWP@users.noreply.github.com> --- .env-dist | 2 +- .gitignore | 1 + Makefile | 179 ++++++++---- docker/celery/entrypoint.sh | 15 + docker/docker-compose.yml | 6 +- docker/web/entrypoint-dev.sh | 23 +- install.sh | 58 ++-- scripts/common_functions.sh | 22 ++ scripts/run_tests.sh | 529 ++++++++++++++++++++++++++++++++++ scripts/uninstall.sh | 108 +++---- scripts/update.sh | 112 +++++--- tests/test_makefile.py | 533 
+++++++++++++++++++++++++++++++++++ 12 files changed, 1398 insertions(+), 190 deletions(-) create mode 100644 scripts/common_functions.sh create mode 100755 scripts/run_tests.sh create mode 100644 tests/test_makefile.py diff --git a/.env-dist b/.env-dist index a833988d..75a7be4a 100644 --- a/.env-dist +++ b/.env-dist @@ -37,7 +37,7 @@ MAX_CONCURRENCY=30 # This section is for non-interactive installations only # # reNgine-ng installation type (prebuilt or source) -INSTALL_TYPE=prebuilt +INSTALL_TYPE=pre-built # reNgine-ng web interface super user DJANGO_SUPERUSER_USERNAME=rengine DJANGO_SUPERUSER_EMAIL=rengine@example.com diff --git a/.gitignore b/.gitignore index 8e8f6e3d..64e774e2 100644 --- a/.gitignore +++ b/.gitignore @@ -62,6 +62,7 @@ staticfiles/ secret docker/secrets +logs get-docker.sh diff --git a/Makefile b/Makefile index 76271f9c..21bc683c 100644 --- a/Makefile +++ b/Makefile @@ -10,91 +10,158 @@ export RENGINE_VERSION # This for future release of Compose that will use Docker Buildkit, which is much efficient. COMPOSE_PREFIX_CMD := COMPOSE_DOCKER_CLI_BUILD=1 COMPOSE_CMD := docker compose -COMPOSE_FILE := -f docker/docker-compose.yml -COMPOSE_FILE_BUILD := -f docker/docker-compose.yml -f docker/docker-compose.build.yml -COMPOSE_DEV_FILE := -f docker/docker-compose.yml -f docker/docker-compose.dev.yml +COMPOSE_FILE := docker/docker-compose.yml +COMPOSE_FILE_BUILD := docker/docker-compose.build.yml +COMPOSE_FILE_DEV := docker/docker-compose.dev.yml +COMPOSE_FILE_SETUP := docker/docker-compose.setup.yml SERVICES := db web proxy redis celery celery-beat ollama # Check if 'docker compose' command is available, otherwise use 'docker-compose' DOCKER_COMPOSE := $(shell if command -v docker > /dev/null && docker compose version > /dev/null 2>&1; then echo "docker compose"; else echo "docker-compose"; fi) $(info Using: $(shell echo "$(DOCKER_COMPOSE)")) +# Define common commands +DOCKER_COMPOSE_CMD := ${COMPOSE_PREFIX_CMD} ${DOCKER_COMPOSE} +DOCKER_COMPOSE_FILE_CMD := ${DOCKER_COMPOSE_CMD} -f ${COMPOSE_FILE} + # -------------------------- -.PHONY: setup certs up build username pull down stop restart rm logs +.PHONY: certs up dev_up build_up build pull superuser_create superuser_delete superuser_changepassword migrate down stop restart remove_images test logs images prune help -certs: ## Generate certificates. - @${COMPOSE_PREFIX_CMD} ${DOCKER_COMPOSE} -f docker/docker-compose.setup.yml run --rm certs +pull: ## Pull pre-built Docker images from repository. + ${DOCKER_COMPOSE_FILE_CMD} pull -setup: ## Generate certificates. - @make certs +images: ## Show all Docker images for reNgine services. + @docker images --filter=reference='ghcr.io/security-tools-alliance/rengine-ng:*' --format "table {{.Repository}}\t{{.Tag}}\t{{.ID}}\t{{.Size}}" -up: ## Build and start all services. - ${COMPOSE_PREFIX_CMD} ${DOCKER_COMPOSE} ${COMPOSE_FILE} up -d ${SERVICES} - -pull_up: ## Pull Docker images. - @make pull - @make up +build: ## Build all Docker images locally. + @make remove_images + ${DOCKER_COMPOSE_FILE_CMD} -f ${COMPOSE_FILE_BUILD} build ${SERVICES} build_up: ## Build and start all services. + @make down @make build @make up -build: ## Build all services. - ${COMPOSE_PREFIX_CMD} ${DOCKER_COMPOSE} ${COMPOSE_FILE_BUILD} build ${SERVICES} +certs: ## Generate certificates. + @${DOCKER_COMPOSE_CMD} -f ${COMPOSE_FILE_SETUP} run --rm certs -username: ## Generate Username (Use only after make up). +up: ## Pull and start all services. 
+ ${DOCKER_COMPOSE_FILE_CMD} up -d ${SERVICES} + +dev_up: ## Pull and start all services with development configuration (more debug logs and Django Toolbar in UI). + @make down + ${DOCKER_COMPOSE_FILE_CMD} -f ${COMPOSE_FILE_DEV} up -d ${SERVICES} + +superuser_create: ## Generate username (use only after `make up`). ifeq ($(isNonInteractive), true) - ${COMPOSE_PREFIX_CMD} ${DOCKER_COMPOSE} ${COMPOSE_FILE} exec web poetry -C /home/rengine run python3 manage.py createsuperuser --username ${DJANGO_SUPERUSER_USERNAME} --email ${DJANGO_SUPERUSER_EMAIL} --noinput + ${DOCKER_COMPOSE_FILE_CMD} exec web poetry -C /home/rengine run python3 manage.py createsuperuser --username ${DJANGO_SUPERUSER_USERNAME} --email ${DJANGO_SUPERUSER_EMAIL} --noinput else - ${COMPOSE_PREFIX_CMD} ${DOCKER_COMPOSE} ${COMPOSE_FILE} exec web poetry -C /home/rengine run python3 manage.py createsuperuser + ${DOCKER_COMPOSE_FILE_CMD} exec web poetry -C /home/rengine run python3 manage.py createsuperuser endif -changepassword: ## Change password for user - ${COMPOSE_PREFIX_CMD} ${DOCKER_COMPOSE} ${COMPOSE_FILE} exec web poetry -C /home/rengine run python3 manage.py changepassword +superuser_delete: ## Delete username (use only after `make up`). + ${DOCKER_COMPOSE_FILE_CMD} exec -T web poetry -C /home/rengine run python3 manage.py shell -c "from django.contrib.auth import get_user_model; User = get_user_model(); User.objects.filter(username='${DJANGO_SUPERUSER_USERNAME}').delete()" -migrate: ## Apply migrations - ${COMPOSE_PREFIX_CMD} ${DOCKER_COMPOSE} ${COMPOSE_FILE} exec web poetry -C /home/rengine run python3 manage.py migrate +superuser_changepassword: ## Change password for user (use only after `make up` & `make username`). +ifeq ($(isNonInteractive), true) + ${DOCKER_COMPOSE_FILE_CMD} exec -T web poetry -C /home/rengine run python3 manage.py shell -c "from django.contrib.auth import get_user_model; User = get_user_model(); u = User.objects.get(username='${DJANGO_SUPERUSER_USERNAME}'); u.set_password('${DJANGO_SUPERUSER_PASSWORD}'); u.save()" +else + ${DOCKER_COMPOSE_FILE_CMD} exec web poetry -C /home/rengine run python3 manage.py changepassword +endif -pull: ## Pull Docker images. - ${COMPOSE_PREFIX_CMD} ${DOCKER_COMPOSE} ${COMPOSE_FILE} pull +migrate: ## Apply Django migrations + ${DOCKER_COMPOSE_FILE_CMD} exec web poetry -C /home/rengine run python3 manage.py migrate -down: ## Down all services. - ${COMPOSE_PREFIX_CMD} ${DOCKER_COMPOSE} ${COMPOSE_FILE} down +down: ## Down all services and remove containers. + ${DOCKER_COMPOSE_FILE_CMD} down stop: ## Stop all services. - ${COMPOSE_PREFIX_CMD} ${DOCKER_COMPOSE} ${COMPOSE_FILE} stop ${SERVICES} - -restart: ## Restart all services. - ${COMPOSE_PREFIX_CMD} ${DOCKER_COMPOSE} ${COMPOSE_FILE} restart ${SERVICES} - -rm: ## Remove all services containers. - ${COMPOSE_PREFIX_CMD} ${DOCKER_COMPOSE} $(COMPOSE_FILE) rm -f ${SERVICES} + ${DOCKER_COMPOSE_FILE_CMD} stop ${SERVICES} + +restart: ## Restart specified services or all if not specified. Use DEV=1 for development mode, COLD=1 for down and up instead of restart. 
+ @if [ "$(COLD)" = "1" ]; then \ + if [ "$(DEV)" = "1" ]; then \ + if [ -n "$(filter-out $@,$(MAKECMDGOALS))" ]; then \ + echo "Cold restart $(filter-out $@,$(MAKECMDGOALS)) in dev mode"; \ + ${DOCKER_COMPOSE_FILE_CMD} -f ${COMPOSE_FILE_DEV} down $(filter-out $@,$(MAKECMDGOALS)); \ + ${DOCKER_COMPOSE_FILE_CMD} -f ${COMPOSE_FILE_DEV} up -d $(filter-out $@,$(MAKECMDGOALS)); \ + else \ + echo "Cold restart ${SERVICES} in dev mode"; \ + ${DOCKER_COMPOSE_FILE_CMD} -f ${COMPOSE_FILE_DEV} down; \ + ${DOCKER_COMPOSE_FILE_CMD} -f ${COMPOSE_FILE_DEV} up -d ${SERVICES}; \ + fi \ + else \ + if [ -n "$(filter-out $@,$(MAKECMDGOALS))" ]; then \ + echo "Cold restart $(filter-out $@,$(MAKECMDGOALS)) in production mode"; \ + ${DOCKER_COMPOSE_FILE_CMD} down $(filter-out $@,$(MAKECMDGOALS)); \ + ${DOCKER_COMPOSE_FILE_CMD} up -d $(filter-out $@,$(MAKECMDGOALS)); \ + else \ + echo "Cold restart ${SERVICES} in production mode"; \ + ${DOCKER_COMPOSE_FILE_CMD} down; \ + ${DOCKER_COMPOSE_FILE_CMD} up -d ${SERVICES}; \ + fi \ + fi \ + else \ + if [ "$(DEV)" = "1" ]; then \ + if [ -n "$(filter-out $@,$(MAKECMDGOALS))" ]; then \ + echo "Restart $(filter-out $@,$(MAKECMDGOALS)) in dev mode"; \ + ${DOCKER_COMPOSE_FILE_CMD} -f ${COMPOSE_FILE_DEV} restart $(filter-out $@,$(MAKECMDGOALS)); \ + else \ + echo "Restart ${SERVICES} in dev mode"; \ + ${DOCKER_COMPOSE_FILE_CMD} -f ${COMPOSE_FILE_DEV} restart ${SERVICES}; \ + fi \ + else \ + if [ -n "$(filter-out $@,$(MAKECMDGOALS))" ]; then \ + echo "Restart $(filter-out $@,$(MAKECMDGOALS)) in production mode"; \ + ${DOCKER_COMPOSE_FILE_CMD} restart $(filter-out $@,$(MAKECMDGOALS)); \ + else \ + echo "Restart ${SERVICES} in production mode"; \ + ${DOCKER_COMPOSE_FILE_CMD} restart ${SERVICES}; \ + fi \ + fi \ + fi + +remove_images: ## Remove all Docker images for reNgine-ng services. + @images=$$(docker images --filter=reference='ghcr.io/security-tools-alliance/rengine-ng:*' --format "{{.ID}}"); \ + if [ -n "$$images" ]; then \ + echo "Removing images: $$images"; \ + docker rmi -f $$images; \ + else \ + echo "No images found for ghcr.io/security-tools-alliance/rengine-ng"; \ + fi test: - ${COMPOSE_PREFIX_CMD} ${DOCKER_COMPOSE} $(COMPOSE_FILE) exec celery poetry -C /home/rengine run python3 -m unittest tests/test_scan.py - -logs: ## Tail all logs with -n 1000. - ${COMPOSE_PREFIX_CMD} ${DOCKER_COMPOSE} $(COMPOSE_FILE) logs --follow --tail=1000 ${SERVICES} + ${DOCKER_COMPOSE_FILE_CMD} exec celery poetry -C /home/rengine run python3 -m unittest tests/test_scan.py -images: ## Show all Docker images. - ${COMPOSE_PREFIX_CMD} ${DOCKER_COMPOSE} $(COMPOSE_FILE) images ${SERVICES} +logs: ## Tail all containers logs with -n 1000 (useful for debug). + ${DOCKER_COMPOSE_FILE_CMD} logs --follow --tail=1000 ${SERVICES} -prune: ## Remove containers and delete volume data. - @make stop && make rm && docker volume prune -f +prune: ## Remove containers, delete volume data, and prune Docker system. + @make down + @make remove_images + @docker volume rm $$(docker volume ls -q --filter name=rengine_) 2>/dev/null || true + @docker system prune -af --volumes help: ## Show this help. - @echo "Make application Docker images and manage containers using Docker Compose files." - @awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m\033[0m (default: help)\n\nTargets:\n"} /^[a-zA-Z_-]+:.*?##/ { printf " \033[36m%-12s\033[0m %s\n", $$1, $$2 }' $(MAKEFILE_LIST) - -dev_build: ## Build all services. 
- ${COMPOSE_PREFIX_CMD} ${DOCKER_COMPOSE} ${COMPOSE_DEV_FILE} build ${SERVICES_DEV} - -dev_up: ## Build and start all services. - ${COMPOSE_PREFIX_CMD} ${DOCKER_COMPOSE} ${COMPOSE_DEV_FILE} up -d ${SERVICES_DEV} - -dev_down: ## Down all services. - ${COMPOSE_PREFIX_CMD} ${DOCKER_COMPOSE} ${COMPOSE_DEV_FILE} down - -dev_logs: ## Tail all logs with -n 1000. - ${COMPOSE_PREFIX_CMD} ${DOCKER_COMPOSE} $(COMPOSE_DEV_FILE) logs --follow --tail=1000 ${SERVICES_DEV} \ No newline at end of file + @echo "Manage Docker images, containers and Django commands using Docker Compose files." + @echo "" + @echo "Usage:" + @echo " make (default: help)" + @echo "" + @echo "Targets:" + @awk 'BEGIN {FS = ":.*##"; printf " \033[36m%-15s\033[0m %s\n", "Target", "Description"}' $(MAKEFILE_LIST) + @awk 'BEGIN {FS = ":.*##"} /^[a-zA-Z_-]+:.*?##/ { printf " \033[36m%-15s\033[0m %s\n", $$1, $$2 }' $(MAKEFILE_LIST) + @echo "" + @echo "Special commands:" + @echo " make restart [service1] [service2] ... Restart specific services in production mode" + @echo " make restart DEV=1 [service1] [service2] ... Restart specific services in development mode" + @echo " make restart Restart all services in production mode" + @echo " make restart DEV=1 Restart all services in development mode" + @echo " make restart COLD=1 [service1] [service2] ... Cold restart (recreate containers) specific services in production mode" + @echo " make restart DEV=1 COLD=1 [service1] [service2] ... Cold restart (recreate containers) specific services in development mode" + @echo " make restart COLD=1 Cold restart (recreate containers) all services in production mode" + @echo " make restart DEV=1 COLD=1 Cold restart (recreate containers) all services in development mode" + +%: + @: diff --git a/docker/celery/entrypoint.sh b/docker/celery/entrypoint.sh index 3d799742..2aad31b7 100755 --- a/docker/celery/entrypoint.sh +++ b/docker/celery/entrypoint.sh @@ -1,17 +1,32 @@ #!/bin/bash +print_msg() { + printf "\r\n" + printf "========================================\r\n" + printf "$1\r\n" + printf "========================================\r\n\r\n" +} + +print_msg "Generate Django migrations files" +poetry run -C $HOME/ python3 manage.py makemigrations +print_msg "Migrate database" poetry run -C $HOME/ python3 manage.py migrate +print_msg "Collect static files" poetry run -C $HOME/ python3 manage.py collectstatic --no-input --clear # Load default engines, keywords, and external tools +print_msg "Load default engines" poetry run -C $HOME/ python3 manage.py loaddata fixtures/default_scan_engines.yaml --app scanEngine.EngineType +print_msg "Load default keywords" poetry run -C $HOME/ python3 manage.py loaddata fixtures/default_keywords.yaml --app scanEngine.InterestingLookupModel +print_msg "Load default external tools" poetry run -C $HOME/ python3 manage.py loaddata fixtures/external_tools.yaml --app scanEngine.InstalledExternalTool if [ ! 
"$CELERY_LOGLEVEL" ]; then export CELERY_LOGLEVEL='info' fi +print_msg "Start celery workers" watchmedo auto-restart --recursive --pattern="*.py" --directory="/home/rengine/rengine/" -- poetry run -C $HOME/ celery -A reNgine.tasks worker --loglevel=$CELERY_LOGLEVEL --autoscale=$MAX_CONCURRENCY,$MIN_CONCURRENCY -Q main_scan_queue & watchmedo auto-restart --recursive --pattern="*.py" --directory="/home/rengine/rengine/" -- poetry run -C $HOME/ celery -A reNgine.tasks worker --pool=gevent --concurrency=30 --loglevel=$CELERY_LOGLEVEL -Q initiate_scan_queue -n initiate_scan_worker & watchmedo auto-restart --recursive --pattern="*.py" --directory="/home/rengine/rengine/" -- poetry run -C $HOME/ celery -A reNgine.tasks worker --pool=gevent --concurrency=30 --loglevel=$CELERY_LOGLEVEL -Q subscan_queue -n subscan_worker & diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index ea9605b8..a508f615 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -44,11 +44,7 @@ services: - CELERY_BROKER=redis://redis:6379/0 - CELERY_BACKEND=redis://redis:6379/0 volumes: - - type: bind - source: ../web - target: /home/rengine/rengine - bind: - create_host_path: true + - ../web:/home/rengine/rengine:rw,z - ./celery/entrypoint.sh:/entrypoint.sh:ro - scan_results:/home/rengine/scan_results healthcheck: diff --git a/docker/web/entrypoint-dev.sh b/docker/web/entrypoint-dev.sh index 8721143f..e9d96e00 100755 --- a/docker/web/entrypoint-dev.sh +++ b/docker/web/entrypoint-dev.sh @@ -1,9 +1,24 @@ #!/bin/bash -# Collect static files +print_msg() { + printf "\r\n" + printf "========================================\r\n" + printf "$1\r\n" + printf "========================================\r\n\r\n" +} + +print_msg "Generate Django migrations files" +poetry run -C $HOME/ python3 manage.py makemigrations + +print_msg "Migrate database" +poetry run -C $HOME/ python3 manage.py migrate + +# Collect static files for development +print_msg "Collect static files" poetry run -C $HOME/ python3 manage.py collectstatic --noinput -# Run production server -poetry run -C $HOME/ gunicorn reNgine.wsgi:application -w 8 --bind 0.0.0.0:8000 --limit-request-line 0 +# Run development server +print_msg "Launching Django development Web server" +poetry run -C $HOME/ python3 manage.py runserver 0.0.0.0:8000 -exec "$@" \ No newline at end of file +exec "$@" diff --git a/install.sh b/install.sh index 901a243a..f873d178 100755 --- a/install.sh +++ b/install.sh @@ -1,17 +1,7 @@ #!/bin/bash -# Define color codes. 
-# Using `tput setaf` at some places because the variable only works with log/echo - -COLOR_BLACK=0 -COLOR_RED=1 -COLOR_GREEN=2 -COLOR_YELLOW=3 -COLOR_BLUE=4 -COLOR_MAGENTA=5 -COLOR_CYAN=6 -COLOR_WHITE=7 -COLOR_DEFAULT=$COLOR_WHITE # Use white as default for clarity +# Import common functions +source "$(pwd)/scripts/common_functions.sh" # Fetch the internal and external IP address so that it can be printed later when the script has finished installing reNgine-ng external_ip=$(curl -s https://ipecho.net/plain) @@ -21,16 +11,6 @@ for ip in $internal_ips; do formatted_ips="${formatted_ips}https://$ip\n" done -# Log messages in different colors -log() { - local color=${2:-$COLOR_DEFAULT} # Use default color if $2 is not set - if [ "$color" -ne $COLOR_DEFAULT ]; then - tput setaf "$color" - fi - printf "$1\r\n" - tput sgr0 # Reset text color -} - # Check for root privileges if [ "$(whoami)" != "root" ] then @@ -94,18 +74,26 @@ if [ $isNonInteractive = false ]; then nano .env ;; esac - # Select install type - log "Do you want to build Docker images from source or use pre-built images (recommended)? This saves significant build time but requires good download speeds for it to complete fast." $COLOR_RED - select choice in "From source" "Use pre-built images"; do - case $choice in - "From source" ) - INSTALL_TYPE="source" - break;; - "Use pre-built images" ) - INSTALL_TYPE="prebuilt" - break;; - esac - done + # Select install type + log "Do you want to build Docker images from source or use pre-built images (recommended)? \nThis saves significant build time but requires good download speeds for it to complete fast." $COLOR_RED + log "1) From source" $COLOR_GREEN + log "2) Use pre-built images (default)" $COLOR_GREEN + read -p "Enter your choice (1 or 2, default is 2): " choice + + case $choice in + 1) + INSTALL_TYPE="source" + ;; + 2|"") + INSTALL_TYPE="pre-built" + ;; + *) + log "Invalid choice. Defaulting to pre-built images." $COLOR_YELLOW + INSTALL_TYPE="pre-built" + ;; + esac + + log "Selected installation type: $INSTALL_TYPE" $COLOR_CYAN fi # Non interactive install @@ -220,7 +208,7 @@ sleep 5 make up && log "reNgine-ng is started!" $COLOR_GREEN || { log "reNgine-ng start failed!" $COLOR_RED; exit 1; } log "Creating an account..." $COLOR_CYAN -make username isNonInteractive=$isNonInteractive +make superuser_create isNonInteractive=$isNonInteractive log "reNgine-ng is successfully installed and started!" $COLOR_GREEN log "\r\nThank you for installing reNgine-ng, happy recon!" $COLOR_GREEN diff --git a/scripts/common_functions.sh b/scripts/common_functions.sh new file mode 100644 index 00000000..3fec890d --- /dev/null +++ b/scripts/common_functions.sh @@ -0,0 +1,22 @@ +#!/bin/bash + +# Define color codes. 
+COLOR_BLACK=0 +COLOR_RED=1 +COLOR_GREEN=2 +COLOR_YELLOW=3 +COLOR_BLUE=4 +COLOR_MAGENTA=5 +COLOR_CYAN=6 +COLOR_WHITE=7 +COLOR_DEFAULT=$COLOR_WHITE # Use white as default for clarity + +# Log messages in different colors +log() { + local color=${2:-$COLOR_DEFAULT} # Use default color if $2 is not set + if [ "$color" -ne $COLOR_DEFAULT ]; then + tput setaf "$color" + fi + printf "$1\r\n" + tput sgr0 # Reset text color +} diff --git a/scripts/run_tests.sh b/scripts/run_tests.sh new file mode 100755 index 00000000..e5ff4fe0 --- /dev/null +++ b/scripts/run_tests.sh @@ -0,0 +1,529 @@ +#!/bin/bash + +# Exit on any error +set -e + +# Import common functions +source "$(pwd)/common_functions.sh" + +# Function to determine host architecture +get_host_architecture() { + local arch=$(uname -m) + case $arch in + x86_64) + echo "amd64" + ;; + aarch64) + echo "arm64" + ;; + *) + echo "Unsupported architecture: $arch" >&2 + exit 1 + ;; + esac +} + +# Function to display help message +show_help() { + echo "Usage: $0 [--arch ] [--clean-temp] [--clean-all] [--without-build] [test1] [test2] ..." + echo + echo "Run tests for the reNgine-ng project in a VM environment." + echo + echo "Mandatory arguments:" + echo " branch_name The Git branch to test" + echo " test_file The test file to run" + echo + echo "Optional arguments:" + echo " --arch Specify the architecture (amd64 or arm64). If not specified, uses host architecture." + echo " --clean-temp Clean temporary files and VM without prompting" + echo " --clean-all Clean temporary files, VM, and installed packages without prompting" + echo " --without-build Run all tests except the build test" + echo " test1 test2 ... Specific tests to run from the test file" + echo + echo "Examples:" + echo " $0 # Run all tests on host architecture" + echo " $0 --arch amd64 # Run all tests on amd64 architecture" + echo " $0 --arch arm64 feature-branch # Run tests on arm64 for feature-branch" + echo " $0 --arch amd64 master makefile certs pull # Run specific tests on amd64" + echo " $0 --clean-temp # Clean temporary files and VM without prompting" + echo " $0 --clean-all # Clean temporary files, VM, and installed packages without prompting" + echo " $0 --without-build # Run all tests except the build test" + echo + echo "The script will create a VM for the specified architecture, set up the environment, and run the specified tests." +} + +# Get host architecture +HOST_ARCH=$(get_host_architecture) + +# Initialize cleanup variables +CLEAN_TEMP=false +CLEAN_ALL=false + +# Parse command line arguments +ARCH="" +WITHOUT_BUILD=false +while [[ $# -gt 0 ]]; do + case $1 in + --arch) + ARCH="$2" + shift 2 + ;; + --clean-temp) + CLEAN_TEMP=true + shift + ;; + --clean-all) + CLEAN_ALL=true + shift + ;; + --without-build) + WITHOUT_BUILD=true + shift + ;; + -h|--help) + show_help + exit 0 + ;; + *) + break + ;; + esac +done + +# If architecture is not specified, use host architecture +if [ -z "$ARCH" ]; then + ARCH="$HOST_ARCH" + log "Architecture not specified. Using host architecture: $ARCH" $COLOR_YELLOW +fi + +# Validate architecture +if [ "$ARCH" != "amd64" ] && [ "$ARCH" != "arm64" ]; then + log "Error: Invalid architecture. Must be either amd64 or arm64." 
$COLOR_RED + exit 1 +fi + +# Function to check if a branch exists +branch_exists() { + git ls-remote --exit-code --heads origin "$1" &>/dev/null +} + +# Set default branch +DEFAULT_BRANCH="master" + +# VM parameters +VM_NAME="test-rengine-ng" +VM_IMAGE="test-debian.qcow2" +VM_RAM="8G" +VM_CPUS="8" +VM_DISK_SIZE="30G" # Adjust this value as needed + +# SSH parameters +SSH_OPTIONS="-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null" + +# Rengine root directory inside the VM +RENGINE_ROOT='~/rengine' + +# Check if mandatory arguments are provided +if [ $# -lt 2 ]; then + log "Error: branch_name and test_file are mandatory parameters." $COLOR_RED + show_help + exit 1 +fi + +# Extract branch_name and test_file from arguments +RELEASE_VERSION="$1" +TEST_FILE="$2" +shift 2 + +# Check if the branch exists +if ! branch_exists "$RELEASE_VERSION"; then + log "Error: Branch $RELEASE_VERSION does not exist." $COLOR_RED + exit 1 +fi + +# Extract test names from remaining arguments +TEST_NAMES="$@" + +# Function to generate test names +generate_test_names() { + local names="" + for name in $TEST_NAMES; do + names+="test_$name " + done + echo $names +} + +# Generate the test names +FORMATTED_TEST_NAMES=$(generate_test_names) + +# Create log directory if it doesn't exist +LOG_DIR="$(pwd)/../logs/tests" +mkdir -p "$LOG_DIR" + +# Generate a unique log file name +TIMESTAMP=$(date +"%Y%m%d_%H%M%S") +LOG_FILE="${LOG_DIR}/test_${TEST_FILE}_log_${TIMESTAMP}.txt" + +# When you're ready to use RELEASE_VERSION: +log "Checking out branch: $RELEASE_VERSION" $COLOR_CYAN + +# Function to check if a command exists +command_exists() { + command -v "$1" >/dev/null 2>&1 +} + +# Install QEMU if not already installed +INSTALLED_PACKAGES_FOR_TESTS="qemu-system-x86 qemu-system-arm qemu-utils cloud-image-utils" +INSTALLED_COMMON_PACKAGES="socat wget openssh-client tar gzip git curl gpg coreutils" + +# Create a temporary directory for the test +TEMP_DIR=$HOME/tmp +mkdir -p $TEMP_DIR +TEST_DIR=$(mktemp -d -p $TEMP_DIR) + +# Function to clean up resources +cleanup() { + local clean_temp=false + local clean_packages=false + + if [ "$CLEAN_TEMP" = true ] || [ "$CLEAN_ALL" = true ]; then + clean_temp=true + fi + + if [ "$CLEAN_ALL" = true ]; then + clean_packages=true + fi + + if [ "$CLEAN_TEMP" = false ] && [ "$CLEAN_ALL" = false ]; then + echo -e "\n\033[1;33mCleanup Confirmation\033[0m" + read -p "Do you want to remove temporary files and VM? (y/n): " temp_response + if [[ "$temp_response" == "y" ]]; then + clean_temp=true + fi + + read -p $'Do you want to uninstall the packages installed for testing? +Installed packages for testing: ('"$INSTALLED_PACKAGES_FOR_TESTS"$') +Installed common packages: ('"$INSTALLED_COMMON_PACKAGES"$') +Only installed packages for testing will be removed, common packages will be left untouched. +You may consider removing these packages by hand. +Type your answer (y/n): ' packages_response + + if [[ "$packages_response" == "y" ]]; then + clean_packages=true + fi + fi + + if [ "$clean_temp" = true ]; then + log "Cleaning up temporary files and VM..." $COLOR_CYAN + # Send powerdown command to QEMU monitor + echo "system_powerdown" | sudo socat - UNIX-CONNECT:/tmp/qemu-monitor.sock 2>/dev/null || true + + # Wait for VM to stop (with timeout) + for i in {1..15}; do + if ! 
pgrep -f "qemu-system-.*$VM_NAME" > /dev/null; then + log "VM stopped successfully" $COLOR_GREEN + break + fi + sleep 1 + done + + # Force stop if VM is still running + if pgrep -f "qemu-system-.*$VM_NAME" > /dev/null; then + log "Forcing VM to stop..." $COLOR_RED + sudo pkill -f "qemu-system-.*$VM_NAME" || true + fi + + if [[ "$TEST_DIR" == "$HOME/tmp/"* ]]; then + log "Removing temporary directory..." $COLOR_CYAN + rm -rf "$TEST_DIR" + log "Temporary directory removed." $COLOR_GREEN + else + log "Error: TEST_DIR is not in $HOME/tmp. Skipping directory removal for safety." $COLOR_RED + fi + fi + + if [ "$clean_packages" = true ]; then + log "Uninstalling packages..." $COLOR_CYAN + sudo apt-get remove -y $INSTALLED_PACKAGES_FOR_TESTS + sudo apt-get autoremove -y + log "Packages uninstalled." $COLOR_GREEN + fi + + log "Cleanup completed." $COLOR_GREEN +} + +# Set trap to ensure cleanup on script exit (normal or abnormal) +trap 'log "Interruption detected."; cleanup; log "Exiting script."; exit 130' INT TERM EXIT + +# Function to get the image filename based on architecture +get_image_filename() { + if [ "$ARCH" = "amd64" ]; then + echo "debian-12-generic-amd64.qcow2" + elif [ "$ARCH" = "arm64" ]; then + echo "debian-12-generic-arm64.qcow2" + else + log "Unsupported architecture: $ARCH" $COLOR_RED + exit 1 + fi +} + +# Get the image filename +IMAGE_FILENAME=$(get_image_filename) + +# Check if the image already exists in TEMP_DIR +if [ -f "$TEMP_DIR/$IMAGE_FILENAME" ]; then + cp "$TEMP_DIR/$IMAGE_FILENAME" "$TEST_DIR/$IMAGE_FILENAME" + log "Debian 12 image for $ARCH found in $TEMP_DIR. Using existing image." $COLOR_GREEN +else + # Download appropriate Debian 12 cloud image + log "Downloading Debian 12 cloud image for $ARCH..." $COLOR_CYAN + if [ "$ARCH" = "amd64" ]; then + wget -q https://cloud.debian.org/images/cloud/bookworm/latest/debian-12-generic-amd64.qcow2 -O "$TEST_DIR/$IMAGE_FILENAME" + elif [ "$ARCH" = "arm64" ]; then + wget -q https://cloud.debian.org/images/cloud/bookworm/latest/debian-12-generic-arm64.qcow2 -O "$TEST_DIR/$IMAGE_FILENAME" + fi + + if [ $? -eq 0 ]; then + log "Debian 12 image for $ARCH downloaded successfully." $COLOR_GREEN + else + log "Failed to download Debian 12 image for $ARCH." $COLOR_RED + exit 1 + fi +fi + +# Create a temporary file for communication +TEMP_FILE="$TEST_DIR/subshell_status.txt" + +# Create a named pipe for logging +PIPE_FILE="$TEST_DIR/log_pipe" +mkfifo "$PIPE_FILE" + +# Start tee in the background to handle logging +tee -a "$LOG_FILE" < "$PIPE_FILE" & +TEE_PID=$! + +# Execute the tests in a subshell +( + # Redirect all output to the named pipe + exec > "$PIPE_FILE" 2>&1 + + # Install QEMU & dependencies + log "Installing QEMU..." $COLOR_CYAN + sudo apt-get update + sudo apt-get install -y $INSTALLED_PACKAGES_FOR_TESTS $INSTALLED_COMMON_PACKAGES + + # Copy project files to the temporary directory + log "Copying project files to temporary directory..." $COLOR_CYAN + + # Compress the project directory + log "Compressing project files..." $COLOR_CYAN + (cd .. && tar -czf "$TEST_DIR/rengine-project.tar.gz" --exclude='docker/secrets' .) 
+ + cd "$TEST_DIR" + + # Create a larger disk image + qemu-img create -f qcow2 -o preallocation=metadata "$TEST_DIR/large-debian.qcow2" $VM_DISK_SIZE + + # Resize the downloaded image + qemu-img resize --shrink "$TEST_DIR/$IMAGE_FILENAME" $VM_DISK_SIZE + + # Combine the two images + qemu-img convert -O qcow2 -o preallocation=metadata "$TEST_DIR/$IMAGE_FILENAME" "$TEST_DIR/large-debian.qcow2" + + # Create a copy of the image for testing + mv large-debian.qcow2 test-debian.qcow2 + + # Generate SSH key pair + log "Generating SSH key pair..." $COLOR_CYAN + ssh-keygen -t ssh-keygen -t ed25519 -f ./id_ed25519 -N "" + + # Create a cloud-init configuration file + cat > cloud-init.yml </dev/null; then + log "SSH is now available" $COLOR_GREEN + break + fi + if [ $i -eq 30 ]; then + log "Timed out waiting for SSH" $COLOR_RED + exit 1 + fi + sleep 10 + done + + # Run setup commands in the VM + log "Setting up locales in the VM..." $COLOR_CYAN + ssh -p 2222 $SSH_OPTIONS -i ./id_ed25519 rengine@localhost << EOF + # Update and install dependencies + sudo apt-get update + sudo apt-get install -y locales-all +EOF + + # Copy compressed project files to the VM + log "Copying compressed project files to the VM..." $COLOR_CYAN + scp -P 2222 $SSH_OPTIONS -i ./id_ed25519 "$TEST_DIR/rengine-project.tar.gz" rengine@localhost:~ + + log "Decompressing project files on the VM..." $COLOR_CYAN + ssh -p 2222 $SSH_OPTIONS -i ./id_ed25519 rengine@localhost << EOF + sudo apt-get install git -y + mkdir -p $RENGINE_ROOT + tar -xzf ~/rengine-project.tar.gz -C $RENGINE_ROOT + rm ~/rengine-project.tar.gz + cd $RENGINE_ROOT + cat > $RENGINE_ROOT/.git/config << EOG +[core] + repositoryformatversion = 0 + filemode = true + bare = false + logallrefupdates = true +[remote "origin"] + url = https://github.com/Security-Tools-Alliance/rengine-ng.git + fetch = +refs/heads/*:refs/remotes/origin/* +[branch "master"] + remote = origin + merge = refs/heads/master + vscode-merge-base = origin/master +EOG + cp $RENGINE_ROOT/.env-dist $RENGINE_ROOT/.env +EOF + + # Run setup commands in the VM + log "Setting up Docker and the application in the VM..." $COLOR_CYAN + ssh -p 2222 $SSH_OPTIONS -i ./id_ed25519 rengine@localhost << EOF + # Update and install dependencies + sudo apt-get install -y ca-certificates curl gnupg make htop iftop net-tools + + # Add Docker's official GPG key + sudo install -m 0755 -d /etc/apt/keyrings + curl -fsSL https://download.docker.com/linux/debian/gpg | sudo gpg --dearmor -o /etc/apt/keyrings/docker.gpg + sudo chmod a+r /etc/apt/keyrings/docker.gpg + + # Set up Docker repository + echo \ + "deb [arch=\$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/debian \ + \$(. /etc/os-release && echo "\$VERSION_CODENAME") stable" | \ + sudo tee /etc/apt/sources.list.d/docker.list > /dev/null + + # Install Docker Engine, Docker Compose and python libs + sudo apt-get update + sudo apt-get install -y docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin python3-docker python3-parameterized + + # Add rengine user to docker group + sudo usermod -aG docker rengine + newgrp docker + + # Run tests + cd $RENGINE_ROOT + if [ "$WITHOUT_BUILD" = true ]; then + python3 tests/test_$TEST_FILE.py ${FORMATTED_TEST_NAMES:+--tests $FORMATTED_TEST_NAMES} --exclude-build + else + python3 tests/test_$TEST_FILE.py ${FORMATTED_TEST_NAMES:+--tests $FORMATTED_TEST_NAMES} + fi +EOF + + # Get the test status + TEST_STATUS=$? 
+ + # Write the test status to the temporary file + echo $TEST_STATUS > "$TEMP_FILE" + + # Signal that the subshell has finished + echo "DONE" >> "$TEMP_FILE" + + log "Tests completed with status: $TEST_STATUS" $COLOR_GREEN + +) & + +SUBSHELL_PID=$! + +log "Waiting for tests to complete..." $COLOR_CYAN + +# Wait for the subshell to finish (with a timeout of 2 hours) +for i in {1..7200}; do + if [ -f "$TEMP_FILE" ] && grep -q "DONE" "$TEMP_FILE"; then + log "Tests finished" $COLOR_GREEN + break + fi + sleep 1 + if [ $((i % 60)) -eq 0 ]; then + log "Still waiting for tests to complete... (${i}s)" $COLOR_YELLOW + fi +done + +# Check if the subshell completed +if [ ! -f "$TEMP_FILE" ] || ! grep -q "DONE" "$TEMP_FILE"; then + log "Error: Tests did not complete within the allocated time" $COLOR_RED + TEST_STATUS=1 +else + # Get the test status from the temporary file + TEST_STATUS=$(head -n 1 "$TEMP_FILE") +fi + +# Clean up +rm -f "$TEMP_FILE" +rm -f "$PIPE_FILE" +kill $TEE_PID +wait $SUBSHELL_PID + +# Exit with the status +exit $TEST_STATUS \ No newline at end of file diff --git a/scripts/uninstall.sh b/scripts/uninstall.sh index 5fcd06cd..c36bbb6a 100755 --- a/scripts/uninstall.sh +++ b/scripts/uninstall.sh @@ -1,27 +1,7 @@ #!/bin/bash -# Define color codes. -# Using `tput setaf` at some places because the variable only works with log/echo - -COLOR_BLACK=0 -COLOR_RED=1 -COLOR_GREEN=2 -COLOR_YELLOW=3 -COLOR_BLUE=4 -COLOR_MAGENTA=5 -COLOR_CYAN=6 -COLOR_WHITE=7 -COLOR_DEFAULT=$COLOR_WHITE # Use white as default for clarity - -# Log messages in different colors -log() { - local color=${2:-$COLOR_DEFAULT} # Use default color if $2 is not set - if [ "$color" -ne $COLOR_DEFAULT ]; then - tput setaf "$color" - fi - printf "$1\r\n" - tput sgr0 # Reset text color -} +# Import common functions +source "$(pwd)/common_functions.sh" cat ../web/art/reNgine.txt @@ -38,7 +18,7 @@ log "" log "Uninstalling reNgine-ng..." $COLOR_CYAN log "" -tput setaf 1 +tput setaf $COLOR_RED; read -p "This action will stop and remove all containers, volumes and networks of reNgine-ng. Do you want to continue? [y/n] " -n 1 log "" @@ -47,23 +27,56 @@ then log "" log "Stopping reNgine-ng..." $COLOR_CYAN - docker stop rengine-web-1 rengine-db-1 rengine-celery-1 rengine-celery-beat-1 rengine-redis-1 rengine-proxy-1 rengine-ollama-1 - log "Stopped reNgine-ng" $COLOR_GREEN - log "" - - log "Removing all containers related to reNgine-ng..." $COLOR_CYAN - docker rm rengine-web-1 rengine-db-1 rengine-celery-1 rengine-celery-beat-1 rengine-redis-1 rengine-proxy-1 rengine-ollama-1 - log "Removed all containers related to reNgine-ng" $COLOR_GREEN + if (cd .. && make down); then + log "Stopped reNgine-ng" $COLOR_GREEN + else + log "Failed to stop reNgine-ng" $COLOR_RED + exit 1 + fi log "" log "Removing all volumes related to reNgine-ng..." $COLOR_CYAN - docker volume rm rengine_gf_patterns rengine_github_repos rengine_ollama_data rengine_nuclei_templates rengine_postgres_data rengine_scan_results rengine_tool_config rengine_static_volume rengine_wordlist - log "Removed all volumes related to reNgine-ng" $COLOR_GREEN + if docker volume rm $(docker volume ls -q --filter name=rengine_) 2>/dev/null || true; then + log "Removed all volumes related to reNgine-ng" $COLOR_GREEN + else + log "Warning: Failed to remove some or all volumes" $COLOR_YELLOW + fi log "" log "Removing all networks related to reNgine-ng..." 
$COLOR_CYAN - docker network rm rengine_network - log "Removed all networks related to reNgine-ng" $COLOR_GREEN + if docker network rm rengine_network; then + log "Removed all networks related to reNgine-ng" $COLOR_GREEN + else + log "Warning: Failed to remove rengine_network" $COLOR_YELLOW + fi + log "" + + log "Removing static files and secrets from reNgine-ng..." $COLOR_CYAN + + # Remove web/staticfiles directory + if [ -d "../web/staticfiles" ]; then + log "Removing web/staticfiles directory..." $COLOR_CYAN + if (cd .. && rm -rf web/staticfiles); then + log "Removed web/staticfiles directory" $COLOR_GREEN + else + log "Warning: Failed to remove web/staticfiles directory" $COLOR_YELLOW + fi + else + log "web/staticfiles directory not found, skipping..." $COLOR_YELLOW + fi + + # Remove docker/secrets directory + if [ -d "../docker/secrets" ]; then + log "Removing docker/secrets directory..." $COLOR_CYAN + if (cd .. && rm -rf docker/secrets); then + log "Removed docker/secrets directory" $COLOR_GREEN + else + log "Warning: Failed to remove docker/secrets directory" $COLOR_YELLOW + fi + else + log "docker/secrets directory not found, skipping..." $COLOR_YELLOW + fi + log "" else log "" @@ -71,10 +84,7 @@ else exit 1 fi -# Read the version from version.txt file -RENGINE_VERSION=$(cat ../web/reNgine/version.txt) - -tput setaf 1; +tput setaf $COLOR_RED; read -p "Do you want to remove Docker images related to reNgine-ng? [y/n] " -n 1 -r log "" @@ -82,21 +92,18 @@ if [[ $REPLY =~ ^[Yy]$ ]] then log "" log "Removing all Docker images related to reNgine-ng..." $COLOR_CYAN - docker image rm ghcr.io/security-tools-alliance/rengine-ng:rengine-celery-v${RENGINE_VERSION} \ - ghcr.io/security-tools-alliance/rengine-ng:rengine-web-v${RENGINE_VERSION} \ - ghcr.io/security-tools-alliance/rengine-ng:rengine-postgres-v${RENGINE_VERSION} \ - ghcr.io/security-tools-alliance/rengine-ng:rengine-redis-v${RENGINE_VERSION} \ - ghcr.io/security-tools-alliance/rengine-ng:rengine-ollama-v${RENGINE_VERSION} \ - ghcr.io/security-tools-alliance/rengine-ng:rengine-certs-v${RENGINE_VERSION} \ - ghcr.io/security-tools-alliance/rengine-ng:rengine-proxy-v${RENGINE_VERSION} - log "Removed all Docker images" $COLOR_GREEN + if (cd .. && make remove_images); then + log "Removed all Docker images" $COLOR_GREEN + else + log "Warning: Failed to remove some or all Docker images" $COLOR_YELLOW + fi log "" else log "" log "Skipping removal of Docker images" $COLOR_CYAN fi -tput setaf 1; +tput setaf $COLOR_RED; read -p "Do you want to remove all Docker-related leftovers? [y/n] " -n 1 -r log "" @@ -104,8 +111,11 @@ if [[ $REPLY =~ ^[Yy]$ ]] then log "" log "Removing all Docker-related leftovers..." $COLOR_CYAN - docker system prune -a -f - log "Removed all Docker-related leftovers" $COLOR_GREEN + if docker system prune -a -f; then + log "Removed all Docker-related leftovers" $COLOR_GREEN + else + log "Warning: Failed to remove some or all Docker-related leftovers" $COLOR_YELLOW + fi log "" else log "" diff --git a/scripts/update.sh b/scripts/update.sh index f3c55d25..85f24855 100755 --- a/scripts/update.sh +++ b/scripts/update.sh @@ -1,27 +1,14 @@ #!/bin/bash -# Define color codes. 
-# Using `tput setaf` at some places because the variable only works with log/echo +# Import common functions +source "$(pwd)/common_functions.sh" -COLOR_BLACK=0 -COLOR_RED=1 -COLOR_GREEN=2 -COLOR_YELLOW=3 -COLOR_BLUE=4 -COLOR_MAGENTA=5 -COLOR_CYAN=6 -COLOR_WHITE=7 -COLOR_DEFAULT=$COLOR_WHITE # Use white as default for clarity - -# Log messages in different colors -log() { - local color=${2:-$COLOR_DEFAULT} # Use default color if $2 is not set - if [ "$color" -ne $COLOR_DEFAULT ]; then - tput setaf "$color" - fi - printf "$1\r\n" - tput sgr0 # Reset text color -} +# Check for root privileges +if [ "$(whoami)" != "root" ]; then + log "Error updating reNgine-ng: please run this script as root!" $COLOR_RED + log "Example: sudo ./update.sh" $COLOR_RED + exit 1 +fi # Function to compare version strings version_compare() { @@ -61,48 +48,93 @@ LATEST_VERSION=$(curl -s https://api.github.com/repos/Security-Tools-Alliance/re cat ../web/art/reNgine.txt +# Compare versions +version_compare $CURRENT_VERSION $LATEST_VERSION +comparison_result=$? + log "\n" $COLOR_DEFAULT log "Current version: $CURRENT_VERSION" $COLOR_CYAN log "Latest version: $LATEST_VERSION" $COLOR_CYAN log "\n" $COLOR_DEFAULT -# Compare versions -version_compare $CURRENT_VERSION $LATEST_VERSION -case $? in +case $comparison_result in 0) log "You are already on the latest version." $COLOR_GREEN - #exit 0 + exit 0 ;; 1) log "Your version is newer than the latest release. No update needed." $COLOR_YELLOW - #exit 0 + exit 0 ;; 2) log "An update is available." $COLOR_CYAN ;; + *) log "Error comparing versions." $COLOR_RED + exit 1 + ;; esac read -p "Do you want to update to the latest version? (y/n) " answer if [[ $answer == "y" ]]; then - read -p "Do you want to update from prebuilt images or build from source? (prebuilt/source) " install_type - read -p "Do you want to apply your local changes after updating? (y/n) " apply_changes + while true; do + read -p "Do you want to update from pre-built images or build from source? (pre-built/source, default is pre-built): " install_type + install_type=${install_type:-pre-built} # Set default to pre-built if empty + if [[ $install_type == "pre-built" || $install_type == "source" ]]; then + break + else + log "Invalid input. Please enter 'pre-built' or 'source'." $COLOR_YELLOW + fi + done - cd .. - if [[ $apply_changes == "y" ]]; then - make down && git stash save && git pull && git stash apply - if [[ $install_type == "prebuilt" ]]; then - make pull_up - elif [[ $install_type == "source" ]]; then - make build_up + log "Selected installation type: $install_type" $COLOR_CYAN + + while true; do + read -p "Do you want to apply your local changes after updating? (y/n) " apply_changes + if [[ $apply_changes == "y" || $apply_changes == "n" ]]; then + break else - log "Invalid installation type. Update cancelled." $COLOR_RED + log "Invalid input. Please enter 'y' or 'n'." $COLOR_YELLOW + fi + done + + if [[ $apply_changes == "y" ]]; then + if ! (cd .. && make down); then + log "Failed to stop reNgine-ng" $COLOR_RED + exit 1 + fi + if ! sudo -u rengine git stash save && sudo -u rengine git pull && sudo -u rengine git stash apply; then + log "Failed to update and apply local changes" $COLOR_RED exit 1 fi + if [[ $install_type == "pre-built" ]]; then + if ! (cd .. && make up); then + log "Failed to pull and start updated images" $COLOR_RED + exit 1 + fi + elif [[ $install_type == "source" ]]; then + if ! (cd .. 
&& make build_up); then + log "Failed to build and start updated images" $COLOR_RED + exit 1 + fi + fi log "Successfully updated to version $LATEST_VERSION and local changes have been reapplied" $COLOR_GREEN elif [[ $apply_changes == "n" ]]; then - make down && git stash && git stash drop && git pull - if [[ $install_type == "prebuilt" ]]; then - make pull_up + if ! (cd .. && make down); then + log "Failed to stop reNgine-ng" $COLOR_RED + exit 1 + fi + if ! sudo -u rengine git stash && sudo -u rengine git stash drop && sudo -u rengine git pull; then + log "Failed to update" $COLOR_RED + exit 1 + fi + if [[ $install_type == "pre-built" ]]; then + if ! (cd .. && make up); then + log "Failed to pull and start updated images" $COLOR_RED + exit 1 + fi elif [[ $install_type == "source" ]]; then - make build_up + if ! (cd .. && make build_up); then + log "Failed to build and start updated images" $COLOR_RED + exit 1 + fi else log "Invalid installation type. Update cancelled." $COLOR_RED exit 1 diff --git a/tests/test_makefile.py b/tests/test_makefile.py new file mode 100644 index 00000000..f3b117ff --- /dev/null +++ b/tests/test_makefile.py @@ -0,0 +1,533 @@ +""" +This module contains tests for the Makefile commands in the reNgine-ng project. +It verifies various make commands and their effects on the Docker environment. +""" + +import os +import unittest +import subprocess +import time +import signal +import sys +from functools import wraps +from docker import from_env as docker_from_env +from docker.errors import NotFound + +# Add these constants for colors +BLACK = '\033[30m' +RED = '\033[31m' +GREEN = '\033[32m' +YELLOW = '\033[33m' +BLUE = '\033[34m' +MAGENTA = '\033[35m' +CYAN = '\033[36m' +WHITE = '\033[37m' +ENDC = '\033[0m' + +print("Starting test_makefile.py") +print(f"Current working directory: {os.getcwd()}") + +RENGINE_PATH = "/home/rengine/rengine" + +# Read version from version.txt +with open( + f"{RENGINE_PATH}/web/reNgine/version.txt", "r", encoding="utf-8" +) as version_file: + RENGINE_VERSION = version_file.read().strip() + + +class TestMakefile(unittest.TestCase): + """ + A test suite for verifying the functionality of the Makefile commands in the reNgine-ng project. + This class tests various make commands and their effects on the Docker environment. + """ + + expected_services = [ + "rengine-web-1", + "rengine-db-1", + "rengine-celery-1", + "rengine-celery-beat-1", + "rengine-redis-1", + "rengine-proxy-1", + "rengine-ollama-1", + ] + expected_images = [ + f"ghcr.io/security-tools-alliance/rengine-ng:rengine-celery-v{RENGINE_VERSION}", + f"ghcr.io/security-tools-alliance/rengine-ng:rengine-web-v{RENGINE_VERSION}", + f"ghcr.io/security-tools-alliance/rengine-ng:rengine-postgres-v{RENGINE_VERSION}", + f"ghcr.io/security-tools-alliance/rengine-ng:rengine-redis-v{RENGINE_VERSION}", + f"ghcr.io/security-tools-alliance/rengine-ng:rengine-ollama-v{RENGINE_VERSION}", + f"ghcr.io/security-tools-alliance/rengine-ng:rengine-certs-v{RENGINE_VERSION}", + f"ghcr.io/security-tools-alliance/rengine-ng:rengine-proxy-v{RENGINE_VERSION}", + ] + + @classmethod + def setUpClass(cls): + """ + Set up the test environment before running any tests. + This method initializes the Docker client. 
+ """ + cls.client = docker_from_env() + + # Search for the Makefile by traversing up the parent directories + cls.makefile_dir = cls.find_makefile_directory() + if not cls.makefile_dir: + raise FileNotFoundError("Makefile not found in the current directory or its parents") + + # Change the working directory to the one containing the Makefile + os.chdir(cls.makefile_dir) + print(f"Changed working directory to: {os.getcwd()}") + + @classmethod + def find_makefile_directory(cls): + """ + Search for the directory containing the Makefile by traversing up the directory tree. + """ + current_dir = os.path.abspath(os.getcwd()) + while current_dir != '/': + if os.path.exists(os.path.join(current_dir, 'Makefile')): + return current_dir + current_dir = os.path.dirname(current_dir) + return None + + @classmethod + def tearDownClass(cls): + """ + Clean up the test environment after all tests have been run. + This method stops all services. + """ + cls.run_make_command("down") + + @classmethod + def run_make_command(cls, command, capture_output=False, env_vars=None): + """ + Run a make command and optionally capture its output. + """ + cmd = f"make {command}" + if env_vars: + cmd = " ".join([f"{k}={v}" for k, v in env_vars.items()]) + " " + cmd + + print(f"{YELLOW}Executing command: {cmd}{ENDC}") + if capture_output: + make_result = subprocess.run( + cmd, shell=True, capture_output=True, text=True, check=False + ) + if make_result.returncode != 0: + print(f"Command failed. Stderr: {make_result.stderr}") + return make_result.stdout, make_result.stderr, make_result.returncode + make_result = subprocess.run(cmd, shell=True, check=False) + if make_result.returncode != 0: + print(f"Command failed. Returncode: {make_result.returncode}") + return make_result.returncode + + def assert_containers_running(self): + """ + Assert that all expected services are running. + """ + running_containers = self.client.containers.list() + for service in self.expected_services: + container = next((c for c in running_containers if service in c.name), None) + self.assertIsNotNone(container, f"Service {service} is not running") + self.assertEqual( + container.status, + "running", + f"Container {container.name} is not in 'running' state", + ) + + def clean_secrets(self): + """ + Clean up the secrets directory. + """ + secrets_path = f"{RENGINE_PATH}/docker/secrets" + if os.path.exists(secrets_path): + subprocess.run(f"sudo rm -rf {secrets_path}", shell=True, check=False) + + @staticmethod + def with_cleanup(func): + """ + Decorator to ensure cleanup after test execution. + """ + + @wraps(func) + def wrapper(self, *args, **kwargs): + try: + return func(self, *args, **kwargs) + finally: + self.clean_secrets() + return wrapper + + def test_pull(self): + """ + Test the `make pull` command. + This test verifies that all required Docker images can be pulled successfully. + """ + returncode = self.run_make_command("pull") + self.assertEqual(returncode, 0) + images = self.client.images.list() + for image in self.expected_images: + self.assertTrue( + any(image in img.tags[0] for img in images if img.tags), + f"Image {image} not found", + ) + + def test_images(self): + """ + Test the `make images` command. + This test verifies that all required Docker images are present and correctly tagged. 
+ """ + self.run_make_command("pull") + stdout, _, returncode = self.run_make_command( + "images", capture_output=True + ) + self.assertEqual(returncode, 0) + for image in self.expected_images: + repo, tag = image.split(":") + self.assertIn(repo, stdout, f"Repository {repo} not found in output") + self.assertIn(tag, stdout, f"Tag {tag} not found in output") + + @with_cleanup + def test_start_services_up(self): + """ + Test the `make up` command. + This test verifies that the application can be started successfully with the 'up' command. + """ + print(f"{BLUE}test_start_services_up{ENDC}") + print(f"{CYAN}Test the 'up' make command. ... {ENDC}\n") + self._test_start_services("up", {}) + + @with_cleanup + def test_start_services_build(self): + """ + Test the `make build` command. + This test verifies that the application can be built and started successfully with the 'build' command. + """ + print(f"{BLUE}test_start_services_build{ENDC}") + print(f"{CYAN}Test the 'build' make command. ... {ENDC}\n") + self._test_start_services("build", {}) + + def _test_start_services(self, command, env_vars): + """ + Helper method to test start services. + This method contains the common logic for testing 'up' and 'build' commands. + """ + self.run_make_command("down") + self.run_make_command("certs") + + if "build" in command: + _, stderr, returncode = self.run_make_command( + command, capture_output=True, env_vars=env_vars + ) + self.assertEqual( + returncode, 0, f"Build command failed with error: {stderr}" + ) + _, stderr, returncode = self.run_make_command( + "up", capture_output=True, env_vars=env_vars + ) + else: + _, stderr, returncode = self.run_make_command( + command, capture_output=True, env_vars=env_vars + ) + + self.assertEqual( + returncode, 0, f"{command} command failed with error: {stderr}" + ) + self.assert_containers_running() + + @with_cleanup + def test_restart_services(self): + """ + Test the `make restart` command with various configurations. + This test verifies that services can be restarted successfully in different scenarios. + """ + print(f"{BLUE}test_restart_services (__main__.TestMakefile.test_restart_services){ENDC}") + print(f"{CYAN}Test the 'restart' make command with various configurations. ... {ENDC}") + scenarios = [ + ("restart", {}, []), + ("restart", {"DEV": "1"}, []), + ("restart", {"COLD": "1"}, []), + ("restart", {}, ["web"]), + ("restart", {}, ["celery"]), + ] + + for command, env_vars, services in scenarios: + with self.subTest(command=command, env_vars=env_vars, services=services): + self._test_restart_services(command, env_vars, services) + + def _test_restart_services(self, command, env_vars, services): + """ + Helper method to test restart services. + This method contains the common logic for testing various restart scenarios. + """ + self.run_make_command("certs") + self.run_make_command("up") + + restart_command = f"{command} {' '.join(services)}" + _, stderr, returncode = self.run_make_command( + restart_command.strip(), capture_output=True, env_vars=env_vars + ) + + self.assertEqual(returncode, 0, f"Restart command failed with error: {stderr}") + self.assert_containers_running() + + @with_cleanup + def test_logs(self): + """ + Test the `make logs` command. + This test verifies that logs can be retrieved and contain expected content. + It ensures services are up before checking logs and limits the log collection time. 
+ """ + self.run_make_command("certs") + self.run_make_command("up") + + logs_process = subprocess.Popen( + "make logs", + shell=True, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + text=True, + start_new_session=True, + ) + time.sleep(5) + os.killpg(os.getpgid(logs_process.pid), signal.SIGTERM) + stdout, _ = logs_process.communicate(timeout=1) + + expected_services = [ + "redis-1", + "db-1", + "web-1", + "celery-1", + "celery-beat-1", + "ollama-1", + "proxy-1", + ] + for service in expected_services: + self.assertIn(service, stdout, f"Logs for {service} not found") + + @with_cleanup + def test_superuser(self): + """ + Test the superuser-related make commands. + This test verifies that a superuser can be created, its password changed, and then deleted. + """ + self.run_make_command("certs") + self.run_make_command("up") + + create_result = subprocess.run( + "make superuser_create isNonInteractive=true", + shell=True, + capture_output=True, + text=True, + check=False, + ) + + self.assertEqual( + create_result.returncode, + 0, + f"Superuser creation failed with error: {create_result.stderr}", + ) + self.assertIn("Superuser created successfully", create_result.stdout) + + changepassword_result = subprocess.run( + "make superuser_changepassword isNonInteractive=true", + shell=True, + capture_output=True, + text=True, + check=False, + ) + + self.assertEqual( + changepassword_result.returncode, + 0, + f"Superuser password change failed with error: {changepassword_result.stderr}", + ) + + delete_result = subprocess.run( + "make superuser_delete", + shell=True, + capture_output=True, + text=True, + check=False, + ) + + self.assertEqual( + delete_result.returncode, + 0, + f"Superuser deletion failed with error: {delete_result.stderr}", + ) + + @with_cleanup + def test_migrate(self): + """ + Test the `make migrate` command. + This test verifies that database migrations can be applied successfully. + """ + # First, generate certificates and start services + self.run_make_command("certs") + self.run_make_command("up") + + # Now run the migrate command + stdout, _, returncode = self.run_make_command( + "migrate", capture_output=True + ) + self.assertEqual(returncode, 0) + self.assertIn("Apply all migrations", stdout) + + @with_cleanup + def test_certs(self): + """ + Test the `make certs` command. + This test verifies that SSL certificates can be generated successfully. + """ + returncode = self.run_make_command("certs") + self.assertEqual(returncode, 0) + self.assertTrue( + os.path.exists(f"{RENGINE_PATH}/docker/secrets/certs/rengine_chain.pem") + ) + self.assertTrue( + os.path.exists(f"{RENGINE_PATH}/docker/secrets/certs/rengine_rsa.key") + ) + self.assertTrue( + os.path.exists(f"{RENGINE_PATH}/docker/secrets/certs/rengine.pem") + ) + + @with_cleanup + def test_down(self): + """ + Test the `make down` command. + This test verifies that all services can be stopped successfully. 
+ """ + # First, generate certificates and start services + self.run_make_command("certs") + self.run_make_command("up") + + # Execute the 'down' command + returncode = self.run_make_command("down") + self.assertEqual(returncode, 0) + + # Verify that none of the expected services are running + running_containers = self.client.containers.list() + for service in self.expected_services: + self.assertFalse( + any(service in container.name for container in running_containers), + f"Service {service} is still running after 'down' command", + ) + + # Verify that all associated containers are stopped + all_containers = self.client.containers.list(all=True) + for container in all_containers: + if any(service in container.name for service in self.expected_services): + try: + container_info = container.attrs + self.assertIn( + container_info["State"]["Status"], + ["exited", "dead"], + f"Container {container.name} is not stopped after 'down' command", + ) + except NotFound: + # If the container is not found, it's considered stopped + pass + + def test_prune(self): + """ + Test the `make prune` command. + This test verifies that unused Docker volumes can be removed successfully. + """ + # Ensure all services are down before pruning + self.run_make_command("down") + + # Run the prune command + returncode = self.run_make_command("prune") + self.assertEqual(returncode, 0, "Prune command failed") + + # Check for reNgine-related volumes + volumes = self.client.volumes.list() + rengine_volumes = [v for v in volumes if v.name.startswith("rengine_")] + + if rengine_volumes: + volume_names = ", ".join([v.name for v in rengine_volumes]) + self.fail(f"reNgine volumes still exist after pruning: {volume_names}") + + print(f"Total volumes remaining: {len(volumes)}") + print("Volumes not removed:") + for volume in volumes: + print(f"- {volume.name}") + +def suite(tests_to_run=None, exclude_build=False): + """ + Create a test suite with specified or all tests. + + Args: + tests_to_run (list): List of test names to run. If None, all tests are run. + exclude_build (bool): If True, excludes the build test from the suite. + + Returns: + unittest.TestSuite: The test suite to run. + """ + all_tests = [ + "test_certs", + "test_pull", + "test_images", + "test_start_services_up", + "test_superuser", + "test_migrate", + "test_logs", + "test_restart_services", + "test_start_services_build", + "test_down", + "test_prune", + ] + + if exclude_build: + all_tests.remove("test_start_services_build") + + tests_to_execute = tests_to_run if tests_to_run else all_tests + + test_suite = unittest.TestSuite() + executed_tests = [] + skipped_tests = [] + + for test in tests_to_execute: + if test in all_tests: + test_method = getattr(TestMakefile, test, None) + if test_method and callable(test_method): + test_suite.addTest(TestMakefile(test)) + executed_tests.append(test) + else: + skipped_tests.append(test) + print(f"Warning: Test method '{test}' not found in TestMakefile. Skipping.") + else: + skipped_tests.append(test) + print(f"Warning: Test '{test}' not in the list of available tests. 
Skipping.") + + # Store test information for later display + test_info = { + 'executed': executed_tests, + 'skipped': skipped_tests + } + + return test_suite, test_info + + +if __name__ == "__main__": + import argparse + + parser = argparse.ArgumentParser(description="Run reNgine-ng Makefile tests") + parser.add_argument("--exclude-build", action="store_true", help="Exclude build test") + parser.add_argument("--tests", nargs="*", help="Specific tests to run") + args = parser.parse_args() + + runner = unittest.TextTestRunner(verbosity=1) + test_suite, test_info = suite(args.tests, args.exclude_build) + result = runner.run(test_suite) + + # Display test summary + print(f"\n{GREEN}Test Execution Summary:{ENDC}") + print(f"{YELLOW}Tests executed:{ENDC}") + for test in test_info['executed']: + print(f"- {test}") + if test_info['skipped']: + print(f"\n{RED}Tests skipped:{ENDC}") + for test in test_info['skipped']: + print(f"- {test}") + + sys.exit(not result.wasSuccessful()) From 25f4ed7cf3bc86d7f0fc1cb1af2eee8cdc771ae1 Mon Sep 17 00:00:00 2001 From: Psyray Date: Thu, 29 Aug 2024 15:56:09 +0200 Subject: [PATCH 242/262] fix(ui): tools settings page (#169) * bug(celery): fix tools settings page load * bug(celery): fix whois lookup, CMS Detector, wafw00f * bug(celery): create tool_config volume to share data between web & celery * bug(celery): fix some tools config file not found * bug(celery): change nuclei parameter to create config * bug(ui): fix bug on saving nuclei and gf pattern * bug(ui): remove bad file * docker(tools): add the missing gau config file * docker(tools): fix bad gau config name * docker(tools): update toml copy command to copy dot files * tools(ui): add the GAU config file to the tools settings page * tools(config): replace harvester default config file --- docker/celery/Dockerfile | 8 + docker/celery/config/.gau.toml | 19 + docker/celery/config/amass.ini | 263 + .../celery/config/the-harvester-api-keys.yaml | 65 + docker/docker-compose.yml | 13 +- web/api/urls.py | 4 + web/api/views.py | 140 +- web/reNgine/settings.py | 5 +- web/reNgine/tasks.py | 9322 +++++++++-------- .../static/scanEngine/js/custom_tools.js | 16 + .../templates/scanEngine/settings/tool.html | 16 + web/scanEngine/views.py | 46 +- web/static/custom/custom.js | 16 +- 13 files changed, 5227 insertions(+), 4706 deletions(-) create mode 100644 docker/celery/config/.gau.toml create mode 100644 docker/celery/config/amass.ini create mode 100644 docker/celery/config/the-harvester-api-keys.yaml diff --git a/docker/celery/Dockerfile b/docker/celery/Dockerfile index d14cefb8..f45f1dc6 100644 --- a/docker/celery/Dockerfile +++ b/docker/celery/Dockerfile @@ -165,6 +165,7 @@ RUN ln -s /usr/local/bin/geckodriver $BINPATH/geckodriver && \ mkdir -p /home/$USERNAME/nuclei-templates && wget https://raw.githubusercontent.com/NagliNagli/Shockwave-OSS/bd7445cd320a174d3073f0a61867a40849d28436/ssrf.yaml -O /home/$USERNAME/nuclei-templates/ssrf_nagli.yaml && \ mkdir -p /home/$USERNAME/results +# Copy poetry config files COPY --chown=$USERNAME:$USERNAME ./*.toml /home/$USERNAME # Install aliases for tools needing a pyproject.toml @@ -175,6 +176,13 @@ RUN cd $TOOLPATH/.github/OneForAll && mv /home/$USERNAME/oneforall-pyproject.tom cd $TOOLPATH/.github/Infoga && mv /home/$USERNAME/infoga-pyproject.toml pyproject.toml && poetry env use python3.10 && poetry install --no-cache && echo "#!/bin/bash\n\npoetry -C $TOOLPATH/.github/Infoga/ run python $TOOLPATH/.github/Infoga/infoga.py \"\$@\"" > $BINPATH/infoga && chmod +x $BINPATH/infoga && 
\ cd /home/$USERNAME && poetry install +# Create tools config files +RUN nuclei -silent && naabu -version && subfinder -version && mkdir -p /home/$USERNAME/.config/theHarvester +COPY --chown=$USERNAME:$USERNAME ./config/the-harvester-api-keys.yaml /home/$USERNAME/.config/theHarvester/api-keys.yaml +COPY --chown=$USERNAME:$USERNAME ./config/amass.ini /home/$USERNAME/.config/amass.ini +COPY --chown=$USERNAME:$USERNAME ./config/.gau.toml /home/$USERNAME/.config/.gau.toml +RUN ln -s /home/$USERNAME/.config/.gau.toml /home/$USERNAME/.gau.toml + COPY ./entrypoint.sh /entrypoint.sh RUN mkdir -p /home/$USERNAME/rengine /home/$USERNAME/scan_results \ && chown -R $USERNAME:$USERNAME /home/$USERNAME/rengine \ diff --git a/docker/celery/config/.gau.toml b/docker/celery/config/.gau.toml new file mode 100644 index 00000000..ff13611e --- /dev/null +++ b/docker/celery/config/.gau.toml @@ -0,0 +1,19 @@ +threads = 2 +verbose = false +retries = 15 +subdomains = false +parameters = false +providers = ["wayback","commoncrawl","otx","urlscan"] +blacklist = ["ttf","woff","svg","png","jpg"] +json = false + +[urlscan] + apikey = "" + +[filters] + from = "" + to = "" + matchstatuscodes = [] + matchmimetypes = [] + filterstatuscodes = [] + filtermimetypes = ["image/png", "image/jpg", "image/svg+xml"] diff --git a/docker/celery/config/amass.ini b/docker/celery/config/amass.ini new file mode 100644 index 00000000..72251950 --- /dev/null +++ b/docker/celery/config/amass.ini @@ -0,0 +1,263 @@ +# Copyright 2017-2020 Jeff Foley. All rights reserved. +# Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. + +# Should results only be collected passively and without DNS resolution? Not recommended. +#mode = passive +# Would you like to use active techniques that communicate directly with the discovered assets, +# such as pulling TLS certificates from discovered IP addresses and attempting DNS zone transfers? +#mode = active + +# The directory that stores the Cayley graph database and other output files +# The default for Linux systems is: $HOME/.config/amass +#output_directory = amass + +# Another location (directory) where the user can provide ADS scripts to the engine. +#scripts_directory = + +# The maximum number of DNS queries that can be performed concurrently during the enumeration. +#maximum_dns_queries = 20000 + +# DNS resolvers used globally by the amass package. +#[resolvers] +#monitor_resolver_rate = true +#resolver = 1.1.1.1 ; Cloudflare +#resolver = 8.8.8.8 ; Google +#resolver = 64.6.64.6 ; Verisign +#resolver = 74.82.42.42 ; Hurricane Electric +#resolver = 1.0.0.1 ; Cloudflare Secondary +#resolver = 8.8.4.4 ; Google Secondary +#resolver = 64.6.65.6 ; Verisign Secondary +#resolver = 77.88.8.1 ; Yandex.DNS Secondary + +[scope] +# The network infrastructure settings expand scope, not restrict the scope. +# Single IP address or range (e.g. a.b.c.10-245) +#address = 192.168.1.1 +#cidr = 192.168.1.0/24 +#asn = 26808 +#port = 80 +port = 443 +#port = 8080 + +# Root domain names used in the enumeration. The findings are limited by the root domain names provided. +#[scope.domains] +#domain = owasp.org +#domain = appsecusa.org +#domain = appsec.eu +#domain = appsec-labs.com + +# Are there any subdomains that are out of scope? +#[scope.blacklisted] +#subdomain = education.appsec-labs.com +#subdomain = 2012.appsecusa.org + +# The graph database discovered DNS names, associated network infrastructure, results from data sources, etc. 
+# This information is then used in future enumerations and analysis of the discoveries. +#[graphdbs] +#local_database = true ; Set this to false to disable use of the local database. + +# postgres://[username:password@]host[:port]/database-name?sslmode=disable of the PostgreSQL +# database and credentials. Sslmode is optional, and can be disable, require, verify-ca, or verify-full. +#[graphdbs.postgres] +#primary = false ; Specify which graph database is the primary db, or the local database will be selected. +#url = "postgres://[username:password@]host[:port]/database-name?sslmode=disable" +#options="connect_timeout=10" + +# MqSQL database and credentials URL format: +# [username:password@]tcp(host[:3306])/database-name?timeout=10s +#[graphdbs.mysql] +#url = [username:password@]tcp(host[:3306])/database-name?timeout=10s + +# Settings related to DNS name brute forcing. +#[bruteforce] +#enabled = true +#recursive = true +# Number of discoveries made in a subdomain before performing recursive brute forcing: Default is 1. +#minimum_for_recursive = 1 +#wordlist_file = /usr/share/wordlists/all.txt +#wordlist_file = /usr/share/wordlists/all.txt # multiple lists can be used + +# Would you like to permute resolved names? +#[alterations] +#enabled = true +# edit_distance specifies the number of times a primitive edit operation will be +# performed on a name sample during fuzzy label searching. +#edit_distance = 1 ; Setting this to zero will disable this expensive feature. +#flip_words = true # test-dev.owasp.org -> test-prod.owasp.org +#flip_numbers = true # test1.owasp.org -> test2.owasp.org +#add_words = true # test.owasp.org -> test-dev.owasp.org +#add_numbers = true # test.owasp.org -> test1.owasp.org +# Multiple lists can be used. +#wordlist_file = /usr/share/wordlists/all.txt +#wordlist_file = /usr/share/wordlists/all.txt + +[data_sources] +# When set, this time-to-live is the minimum value applied to all data source caching. +minimum_ttl = 1440 ; One day + +# Are there any data sources that should be disabled? +#[data_sources.disabled] +#data_source = Ask +#data_source = Exalead +#data_source = IPv4Info + +# Provide data source configuration information. +# See the following format: +#[data_sources.SOURCENAME] ; The SOURCENAME must match the name in the data source implementation. +#ttl = 4320 ; Time-to-live value sets the number of minutes that the responses are cached. +# Unique identifier for this set of SOURCENAME credentials. +# Multiple sets of credentials can be provided and will be randomly selected. +#[data_sources.SOURCENAME.CredentialSetID] +#apikey = ; Each data source uses potentially different keys for authentication. +#secret = ; See the examples below for each data source. 
+#username = +#password = + +#https://otx.alienvault.com (Free) +#[data_sources.AlienVault] +#[data_sources.AlienVault.Credentials] +#apikey = + +#https://app.binaryedge.com (Free) +#[data_sources.BinaryEdge] +#ttl = 10080 +#[data_sources.BinaryEdge.Credentials] +#apikey = + +#https://c99.nl (Paid) +#[data_sources.C99] +#ttl = 4320 +#[data_sources.C99.account1] +#apikey = +#[data_sources.C99.account2] +#apikey = + +#https://censys.io (Free) +#[data_sources.Censys] +#ttl = 10080 +#[data_sources.Censys.Credentials] +#apikey = +#secret = + +#https://chaos.projectdiscovery.io (Free-InviteOnly) +#[data_sources.Chaos] +#ttl = 4320 +#[data_sources.Chaos.Credentials] +#apikey = + +#https://cloudflare.com (Free) +#[data_sources.Cloudflare] +#[data_sources.Cloudflare.Credentials] +#apikey = + +#Closed Source Invite Only +#[data_sources.CIRCL] +#[data_sources.CIRCL.Credentials] +#username = +#password = + +#https://dnsdb.info (Paid) +#[data_sources.DNSDB] +#ttl = 4320 +#[data_sources.DNSDB.Credentials] +#apikey = + +#https://developer.facebook.com (Free) +# Look here for how to obtain the Facebook credentials: +# https://goldplugins.com/documentation/wp-social-pro-documentation/how-to-get-an-app-id-and-secret-key-from-facebook/ +#[data_sources.FacebookCT] +#ttl = 4320 +#[data_sources.FacebookCT.app1] +#apikey = +#secret = +#[data_sources.FacebookCT.app2] +#apikey = +#secret = + +#https://github.com (Free) +#[data_sources.GitHub] +#ttl = 4320 +#[data_sources.GitHub.accountname] +#apikey = + +#https://networksdb.io (Free) +#[data_sources.NetworksDB] +#[data_sources.NetworksDB.Credentials] +#apikey = + +#https://passivetotal.com (Free) +#[data_sources.PassiveTotal] +#ttl = 10080 +#[data_sources.PassiveTotal.Credentials] +#username = +#apikey = + +#https://recon.dev (Free) +#[data_sources.ReconDev] +#[data_sources.ReconDev.free] +#apikey = +#[data_sources.ReconDev.paid] +#apikey = + +#https://securitytrails.com (Free) +#[data_sources.SecurityTrails] +#ttl = 1440 +#[data_sources.SecurityTrails.Credentials] +#apikey = + +#https://shodan.io (Free) +#[data_sources.Shodan] +#ttl = 10080 +#[data_sources.Shodan.Credentials] +#apikey = + +#https://spyse.com (Paid/Free-trial) +#[data_sources.Spyse] +#ttl = 4320 +#[data_sources.Spyse.Credentials] +#apikey = + +#https://developer.twitter.com (Free) +# Provide your Twitter App Consumer API key and Consumer API secrety key +#[data_sources.Twitter] +#[data_sources.Twitter.account1] +#apikey = +#secret = +#[data_sources.Twitter.account2] +#apikey = +#secret = + +#https://umbrella.cisco.com (Paid-Enterprise) +# The apikey must be an API access token created through the Investigate management UI +#[data_sources.Umbrella] +#[data_sources.Umbrella.Credentials] +#apikey = + +#https://urlscan.io (Free) +# URLScan can be used without an API key, but the key allows new submissions to be made +#[data_sources.URLScan] +#[data_sources.URLScan.Credentials] +#apikey = + +#https://virustotal.com (Free) +#[data_sources.VirusTotal] +#ttl = 10080 +#[data_sources.VirusTotal.Credentials] +#apikey = + +#https://whoisxmlapi.com (Free) +#[data_sources.WhoisXML] +#[data_sources.WhoisXML.Credentials] +#apikey = + +#https://zetalytics.com (Paid) +#[data_sources.ZETAlytics] +#ttl = 1440 +#[data_sources.ZETAlytics.Credentials] +#apikey = + +#[data_sources.ZoomEye] +#ttl = 1440 +#[data_sources.ZoomEye.Credentials] +#username = +#password = diff --git a/docker/celery/config/the-harvester-api-keys.yaml b/docker/celery/config/the-harvester-api-keys.yaml new file mode 100644 index 00000000..c0098ad5 
--- /dev/null +++ b/docker/celery/config/the-harvester-api-keys.yaml @@ -0,0 +1,65 @@ +apikeys: + bevigil: + key: + + binaryedge: + key: + + bing: + key: + + bufferoverun: + key: + + censys: + id: + secret: + + criminalip: + key: + + fullhunt: + key: + + github: + key: + + hunter: + key: + + hunterhow: + key: + + intelx: + key: + + netlas: + key: + + onyphe: + key: + + pentestTools: + key: + + projectDiscovery: + key: + + rocketreach: + key: + + securityTrails: + key: + + shodan: + key: + + tomba: + key: + secret: + + virustotal: + key: + + zoomeye: + key: diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index a508f615..68d23c49 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -47,6 +47,10 @@ services: - ../web:/home/rengine/rengine:rw,z - ./celery/entrypoint.sh:/entrypoint.sh:ro - scan_results:/home/rengine/scan_results + - tool_config:/home/rengine/.config + - nuclei_templates:/home/rengine/nuclei-templates + - gf_patterns:/home/rengine/.gf + - wordlist:/home/rengine/wordlists healthcheck: test: ["CMD", "poetry", "-C", "/home/rengine", "run", "celery","-A","reNgine","status"] interval: 10s @@ -78,6 +82,10 @@ services: - ../web:/home/rengine/rengine:rw,z - ./beat/entrypoint.sh:/entrypoint.sh:ro - scan_results:/home/rengine/scan_results + - tool_config:/home/rengine/.config + - nuclei_templates:/home/rengine/nuclei-templates + - gf_patterns:/home/rengine/.gf + - wordlist:/home/rengine/wordlists networks: - rengine_network @@ -96,7 +104,10 @@ services: volumes: - ../web:/home/rengine/rengine:rw,z - ./web/entrypoint.sh:/entrypoint.sh:ro - + - tool_config:/home/rengine/.config + - nuclei_templates:/home/rengine/nuclei-templates + - gf_patterns:/home/rengine/.gf + - wordlist:/home/rengine/wordlists healthcheck: test: ["CMD", "curl", "-f", "-k", "http://localhost:8000"] interval: 10s diff --git a/web/api/urls.py b/web/api/urls.py index f4ee867e..dee58443 100644 --- a/web/api/urls.py +++ b/web/api/urls.py @@ -154,6 +154,10 @@ 'tools/waf_detector/', WafDetector.as_view(), name='waf_detector'), + path( + 'tools/gf_list/', + GfList.as_view(), + name='gf_list'), path( 'tools/gpt_vulnerability_report/', GPTVulnerabilityReportGenerator.as_view(), diff --git a/web/api/views.py b/web/api/views.py index 2cc724d4..9bf81801 100644 --- a/web/api/views.py +++ b/web/api/views.py @@ -266,26 +266,37 @@ def filter_queryset(self, qs): class WafDetector(APIView): - def get(self, request): - req = self.request - url= req.query_params.get('url') - response = {} - response['status'] = False - - wafw00f_command = f'wafw00f {url}' - output = subprocess.check_output(wafw00f_command, shell=True) - # use regex to get the waf - regex = "behind \\\\x1b\[1;96m(.*)\\\\x1b" - group = re.search(regex, str(output)) - - if group: - response['status'] = True - response['results'] = group.group(1) - else: - response['message'] = 'Could not detect any WAF!' - - return Response(response) - + def get(self, request): + req = self.request + url = req.query_params.get('url') + response = { + 'status': False, + 'message': '', + 'results': None + } + + if not url: + response['message'] = 'URL parameter is missing' + return Response(response) + + try: + logger.info(f"Initiating WAF detection for URL: {url}") + result = run_wafw00f.delay(url).get(timeout=30) + + if result.startswith("Unexpected error"): + response['message'] = result + elif result != "No WAF detected": + response['status'] = True + response['results'] = result + else: + response['message'] = 'Could not detect any WAF!' 
+ + logger.info(f"WAF detection result: {response}") + except Exception as e: + logger.error(f"Error during WAF detection: {str(e)}") + response['message'] = f"An error occurred: {str(e)}" + + return Response(response) class SearchHistoryView(APIView): def get(self, request): @@ -1155,54 +1166,21 @@ def get(self, request): class CMSDetector(APIView): - def get(self, request): - req = self.request - url = req.query_params.get('url') - #save_db = True if 'save_db' in req.query_params else False - response = {'status': False} - try: - response = {} - cms_detector_command = f'cmseek' - cms_detector_command += ' --random-agent --batch --follow-redirect' - cms_detector_command += f' -u {url}' - - _, output = run_command(cms_detector_command, remove_ansi_sequence=True) - - response['message'] = 'Could not detect CMS!' - - parsed_url = urlparse(url) - - domain_name = parsed_url.hostname - port = parsed_url.port - - find_dir = domain_name - - if port: - find_dir += '_{}'.format(port) - # look for result path in output - path_regex = r"Result: (\/usr\/src[^\"\s]*)" - match = re.search(path_regex, output) - if match: - cms_json_path = match.group(1) - if os.path.isfile(cms_json_path): - cms_file_content = json.loads(open(cms_json_path, 'r').read()) - if not cms_file_content.get('cms_id'): - return response - response = {} - response = cms_file_content - response['status'] = True - try: - # remove results - cms_dir_path = os.path.dirname(cms_json_path) - shutil.rmtree(cms_dir_path) - except Exception as e: - logger.error(e) - return Response(response) - return Response(response) - except Exception as e: - response = {'status': False, 'message': str(e)} - return Response(response) - + def get(self, request): + url = request.query_params.get('url') + if not url: + return Response({'status': False, 'message': 'URL parameter is missing'}) + + try: + task = run_cmseek.delay(url) + result = task.get(timeout=300) # 5 minutes timeout + + if result['status']: + return Response(result) + else: + return Response({'status': False, 'message': 'Could not detect CMS!'}) + except Exception as e: + return Response({'status': False, 'message': str(e)}) class IPToDomain(APIView): def get(self, request): @@ -1269,7 +1247,7 @@ def get(self, request, format=None): return Response(response) if 'subfinder_config' in req.query_params: - path = str(Path.home() / ".config" / "subfinder" /" config.yaml") + path = str(Path.home() / ".config" / "subfinder" / "config.yaml") if not os.path.exists(path): run_command(f'touch {path}') response['message'] = 'File Created!' @@ -1289,7 +1267,7 @@ def get(self, request, format=None): return Response(response) if 'theharvester_config' in req.query_params: - path = str(Path(RENGINE_TOOL_PATH) / 'theHarvester' / 'api-keys.yaml') + path = str(Path.home() / ".config" / 'theHarvester' / 'api-keys.yaml') if not os.path.exists(path): run_command(f'touch {path}') response['message'] = 'File Created!' @@ -1333,9 +1311,31 @@ def get(self, request, format=None): response['status'] = False return Response(response) + if 'gau_config' in req.query_params: + path = str(Path.home() / ".config" / '.gau.toml') + if not os.path.exists(path): + run_command(f'touch {path}') + response['message'] = 'File Created!' 
+ f = open(path, "r") + response['status'] = True + response['content'] = f.read() + return Response(response) + response['message'] = 'Invalid Query Params' return Response(response) +class GfList(APIView): + def get(self, request): + try: + task = run_gf_list.delay() + result = task.get(timeout=30) # 30 seconds timeout + + if result['status']: + return Response(result['output']) + else: + return Response({'error': result['message']}, status=500) + except Exception as e: + return Response({'error': str(e)}, status=500) class ListTodoNotes(APIView): def get(self, request, format=None): diff --git a/web/reNgine/settings.py b/web/reNgine/settings.py index d03a184f..ea156a29 100644 --- a/web/reNgine/settings.py +++ b/web/reNgine/settings.py @@ -23,7 +23,7 @@ RENGINE_CUSTOM_ENGINES = env('RENGINE_CUSTOM_ENGINES', default=str(Path.home() / 'custom_engines')) RENGINE_WORDLISTS = env('RENGINE_WORDLISTS', default=str(Path.home() / 'wordlists')) RENGINE_TOOL_PATH = env('RENGINE_TOOL_PATH', default=str(Path.home() / 'tools')) -RENGINE_TOOL_GITHUB_PATH = env('RENGINE_TOOL_GITHUB_PATH', default=str(Path(RENGINE_TOOL_PATH) / '.github.com')) +RENGINE_TOOL_GITHUB_PATH = env('RENGINE_TOOL_GITHUB_PATH', default=str(Path(RENGINE_TOOL_PATH) / '.github')) RENGINE_CACHE_ENABLED = env.bool('RENGINE_CACHE_ENABLED', default=False) RENGINE_RECORD_ENABLED = env.bool('RENGINE_RECORD_ENABLED', default=True) @@ -192,9 +192,6 @@ LOGIN_REDIRECT_URL = 'onboarding' LOGOUT_REDIRECT_URL = 'login' -# Tool Location -TOOL_LOCATION = '/home/rengine/tools/' - # Number of endpoints that have the same content_length DELETE_DUPLICATES_THRESHOLD = 10 diff --git a/web/reNgine/tasks.py b/web/reNgine/tasks.py index c1c32dd3..afe7609b 100644 --- a/web/reNgine/tasks.py +++ b/web/reNgine/tasks.py @@ -12,6 +12,7 @@ import concurrent.futures import base64 import uuid +import shutil from pathlib import Path from datetime import datetime @@ -39,7 +40,7 @@ from startScan.models import EndPoint, Subdomain, Vulnerability from targetApp.models import Domain if CELERY_REMOTE_DEBUG: - import debugpy + import debugpy """ Celery tasks. @@ -55,292 +56,292 @@ @app.task(name='initiate_scan', bind=False, queue='initiate_scan_queue') def initiate_scan( - scan_history_id, - domain_id, - engine_id=None, - scan_type=LIVE_SCAN, - results_dir=RENGINE_RESULTS, - imported_subdomains=[], - out_of_scope_subdomains=[], - url_filter=''): - """Initiate a new scan. - - Args: - scan_history_id (int): ScanHistory id. - domain_id (int): Domain id. - engine_id (int): Engine ID. - scan_type (int): Scan type (periodic, live). - results_dir (str): Results directory. - imported_subdomains (list): Imported subdomains. - out_of_scope_subdomains (list): Out-of-scope subdomains. - url_filter (str): URL path. 
Default: '' - """ - - if CELERY_REMOTE_DEBUG: - debug() - - # Get scan history - scan = ScanHistory.objects.get(pk=scan_history_id) - - # Get scan engine - engine_id = engine_id or scan.scan_type.id # scan history engine_id - engine = EngineType.objects.get(pk=engine_id) - - # Get YAML config - config = yaml.safe_load(engine.yaml_configuration) - enable_http_crawl = config.get(ENABLE_HTTP_CRAWL, DEFAULT_ENABLE_HTTP_CRAWL) - gf_patterns = config.get(GF_PATTERNS, []) - - # Get domain and set last_scan_date - domain = Domain.objects.get(pk=domain_id) - domain.last_scan_date = timezone.now() - domain.save() - - # Get path filter - url_filter = url_filter.rstrip('/') - - # Get or create ScanHistory() object - if scan_type == LIVE_SCAN: # immediate - scan = ScanHistory.objects.get(pk=scan_history_id) - scan.scan_status = RUNNING_TASK - elif scan_type == SCHEDULED_SCAN: # scheduled - scan = ScanHistory() - scan.scan_status = INITIATED_TASK - scan.scan_type = engine - scan.celery_ids = [initiate_scan.request.id] - scan.domain = domain - scan.start_scan_date = timezone.now() - scan.tasks = engine.tasks - uuid_scan = uuid.uuid1() - scan.results_dir = f'{results_dir}/{domain.name}/scans/{uuid_scan}' - add_gf_patterns = gf_patterns and 'fetch_url' in engine.tasks - if add_gf_patterns and is_iterable(gf_patterns): - scan.used_gf_patterns = ','.join(gf_patterns) - scan.save() - - try: - os.makedirs(scan.results_dir, exist_ok=True) - except: - import traceback - - traceback.print_exc() - raise - - # Build task context - ctx = { - 'scan_history_id': scan_history_id, - 'engine_id': engine_id, - 'domain_id': domain.id, - 'results_dir': scan.results_dir, - 'url_filter': url_filter, - 'yaml_configuration': config, - 'out_of_scope_subdomains': out_of_scope_subdomains - } - ctx_str = json.dumps(ctx, indent=2) - - # Send start notif - logger.warning(f'Starting scan {scan_history_id} with context:\n{ctx_str}') - send_scan_notif.delay( - scan_history_id, - subscan_id=None, - engine_id=engine_id, - status=CELERY_TASK_STATUS_MAP[scan.scan_status]) - - # Save imported subdomains in DB - save_imported_subdomains(imported_subdomains, ctx=ctx) - - # Create initial subdomain in DB: make a copy of domain as a subdomain so - # that other tasks using subdomains can use it. 
- subdomain_name = domain.name - subdomain, _ = save_subdomain(subdomain_name, ctx=ctx) - - # If enable_http_crawl is set, create an initial root HTTP endpoint so that - # HTTP crawling can start somewhere - http_url = f'{domain.name}{url_filter}' if url_filter else domain.name - endpoint, _ = save_endpoint( - http_url, - ctx=ctx, - crawl=enable_http_crawl, - is_default=True, - subdomain=subdomain - ) - save_subdomain_metadata(subdomain, endpoint) - - # Build Celery tasks, crafted according to the dependency graph below: - # subdomain_discovery --> port_scan --> fetch_url --> dir_file_fuzz - # osint vulnerability_scan - # osint dalfox xss scan - # screenshot - # waf_detection - workflow = chain( - group( - subdomain_discovery.si(ctx=ctx, description='Subdomain discovery'), - osint.si(ctx=ctx, description='OS Intelligence') - ), - port_scan.si(ctx=ctx, description='Port scan'), - fetch_url.si(ctx=ctx, description='Fetch URL'), - group( - dir_file_fuzz.si(ctx=ctx, description='Directories & files fuzz'), - vulnerability_scan.si(ctx=ctx, description='Vulnerability scan'), - screenshot.si(ctx=ctx, description='Screenshot'), - waf_detection.si(ctx=ctx, description='WAF detection') - ) - ) - - # Build callback - callback = report.si(ctx=ctx).set(link_error=[report.si(ctx=ctx)]) - - # Run Celery chord - logger.info(f'Running Celery workflow with {len(workflow.tasks) + 1} tasks') - task = chain(workflow, callback).on_error(callback).delay() - scan.celery_ids.append(task.id) - scan.save() - - return { - 'success': True, - 'task_id': task.id - } + scan_history_id, + domain_id, + engine_id=None, + scan_type=LIVE_SCAN, + results_dir=RENGINE_RESULTS, + imported_subdomains=[], + out_of_scope_subdomains=[], + url_filter=''): + """Initiate a new scan. + + Args: + scan_history_id (int): ScanHistory id. + domain_id (int): Domain id. + engine_id (int): Engine ID. + scan_type (int): Scan type (periodic, live). + results_dir (str): Results directory. + imported_subdomains (list): Imported subdomains. + out_of_scope_subdomains (list): Out-of-scope subdomains. + url_filter (str): URL path. 
Default: '' + """ + + if CELERY_REMOTE_DEBUG: + debug() + + # Get scan history + scan = ScanHistory.objects.get(pk=scan_history_id) + + # Get scan engine + engine_id = engine_id or scan.scan_type.id # scan history engine_id + engine = EngineType.objects.get(pk=engine_id) + + # Get YAML config + config = yaml.safe_load(engine.yaml_configuration) + enable_http_crawl = config.get(ENABLE_HTTP_CRAWL, DEFAULT_ENABLE_HTTP_CRAWL) + gf_patterns = config.get(GF_PATTERNS, []) + + # Get domain and set last_scan_date + domain = Domain.objects.get(pk=domain_id) + domain.last_scan_date = timezone.now() + domain.save() + + # Get path filter + url_filter = url_filter.rstrip('/') + + # Get or create ScanHistory() object + if scan_type == LIVE_SCAN: # immediate + scan = ScanHistory.objects.get(pk=scan_history_id) + scan.scan_status = RUNNING_TASK + elif scan_type == SCHEDULED_SCAN: # scheduled + scan = ScanHistory() + scan.scan_status = INITIATED_TASK + scan.scan_type = engine + scan.celery_ids = [initiate_scan.request.id] + scan.domain = domain + scan.start_scan_date = timezone.now() + scan.tasks = engine.tasks + uuid_scan = uuid.uuid1() + scan.results_dir = f'{results_dir}/{domain.name}/scans/{uuid_scan}' + add_gf_patterns = gf_patterns and 'fetch_url' in engine.tasks + if add_gf_patterns and is_iterable(gf_patterns): + scan.used_gf_patterns = ','.join(gf_patterns) + scan.save() + + try: + os.makedirs(scan.results_dir, exist_ok=True) + except: + import traceback + + traceback.print_exc() + raise + + # Build task context + ctx = { + 'scan_history_id': scan_history_id, + 'engine_id': engine_id, + 'domain_id': domain.id, + 'results_dir': scan.results_dir, + 'url_filter': url_filter, + 'yaml_configuration': config, + 'out_of_scope_subdomains': out_of_scope_subdomains + } + ctx_str = json.dumps(ctx, indent=2) + + # Send start notif + logger.warning(f'Starting scan {scan_history_id} with context:\n{ctx_str}') + send_scan_notif.delay( + scan_history_id, + subscan_id=None, + engine_id=engine_id, + status=CELERY_TASK_STATUS_MAP[scan.scan_status]) + + # Save imported subdomains in DB + save_imported_subdomains(imported_subdomains, ctx=ctx) + + # Create initial subdomain in DB: make a copy of domain as a subdomain so + # that other tasks using subdomains can use it. 
+ subdomain_name = domain.name + subdomain, _ = save_subdomain(subdomain_name, ctx=ctx) + + # If enable_http_crawl is set, create an initial root HTTP endpoint so that + # HTTP crawling can start somewhere + http_url = f'{domain.name}{url_filter}' if url_filter else domain.name + endpoint, _ = save_endpoint( + http_url, + ctx=ctx, + crawl=enable_http_crawl, + is_default=True, + subdomain=subdomain + ) + save_subdomain_metadata(subdomain, endpoint) + + # Build Celery tasks, crafted according to the dependency graph below: + # subdomain_discovery --> port_scan --> fetch_url --> dir_file_fuzz + # osint vulnerability_scan + # osint dalfox xss scan + # screenshot + # waf_detection + workflow = chain( + group( + subdomain_discovery.si(ctx=ctx, description='Subdomain discovery'), + osint.si(ctx=ctx, description='OS Intelligence') + ), + port_scan.si(ctx=ctx, description='Port scan'), + fetch_url.si(ctx=ctx, description='Fetch URL'), + group( + dir_file_fuzz.si(ctx=ctx, description='Directories & files fuzz'), + vulnerability_scan.si(ctx=ctx, description='Vulnerability scan'), + screenshot.si(ctx=ctx, description='Screenshot'), + waf_detection.si(ctx=ctx, description='WAF detection') + ) + ) + + # Build callback + callback = report.si(ctx=ctx).set(link_error=[report.si(ctx=ctx)]) + + # Run Celery chord + logger.info(f'Running Celery workflow with {len(workflow.tasks) + 1} tasks') + task = chain(workflow, callback).on_error(callback).delay() + scan.celery_ids.append(task.id) + scan.save() + + return { + 'success': True, + 'task_id': task.id + } @app.task(name='initiate_subscan', bind=False, queue='subscan_queue') def initiate_subscan( - scan_history_id, - subdomain_id, - engine_id=None, - scan_type=None, - results_dir=RENGINE_RESULTS, - url_filter=''): - """Initiate a new subscan. - - Args: - scan_history_id (int): ScanHistory id. - subdomain_id (int): Subdomain id. - engine_id (int): Engine ID. - scan_type (int): Scan type (periodic, live). - results_dir (str): Results directory. - url_filter (str): URL path. Default: '' - """ - - if CELERY_REMOTE_DEBUG: - debug() - - # Get Subdomain, Domain and ScanHistory - subdomain = Subdomain.objects.get(pk=subdomain_id) - scan = ScanHistory.objects.get(pk=subdomain.scan_history.id) - domain = Domain.objects.get(pk=subdomain.target_domain.id) - - # Get EngineType - engine_id = engine_id or scan.scan_type.id - engine = EngineType.objects.get(pk=engine_id) - - # Get YAML config - config = yaml.safe_load(engine.yaml_configuration) - enable_http_crawl = config.get(ENABLE_HTTP_CRAWL, DEFAULT_ENABLE_HTTP_CRAWL) - - # Create scan activity of SubScan Model - subscan = SubScan( - start_scan_date=timezone.now(), - celery_ids=[initiate_subscan.request.id], - scan_history=scan, - subdomain=subdomain, - type=scan_type, - status=RUNNING_TASK, - engine=engine) - subscan.save() - - # Get YAML configuration - config = yaml.safe_load(engine.yaml_configuration) - - # Create results directory - uuid_scan = uuid.uuid1() - results_dir = f'{results_dir}/{domain.name}/subscans/{uuid_scan}' - os.makedirs(results_dir, exist_ok=True) - - # Run task - method = globals().get(scan_type) - if not method: - logger.warning(f'Task {scan_type} is not supported by reNgine. 
Skipping') - return - scan.tasks.append(scan_type) - scan.save() - - # Send start notif - send_scan_notif.delay( - scan.id, - subscan_id=subscan.id, - engine_id=engine_id, - status='RUNNING') - - # Build context - ctx = { - 'scan_history_id': scan.id, - 'subscan_id': subscan.id, - 'engine_id': engine_id, - 'domain_id': domain.id, - 'subdomain_id': subdomain.id, - 'yaml_configuration': config, - 'results_dir': results_dir, - 'url_filter': url_filter - } - - # Build header + callback - workflow = method.si(ctx=ctx) - callback = report.si(ctx=ctx).set(link_error=[report.si(ctx=ctx)]) - - # Run Celery tasks - task = chain(workflow, callback).on_error(callback).delay() - subscan.celery_ids.append(task.id) - subscan.save() - - return { - 'success': True, - 'task_id': task.id - } + scan_history_id, + subdomain_id, + engine_id=None, + scan_type=None, + results_dir=RENGINE_RESULTS, + url_filter=''): + """Initiate a new subscan. + + Args: + scan_history_id (int): ScanHistory id. + subdomain_id (int): Subdomain id. + engine_id (int): Engine ID. + scan_type (int): Scan type (periodic, live). + results_dir (str): Results directory. + url_filter (str): URL path. Default: '' + """ + + if CELERY_REMOTE_DEBUG: + debug() + + # Get Subdomain, Domain and ScanHistory + subdomain = Subdomain.objects.get(pk=subdomain_id) + scan = ScanHistory.objects.get(pk=subdomain.scan_history.id) + domain = Domain.objects.get(pk=subdomain.target_domain.id) + + # Get EngineType + engine_id = engine_id or scan.scan_type.id + engine = EngineType.objects.get(pk=engine_id) + + # Get YAML config + config = yaml.safe_load(engine.yaml_configuration) + enable_http_crawl = config.get(ENABLE_HTTP_CRAWL, DEFAULT_ENABLE_HTTP_CRAWL) + + # Create scan activity of SubScan Model + subscan = SubScan( + start_scan_date=timezone.now(), + celery_ids=[initiate_subscan.request.id], + scan_history=scan, + subdomain=subdomain, + type=scan_type, + status=RUNNING_TASK, + engine=engine) + subscan.save() + + # Get YAML configuration + config = yaml.safe_load(engine.yaml_configuration) + + # Create results directory + uuid_scan = uuid.uuid1() + results_dir = f'{results_dir}/{domain.name}/subscans/{uuid_scan}' + os.makedirs(results_dir, exist_ok=True) + + # Run task + method = globals().get(scan_type) + if not method: + logger.warning(f'Task {scan_type} is not supported by reNgine. Skipping') + return + scan.tasks.append(scan_type) + scan.save() + + # Send start notif + send_scan_notif.delay( + scan.id, + subscan_id=subscan.id, + engine_id=engine_id, + status='RUNNING') + + # Build context + ctx = { + 'scan_history_id': scan.id, + 'subscan_id': subscan.id, + 'engine_id': engine_id, + 'domain_id': domain.id, + 'subdomain_id': subdomain.id, + 'yaml_configuration': config, + 'results_dir': results_dir, + 'url_filter': url_filter + } + + # Build header + callback + workflow = method.si(ctx=ctx) + callback = report.si(ctx=ctx).set(link_error=[report.si(ctx=ctx)]) + + # Run Celery tasks + task = chain(workflow, callback).on_error(callback).delay() + subscan.celery_ids.append(task.id) + subscan.save() + + return { + 'success': True, + 'task_id': task.id + } @app.task(name='report', bind=False, queue='report_queue') def report(ctx={}, description=None): - """Report task running after all other tasks. - Mark ScanHistory or SubScan object as completed and update with final - status, log run details and send notification. - - Args: - description (str, optional): Task description shown in UI. 
- """ - # Get objects - subscan_id = ctx.get('subscan_id') - scan_id = ctx.get('scan_history_id') - engine_id = ctx.get('engine_id') - scan = ScanHistory.objects.filter(pk=scan_id).first() - subscan = SubScan.objects.filter(pk=subscan_id).first() - - # Get failed tasks - tasks = ScanActivity.objects.filter(scan_of=scan).all() - if subscan: - tasks = tasks.filter(celery_id__in=subscan.celery_ids) - failed_tasks = tasks.filter(status=FAILED_TASK) - - # Get task status - failed_count = failed_tasks.count() - status = SUCCESS_TASK if failed_count == 0 else FAILED_TASK - status_h = 'SUCCESS' if failed_count == 0 else 'FAILED' - - # Update scan / subscan status - if subscan: - subscan.stop_scan_date = timezone.now() - subscan.status = status - subscan.save() - else: - scan.scan_status = status - scan.stop_scan_date = timezone.now() - scan.save() - - # Send scan status notif - send_scan_notif.delay( - scan_history_id=scan_id, - subscan_id=subscan_id, - engine_id=engine_id, - status=status_h) + """Report task running after all other tasks. + Mark ScanHistory or SubScan object as completed and update with final + status, log run details and send notification. + + Args: + description (str, optional): Task description shown in UI. + """ + # Get objects + subscan_id = ctx.get('subscan_id') + scan_id = ctx.get('scan_history_id') + engine_id = ctx.get('engine_id') + scan = ScanHistory.objects.filter(pk=scan_id).first() + subscan = SubScan.objects.filter(pk=subscan_id).first() + + # Get failed tasks + tasks = ScanActivity.objects.filter(scan_of=scan).all() + if subscan: + tasks = tasks.filter(celery_id__in=subscan.celery_ids) + failed_tasks = tasks.filter(status=FAILED_TASK) + + # Get task status + failed_count = failed_tasks.count() + status = SUCCESS_TASK if failed_count == 0 else FAILED_TASK + status_h = 'SUCCESS' if failed_count == 0 else 'FAILED' + + # Update scan / subscan status + if subscan: + subscan.stop_scan_date = timezone.now() + subscan.status = status + subscan.save() + else: + scan.scan_status = status + scan.stop_scan_date = timezone.now() + scan.save() + + # Send scan status notif + send_scan_notif.delay( + scan_history_id=scan_id, + subscan_id=subscan_id, + engine_id=engine_id, + status=status_h) #------------------------- # @@ -349,2637 +350,2637 @@ def report(ctx={}, description=None): @app.task(name='subdomain_discovery', queue='main_scan_queue', base=RengineTask, bind=True) def subdomain_discovery( - self, - host=None, - ctx=None, - description=None): - """Uses a set of tools (see SUBDOMAIN_SCAN_DEFAULT_TOOLS) to scan all - subdomains associated with a domain. - - Args: - host (str): Hostname to scan. - - Returns: - subdomains (list): List of subdomain names. 
- """ - if not host: - host = self.subdomain.name if self.subdomain else self.domain.name - - if self.url_filter: - logger.warning(f'Ignoring subdomains scan as an URL path filter was passed ({self.url_filter}).') - return - - # Config - config = self.yaml_configuration.get(SUBDOMAIN_DISCOVERY) or {} - enable_http_crawl = config.get(ENABLE_HTTP_CRAWL) or self.yaml_configuration.get(ENABLE_HTTP_CRAWL, DEFAULT_ENABLE_HTTP_CRAWL) - threads = config.get(THREADS) or self.yaml_configuration.get(THREADS, DEFAULT_THREADS) - timeout = config.get(TIMEOUT) or self.yaml_configuration.get(TIMEOUT, DEFAULT_HTTP_TIMEOUT) - tools = config.get(USES_TOOLS, SUBDOMAIN_SCAN_DEFAULT_TOOLS) - default_subdomain_tools = [tool.name.lower() for tool in InstalledExternalTool.objects.filter(is_default=True).filter(is_subdomain_gathering=True)] - custom_subdomain_tools = [tool.name.lower() for tool in InstalledExternalTool.objects.filter(is_default=False).filter(is_subdomain_gathering=True)] - send_subdomain_changes, send_interesting = False, False - notif = Notification.objects.first() - if notif: - send_subdomain_changes = notif.send_subdomain_changes_notif - send_interesting = notif.send_interesting_notif - - # Gather tools to run for subdomain scan - if ALL in tools: - tools = SUBDOMAIN_SCAN_DEFAULT_TOOLS + custom_subdomain_tools - tools = [t.lower() for t in tools] - - # Make exception for amass since tool name is amass, but command is amass-active/passive - default_subdomain_tools.append('amass-passive') - default_subdomain_tools.append('amass-active') - - # Run tools - for tool in tools: - cmd = None - logger.info(f'Scanning subdomains for {host} with {tool}') - proxy = get_random_proxy() - if tool in default_subdomain_tools: - if tool == 'amass-passive': - use_amass_config = config.get(USE_AMASS_CONFIG, False) - cmd = f'amass enum -passive -d {host} -o ' + str(Path(self.results_dir) / 'subdomains_amass.txt') - cmd += (' -config ' + str(Path.home() / '.config' / 'amass.ini')) if use_amass_config else '' - - elif tool == 'amass-active': - use_amass_config = config.get(USE_AMASS_CONFIG, False) - amass_wordlist_name = config.get(AMASS_WORDLIST, 'deepmagic.com-prefixes-top50000') - wordlist_path = str(Path(RENGINE_WORDLISTS) / f'{amass_wordlist_name}.txt') - cmd = f'amass enum -active -d {host} -o ' + str(Path(self.results_dir) / 'subdomains_amass_active.txt') - cmd += (' -config ' + str(Path.home() / '.config' / 'amass.ini')) if use_amass_config else '' - cmd += f' -brute -w {wordlist_path}' - - elif tool == 'sublist3r': - cmd = f'sublist3r -d {host} -t {threads} -o ' + str(Path(self.results_dir) / 'subdomains_sublister.txt') - - elif tool == 'subfinder': - cmd = f'subfinder -d {host} -o ' + str(Path(self.results_dir) / 'subdomains_subfinder.txt') - use_subfinder_config = config.get(USE_SUBFINDER_CONFIG, False) - cmd += (' -config ' + str(Path.home() / '.config' / 'subfinder' / 'config.yaml')) if use_subfinder_config else '' - cmd += f' -proxy {proxy}' if proxy else '' - cmd += f' -timeout {timeout}' if timeout else '' - cmd += f' -t {threads}' if threads else '' - cmd += f' -silent' - - elif tool == 'oneforall': - cmd = f'oneforall --target {host} run' - cmd_extract = f'cut -d\',\' -f6 ' + str(Path(RENGINE_TOOL_GITHUB_PATH) / 'OneForAll' / 'results' / f'{host}.csv') + ' > ' + str(Path(self.results_dir) / 'subdomains_oneforall.txt') - cmd_rm = f'rm -rf ' + str(Path(RENGINE_TOOL_GITHUB_PATH) / 'OneForAll' / 'results'/ f'{host}.csv') - cmd += f' && {cmd_extract} && {cmd_rm}' - - elif tool == 'ctfr': - results_file = 
str(Path(self.results_dir) / 'subdomains_ctfr.txt') - cmd = f'ctfr -d {host} -o {results_file}' - cmd_extract = f"cat {results_file} | sed 's/\*.//g' | tail -n +12 | uniq | sort > {results_file}" - cmd += f' && {cmd_extract}' - - elif tool == 'tlsx': - results_file = str(Path(self.results_dir) / 'subdomains_tlsx.txt') - cmd = f'tlsx -san -cn -silent -ro -host {host}' - cmd += f" | sed -n '/^\([a-zA-Z0-9]\([-a-zA-Z0-9]*[a-zA-Z0-9]\)\?\.\)\+{host}$/p' | uniq | sort" - cmd += f' > {results_file}' - - elif tool == 'netlas': - results_file = str(Path(self.results_dir) / 'subdomains_netlas.txt') - cmd = f'netlas search -d domain -i domain domain:"*.{host}" -f json' - netlas_key = get_netlas_key() - cmd += f' -a {netlas_key}' if netlas_key else '' - cmd_extract = f"grep -oE '([a-zA-Z0-9]([-a-zA-Z0-9]*[a-zA-Z0-9])?\.)+{host}'" - cmd += f' | {cmd_extract} > {results_file}' - - elif tool in custom_subdomain_tools: - tool_query = InstalledExternalTool.objects.filter(name__icontains=tool.lower()) - if not tool_query.exists(): - logger.error(f'{tool} configuration does not exists. Skipping.') - continue - custom_tool = tool_query.first() - cmd = custom_tool.subdomain_gathering_command - if '{TARGET}' not in cmd: - logger.error(f'Missing {{TARGET}} placeholders in {tool} configuration. Skipping.') - continue - if '{OUTPUT}' not in cmd: - logger.error(f'Missing {{OUTPUT}} placeholders in {tool} configuration. Skipping.') - continue - - - cmd = cmd.replace('{TARGET}', host) - cmd = cmd.replace('{OUTPUT}', str(Path(self.results_dir) / f'subdomains_{tool}.txt')) - cmd = cmd.replace('{PATH}', custom_tool.github_clone_path) if '{PATH}' in cmd else cmd - else: - logger.warning( - f'Subdomain discovery tool "{tool}" is not supported by reNgine. Skipping.') - continue - - # Run tool - try: - run_command( - cmd, - shell=True, - history_file=self.history_file, - scan_id=self.scan_id, - activity_id=self.activity_id) - except Exception as e: - logger.error( - f'Subdomain discovery tool "{tool}" raised an exception') - logger.exception(e) - - # Gather all the tools' results in one single file. Write subdomains into - # separate files, and sort all subdomains. - run_command( - f'cat ' + str(Path(self.results_dir) / 'subdomains_*.txt') + f' > {self.output_path}', - shell=True, - history_file=self.history_file, - scan_id=self.scan_id, - activity_id=self.activity_id) - run_command( - f'sort -u {self.output_path} -o {self.output_path}', - shell=True, - history_file=self.history_file, - scan_id=self.scan_id, - activity_id=self.activity_id) - - with open(self.output_path) as f: - lines = f.readlines() - - # Parse the output_file file and store Subdomain and EndPoint objects found - # in db. - subdomain_count = 0 - subdomains = [] - urls = [] - for line in lines: - subdomain_name = line.strip() - valid_url = bool(validators.url(subdomain_name)) - valid_domain = ( - bool(validators.domain(subdomain_name)) or - bool(validators.ipv4(subdomain_name)) or - bool(validators.ipv6(subdomain_name)) or - valid_url - ) - if not valid_domain: - logger.error(f'Subdomain {subdomain_name} is not a valid domain, IP or URL. Skipping.') - continue - - if valid_url: - subdomain_name = urlparse(subdomain_name).netloc - - if subdomain_name in self.out_of_scope_subdomains: - logger.error(f'Subdomain {subdomain_name} is out of scope. 
Skipping.') - continue - - # Add subdomain - subdomain, _ = save_subdomain(subdomain_name, ctx=ctx) - if not isinstance(subdomain, Subdomain): - logger.error(f"Invalid subdomain encountered: {subdomain}") - continue - subdomain_count += 1 - subdomains.append(subdomain) - urls.append(subdomain.name) - - # Bulk crawl subdomains - if enable_http_crawl: - ctx['track'] = True - http_crawl(urls, ctx=ctx, update_subdomain_metadatas=True) - else: - url_filter = ctx.get('url_filter') - enable_http_crawl = config.get(ENABLE_HTTP_CRAWL, DEFAULT_ENABLE_HTTP_CRAWL) - # Find root subdomain endpoints - for subdomain in subdomains: - subdomain_name = subdomain.strip() - # Create base endpoint (for scan) - http_url = f'{subdomain.name}{url_filter}' if url_filter else subdomain.name - endpoint, _ = save_endpoint( - http_url, - ctx=ctx, - is_default=True, - subdomain=subdomain - ) - save_subdomain_metadata(subdomain, endpoint) - - # Send notifications - subdomains_str = '\n'.join([f'• `{subdomain.name}`' for subdomain in subdomains]) - self.notify(fields={ - 'Subdomain count': len(subdomains), - 'Subdomains': subdomains_str, - }) - if send_subdomain_changes and self.scan_id and self.domain_id: - added = get_new_added_subdomain(self.scan_id, self.domain_id) - removed = get_removed_subdomain(self.scan_id, self.domain_id) - - if added: - subdomains_str = '\n'.join([f'• `{subdomain}`' for subdomain in added]) - self.notify(fields={'Added subdomains': subdomains_str}) - - if removed: - subdomains_str = '\n'.join([f'• `{subdomain}`' for subdomain in removed]) - self.notify(fields={'Removed subdomains': subdomains_str}) - - if send_interesting and self.scan_id and self.domain_id: - interesting_subdomains = get_interesting_subdomains(self.scan_id, self.domain_id) - if interesting_subdomains: - subdomains_str = '\n'.join([f'• `{subdomain}`' for subdomain in interesting_subdomains]) - self.notify(fields={'Interesting subdomains': subdomains_str}) - - return SubdomainSerializer(subdomains, many=True).data + self, + host=None, + ctx=None, + description=None): + """Uses a set of tools (see SUBDOMAIN_SCAN_DEFAULT_TOOLS) to scan all + subdomains associated with a domain. + + Args: + host (str): Hostname to scan. + + Returns: + subdomains (list): List of subdomain names. 
+ """ + if not host: + host = self.subdomain.name if self.subdomain else self.domain.name + + if self.url_filter: + logger.warning(f'Ignoring subdomains scan as an URL path filter was passed ({self.url_filter}).') + return + + # Config + config = self.yaml_configuration.get(SUBDOMAIN_DISCOVERY) or {} + enable_http_crawl = config.get(ENABLE_HTTP_CRAWL) or self.yaml_configuration.get(ENABLE_HTTP_CRAWL, DEFAULT_ENABLE_HTTP_CRAWL) + threads = config.get(THREADS) or self.yaml_configuration.get(THREADS, DEFAULT_THREADS) + timeout = config.get(TIMEOUT) or self.yaml_configuration.get(TIMEOUT, DEFAULT_HTTP_TIMEOUT) + tools = config.get(USES_TOOLS, SUBDOMAIN_SCAN_DEFAULT_TOOLS) + default_subdomain_tools = [tool.name.lower() for tool in InstalledExternalTool.objects.filter(is_default=True).filter(is_subdomain_gathering=True)] + custom_subdomain_tools = [tool.name.lower() for tool in InstalledExternalTool.objects.filter(is_default=False).filter(is_subdomain_gathering=True)] + send_subdomain_changes, send_interesting = False, False + notif = Notification.objects.first() + if notif: + send_subdomain_changes = notif.send_subdomain_changes_notif + send_interesting = notif.send_interesting_notif + + # Gather tools to run for subdomain scan + if ALL in tools: + tools = SUBDOMAIN_SCAN_DEFAULT_TOOLS + custom_subdomain_tools + tools = [t.lower() for t in tools] + + # Make exception for amass since tool name is amass, but command is amass-active/passive + default_subdomain_tools.append('amass-passive') + default_subdomain_tools.append('amass-active') + + # Run tools + for tool in tools: + cmd = None + logger.info(f'Scanning subdomains for {host} with {tool}') + proxy = get_random_proxy() + if tool in default_subdomain_tools: + if tool == 'amass-passive': + use_amass_config = config.get(USE_AMASS_CONFIG, False) + cmd = f'amass enum -passive -d {host} -o ' + str(Path(self.results_dir) / 'subdomains_amass.txt') + cmd += (' -config ' + str(Path.home() / '.config' / 'amass.ini')) if use_amass_config else '' + + elif tool == 'amass-active': + use_amass_config = config.get(USE_AMASS_CONFIG, False) + amass_wordlist_name = config.get(AMASS_WORDLIST, 'deepmagic.com-prefixes-top50000') + wordlist_path = str(Path(RENGINE_WORDLISTS) / f'{amass_wordlist_name}.txt') + cmd = f'amass enum -active -d {host} -o ' + str(Path(self.results_dir) / 'subdomains_amass_active.txt') + cmd += (' -config ' + str(Path.home() / '.config' / 'amass.ini')) if use_amass_config else '' + cmd += f' -brute -w {wordlist_path}' + + elif tool == 'sublist3r': + cmd = f'sublist3r -d {host} -t {threads} -o ' + str(Path(self.results_dir) / 'subdomains_sublister.txt') + + elif tool == 'subfinder': + cmd = f'subfinder -d {host} -o ' + str(Path(self.results_dir) / 'subdomains_subfinder.txt') + use_subfinder_config = config.get(USE_SUBFINDER_CONFIG, False) + cmd += (' -config ' + str(Path.home() / '.config' / 'subfinder' / 'config.yaml')) if use_subfinder_config else '' + cmd += f' -proxy {proxy}' if proxy else '' + cmd += f' -timeout {timeout}' if timeout else '' + cmd += f' -t {threads}' if threads else '' + cmd += f' -silent' + + elif tool == 'oneforall': + cmd = f'oneforall --target {host} run' + cmd_extract = f'cut -d\',\' -f6 ' + str(Path(RENGINE_TOOL_GITHUB_PATH) / 'OneForAll' / 'results' / f'{host}.csv') + ' > ' + str(Path(self.results_dir) / 'subdomains_oneforall.txt') + cmd_rm = f'rm -rf ' + str(Path(RENGINE_TOOL_GITHUB_PATH) / 'OneForAll' / 'results'/ f'{host}.csv') + cmd += f' && {cmd_extract} && {cmd_rm}' + + elif tool == 'ctfr': + results_file = 
str(Path(self.results_dir) / 'subdomains_ctfr.txt') + cmd = f'ctfr -d {host} -o {results_file}' + cmd_extract = f"cat {results_file} | sed 's/\*.//g' | tail -n +12 | uniq | sort > {results_file}" + cmd += f' && {cmd_extract}' + + elif tool == 'tlsx': + results_file = str(Path(self.results_dir) / 'subdomains_tlsx.txt') + cmd = f'tlsx -san -cn -silent -ro -host {host}' + cmd += f" | sed -n '/^\([a-zA-Z0-9]\([-a-zA-Z0-9]*[a-zA-Z0-9]\)\?\.\)\+{host}$/p' | uniq | sort" + cmd += f' > {results_file}' + + elif tool == 'netlas': + results_file = str(Path(self.results_dir) / 'subdomains_netlas.txt') + cmd = f'netlas search -d domain -i domain domain:"*.{host}" -f json' + netlas_key = get_netlas_key() + cmd += f' -a {netlas_key}' if netlas_key else '' + cmd_extract = f"grep -oE '([a-zA-Z0-9]([-a-zA-Z0-9]*[a-zA-Z0-9])?\.)+{host}'" + cmd += f' | {cmd_extract} > {results_file}' + + elif tool in custom_subdomain_tools: + tool_query = InstalledExternalTool.objects.filter(name__icontains=tool.lower()) + if not tool_query.exists(): + logger.error(f'{tool} configuration does not exists. Skipping.') + continue + custom_tool = tool_query.first() + cmd = custom_tool.subdomain_gathering_command + if '{TARGET}' not in cmd: + logger.error(f'Missing {{TARGET}} placeholders in {tool} configuration. Skipping.') + continue + if '{OUTPUT}' not in cmd: + logger.error(f'Missing {{OUTPUT}} placeholders in {tool} configuration. Skipping.') + continue + + + cmd = cmd.replace('{TARGET}', host) + cmd = cmd.replace('{OUTPUT}', str(Path(self.results_dir) / f'subdomains_{tool}.txt')) + cmd = cmd.replace('{PATH}', custom_tool.github_clone_path) if '{PATH}' in cmd else cmd + else: + logger.warning( + f'Subdomain discovery tool "{tool}" is not supported by reNgine. Skipping.') + continue + + # Run tool + try: + run_command( + cmd, + shell=True, + history_file=self.history_file, + scan_id=self.scan_id, + activity_id=self.activity_id) + except Exception as e: + logger.error( + f'Subdomain discovery tool "{tool}" raised an exception') + logger.exception(e) + + # Gather all the tools' results in one single file. Write subdomains into + # separate files, and sort all subdomains. + run_command( + f'cat ' + str(Path(self.results_dir) / 'subdomains_*.txt') + f' > {self.output_path}', + shell=True, + history_file=self.history_file, + scan_id=self.scan_id, + activity_id=self.activity_id) + run_command( + f'sort -u {self.output_path} -o {self.output_path}', + shell=True, + history_file=self.history_file, + scan_id=self.scan_id, + activity_id=self.activity_id) + + with open(self.output_path) as f: + lines = f.readlines() + + # Parse the output_file file and store Subdomain and EndPoint objects found + # in db. + subdomain_count = 0 + subdomains = [] + urls = [] + for line in lines: + subdomain_name = line.strip() + valid_url = bool(validators.url(subdomain_name)) + valid_domain = ( + bool(validators.domain(subdomain_name)) or + bool(validators.ipv4(subdomain_name)) or + bool(validators.ipv6(subdomain_name)) or + valid_url + ) + if not valid_domain: + logger.error(f'Subdomain {subdomain_name} is not a valid domain, IP or URL. Skipping.') + continue + + if valid_url: + subdomain_name = urlparse(subdomain_name).netloc + + if subdomain_name in self.out_of_scope_subdomains: + logger.error(f'Subdomain {subdomain_name} is out of scope. 
Skipping.') + continue + + # Add subdomain + subdomain, _ = save_subdomain(subdomain_name, ctx=ctx) + if not isinstance(subdomain, Subdomain): + logger.error(f"Invalid subdomain encountered: {subdomain}") + continue + subdomain_count += 1 + subdomains.append(subdomain) + urls.append(subdomain.name) + + # Bulk crawl subdomains + if enable_http_crawl: + ctx['track'] = True + http_crawl(urls, ctx=ctx, update_subdomain_metadatas=True) + else: + url_filter = ctx.get('url_filter') + enable_http_crawl = config.get(ENABLE_HTTP_CRAWL, DEFAULT_ENABLE_HTTP_CRAWL) + # Find root subdomain endpoints + for subdomain in subdomains: + subdomain_name = subdomain.strip() + # Create base endpoint (for scan) + http_url = f'{subdomain.name}{url_filter}' if url_filter else subdomain.name + endpoint, _ = save_endpoint( + http_url, + ctx=ctx, + is_default=True, + subdomain=subdomain + ) + save_subdomain_metadata(subdomain, endpoint) + + # Send notifications + subdomains_str = '\n'.join([f'• `{subdomain.name}`' for subdomain in subdomains]) + self.notify(fields={ + 'Subdomain count': len(subdomains), + 'Subdomains': subdomains_str, + }) + if send_subdomain_changes and self.scan_id and self.domain_id: + added = get_new_added_subdomain(self.scan_id, self.domain_id) + removed = get_removed_subdomain(self.scan_id, self.domain_id) + + if added: + subdomains_str = '\n'.join([f'• `{subdomain}`' for subdomain in added]) + self.notify(fields={'Added subdomains': subdomains_str}) + + if removed: + subdomains_str = '\n'.join([f'• `{subdomain}`' for subdomain in removed]) + self.notify(fields={'Removed subdomains': subdomains_str}) + + if send_interesting and self.scan_id and self.domain_id: + interesting_subdomains = get_interesting_subdomains(self.scan_id, self.domain_id) + if interesting_subdomains: + subdomains_str = '\n'.join([f'• `{subdomain}`' for subdomain in interesting_subdomains]) + self.notify(fields={'Interesting subdomains': subdomains_str}) + + return SubdomainSerializer(subdomains, many=True).data @app.task(name='osint', queue='main_scan_queue', base=RengineTask, bind=True) def osint(self, host=None, ctx={}, description=None): - """Run Open-Source Intelligence tools on selected domain. - - Args: - host (str): Hostname to scan. - - Returns: - dict: Results from osint discovery and dorking. - """ - config = self.yaml_configuration.get(OSINT) or OSINT_DEFAULT_CONFIG - results = {} - - grouped_tasks = [] - - if 'discover' in config: - logger.info('Starting OSINT Discovery') - ctx['track'] = False - _task = osint_discovery.si( - config=config, - host=self.scan.domain.name, - scan_history_id=self.scan.id, - activity_id=self.activity_id, - results_dir=self.results_dir, - ctx=ctx - ) - grouped_tasks.append(_task) - - if OSINT_DORK in config or OSINT_CUSTOM_DORK in config: - logger.info('Starting OSINT Dorking') - _task = dorking.si( - config=config, - host=self.scan.domain.name, - scan_history_id=self.scan.id, - results_dir=self.results_dir - ) - grouped_tasks.append(_task) - - celery_group = group(grouped_tasks) - job = celery_group.apply_async() - while not job.ready(): - # wait for all jobs to complete - time.sleep(5) - - logger.info('OSINT Tasks finished...') + """Run Open-Source Intelligence tools on selected domain. + + Args: + host (str): Hostname to scan. + + Returns: + dict: Results from osint discovery and dorking. 
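The aggregation step above leans on the validators package to decide whether each line produced by the tools is a bare domain, an IP address, or a full URL before it is saved; full URLs are reduced to their host part. A minimal standalone sketch of that normalization (only validators and urllib.parse are assumed; the helper name is illustrative):

    from urllib.parse import urlparse
    import validators  # third-party package already used by the task above

    def normalize_candidate(line):
        """Return a bare host for a candidate line, or None if it is invalid."""
        candidate = line.strip()
        if validators.url(candidate):
            # Full URLs are reduced to their network location (host part).
            return urlparse(candidate).netloc
        if (validators.domain(candidate)
                or validators.ipv4(candidate)
                or validators.ipv6(candidate)):
            return candidate
        return None  # neither a domain, an IP, nor a URL

    print(normalize_candidate('https://api.example.com/login'))  # api.example.com
    print(normalize_candidate('api.example.com'))                # api.example.com
    print(normalize_candidate('not a host'))                     # None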
+ """ + config = self.yaml_configuration.get(OSINT) or OSINT_DEFAULT_CONFIG + results = {} + + grouped_tasks = [] + + if 'discover' in config: + logger.info('Starting OSINT Discovery') + ctx['track'] = False + _task = osint_discovery.si( + config=config, + host=self.scan.domain.name, + scan_history_id=self.scan.id, + activity_id=self.activity_id, + results_dir=self.results_dir, + ctx=ctx + ) + grouped_tasks.append(_task) + + if OSINT_DORK in config or OSINT_CUSTOM_DORK in config: + logger.info('Starting OSINT Dorking') + _task = dorking.si( + config=config, + host=self.scan.domain.name, + scan_history_id=self.scan.id, + results_dir=self.results_dir + ) + grouped_tasks.append(_task) + + celery_group = group(grouped_tasks) + job = celery_group.apply_async() + while not job.ready(): + # wait for all jobs to complete + time.sleep(5) + + logger.info('OSINT Tasks finished...') @app.task(name='osint_discovery', queue='osint_discovery_queue', bind=False) def osint_discovery(config, host, scan_history_id, activity_id, results_dir, ctx={}): - """Run OSINT discovery. - - Args: - config (dict): yaml_configuration - host (str): target name - scan_history_id (startScan.ScanHistory): Scan History ID - results_dir (str): Path to store scan results - - Returns: - dict: osint metadat and theHarvester and h8mail results. - """ - scan_history = ScanHistory.objects.get(pk=scan_history_id) - osint_lookup = config.get(OSINT_DISCOVER, []) - osint_intensity = config.get(INTENSITY, 'normal') - documents_limit = config.get(OSINT_DOCUMENTS_LIMIT, 50) - results = {} - meta_info = [] - emails = [] - creds = [] - - # Get and save meta info - if 'metainfo' in osint_lookup: - logger.info('Saving Metainfo') - if osint_intensity == 'normal': - meta_dict = DottedDict({ - 'osint_target': host, - 'domain': host, - 'scan_id': scan_history_id, - 'documents_limit': documents_limit - }) - meta_info.append(save_metadata_info(meta_dict)) - - # TODO: disabled for now - # elif osint_intensity == 'deep': - # subdomains = Subdomain.objects - # if self.scan: - # subdomains = subdomains.filter(scan_history=self.scan) - # for subdomain in subdomains: - # meta_dict = DottedDict({ - # 'osint_target': subdomain.name, - # 'domain': self.domain, - # 'scan_id': self.scan_id, - # 'documents_limit': documents_limit - # }) - # meta_info.append(save_metadata_info(meta_dict)) - - grouped_tasks = [] - - if 'emails' in osint_lookup: - logger.info('Lookup for emails') - _task = h8mail.si( - config=config, - host=host, - scan_history_id=scan_history_id, - activity_id=activity_id, - results_dir=results_dir, - ctx=ctx - ) - grouped_tasks.append(_task) - - if 'employees' in osint_lookup: - logger.info('Lookup for employees') - ctx['track'] = False - _task = theHarvester.si( - config=config, - host=host, - scan_history_id=scan_history_id, - activity_id=activity_id, - results_dir=results_dir, - ctx=ctx - ) - grouped_tasks.append(_task) - - celery_group = group(grouped_tasks) - job = celery_group.apply_async() - while not job.ready(): - # wait for all jobs to complete - time.sleep(5) - - # results['emails'] = results.get('emails', []) + emails - # results['creds'] = creds - # results['meta_info'] = meta_info - return results + """Run OSINT discovery. + + Args: + config (dict): yaml_configuration + host (str): target name + scan_history_id (startScan.ScanHistory): Scan History ID + results_dir (str): Path to store scan results + + Returns: + dict: osint metadat and theHarvester and h8mail results. 
+ """ + scan_history = ScanHistory.objects.get(pk=scan_history_id) + osint_lookup = config.get(OSINT_DISCOVER, []) + osint_intensity = config.get(INTENSITY, 'normal') + documents_limit = config.get(OSINT_DOCUMENTS_LIMIT, 50) + results = {} + meta_info = [] + emails = [] + creds = [] + + # Get and save meta info + if 'metainfo' in osint_lookup: + logger.info('Saving Metainfo') + if osint_intensity == 'normal': + meta_dict = DottedDict({ + 'osint_target': host, + 'domain': host, + 'scan_id': scan_history_id, + 'documents_limit': documents_limit + }) + meta_info.append(save_metadata_info(meta_dict)) + + # TODO: disabled for now + # elif osint_intensity == 'deep': + # subdomains = Subdomain.objects + # if self.scan: + # subdomains = subdomains.filter(scan_history=self.scan) + # for subdomain in subdomains: + # meta_dict = DottedDict({ + # 'osint_target': subdomain.name, + # 'domain': self.domain, + # 'scan_id': self.scan_id, + # 'documents_limit': documents_limit + # }) + # meta_info.append(save_metadata_info(meta_dict)) + + grouped_tasks = [] + + if 'emails' in osint_lookup: + logger.info('Lookup for emails') + _task = h8mail.si( + config=config, + host=host, + scan_history_id=scan_history_id, + activity_id=activity_id, + results_dir=results_dir, + ctx=ctx + ) + grouped_tasks.append(_task) + + if 'employees' in osint_lookup: + logger.info('Lookup for employees') + ctx['track'] = False + _task = theHarvester.si( + config=config, + host=host, + scan_history_id=scan_history_id, + activity_id=activity_id, + results_dir=results_dir, + ctx=ctx + ) + grouped_tasks.append(_task) + + celery_group = group(grouped_tasks) + job = celery_group.apply_async() + while not job.ready(): + # wait for all jobs to complete + time.sleep(5) + + # results['emails'] = results.get('emails', []) + emails + # results['creds'] = creds + # results['meta_info'] = meta_info + return results @app.task(name='dorking', bind=False, queue='dorking_queue') def dorking(config, host, scan_history_id, results_dir): - """Run Google dorks. - - Args: - config (dict): yaml_configuration - host (str): target name - scan_history_id (startScan.ScanHistory): Scan History ID - results_dir (str): Path to store scan results - - Returns: - list: Dorking results for each dork ran. 
- """ - # Some dork sources: https://github.com/six2dez/degoogle_hunter/blob/master/degoogle_hunter.sh - scan_history = ScanHistory.objects.get(pk=scan_history_id) - dorks = config.get(OSINT_DORK, []) - custom_dorks = config.get(OSINT_CUSTOM_DORK, []) - results = [] - # custom dorking has higher priority - try: - for custom_dork in custom_dorks: - lookup_target = custom_dork.get('lookup_site') - # replace with original host if _target_ - lookup_target = host if lookup_target == '_target_' else lookup_target - if 'lookup_extensions' in custom_dork: - results = get_and_save_dork_results( - lookup_target=lookup_target, - results_dir=results_dir, - type='custom_dork', - lookup_extensions=custom_dork.get('lookup_extensions'), - scan_history=scan_history - ) - elif 'lookup_keywords' in custom_dork: - results = get_and_save_dork_results( - lookup_target=lookup_target, - results_dir=results_dir, - type='custom_dork', - lookup_keywords=custom_dork.get('lookup_keywords'), - scan_history=scan_history - ) - except Exception as e: - logger.exception(e) - - # default dorking - try: - for dork in dorks: - logger.info(f'Getting dork information for {dork}') - if dork == 'stackoverflow': - results = get_and_save_dork_results( - lookup_target='stackoverflow.com', - results_dir=results_dir, - type=dork, - lookup_keywords=host, - scan_history=scan_history - ) - - elif dork == 'login_pages': - results = get_and_save_dork_results( - lookup_target=host, - results_dir=results_dir, - type=dork, - lookup_keywords='/login/,login.html', - page_count=5, - scan_history=scan_history - ) - - elif dork == 'admin_panels': - results = get_and_save_dork_results( - lookup_target=host, - results_dir=results_dir, - type=dork, - lookup_keywords='/admin/,admin.html', - page_count=5, - scan_history=scan_history - ) - - elif dork == 'dashboard_pages': - results = get_and_save_dork_results( - lookup_target=host, - results_dir=results_dir, - type=dork, - lookup_keywords='/dashboard/,dashboard.html', - page_count=5, - scan_history=scan_history - ) - - elif dork == 'social_media' : - social_websites = [ - 'tiktok.com', - 'facebook.com', - 'twitter.com', - 'youtube.com', - 'reddit.com' - ] - for site in social_websites: - results = get_and_save_dork_results( - lookup_target=site, - results_dir=results_dir, - type=dork, - lookup_keywords=host, - scan_history=scan_history - ) - - elif dork == 'project_management' : - project_websites = [ - 'trello.com', - 'atlassian.net' - ] - for site in project_websites: - results = get_and_save_dork_results( - lookup_target=site, - results_dir=results_dir, - type=dork, - lookup_keywords=host, - scan_history=scan_history - ) - - elif dork == 'code_sharing' : - project_websites = [ - 'github.com', - 'gitlab.com', - 'bitbucket.org' - ] - for site in project_websites: - results = get_and_save_dork_results( - lookup_target=site, - results_dir=results_dir, - type=dork, - lookup_keywords=host, - scan_history=scan_history - ) - - elif dork == 'config_files' : - config_file_exts = [ - 'env', - 'xml', - 'conf', - 'toml', - 'yml', - 'yaml', - 'cnf', - 'inf', - 'rdp', - 'ora', - 'txt', - 'cfg', - 'ini' - ] - results = get_and_save_dork_results( - lookup_target=host, - results_dir=results_dir, - type=dork, - lookup_extensions=','.join(config_file_exts), - page_count=4, - scan_history=scan_history - ) - - elif dork == 'jenkins' : - lookup_keyword = 'Jenkins' - results = get_and_save_dork_results( - lookup_target=host, - results_dir=results_dir, - type=dork, - lookup_keywords=lookup_keyword, - page_count=1, - 
scan_history=scan_history - ) - - elif dork == 'wordpress_files' : - lookup_keywords = [ - '/wp-content/', - '/wp-includes/' - ] - results = get_and_save_dork_results( - lookup_target=host, - results_dir=results_dir, - type=dork, - lookup_keywords=','.join(lookup_keywords), - page_count=5, - scan_history=scan_history - ) - - elif dork == 'php_error' : - lookup_keywords = [ - 'PHP Parse error', - 'PHP Warning', - 'PHP Error' - ] - results = get_and_save_dork_results( - lookup_target=host, - results_dir=results_dir, - type=dork, - lookup_keywords=','.join(lookup_keywords), - page_count=5, - scan_history=scan_history - ) - - elif dork == 'jenkins' : - lookup_keywords = [ - 'PHP Parse error', - 'PHP Warning', - 'PHP Error' - ] - results = get_and_save_dork_results( - lookup_target=host, - results_dir=results_dir, - type=dork, - lookup_keywords=','.join(lookup_keywords), - page_count=5, - scan_history=scan_history - ) - - elif dork == 'exposed_documents' : - docs_file_ext = [ - 'doc', - 'docx', - 'odt', - 'pdf', - 'rtf', - 'sxw', - 'psw', - 'ppt', - 'pptx', - 'pps', - 'csv' - ] - results = get_and_save_dork_results( - lookup_target=host, - results_dir=results_dir, - type=dork, - lookup_extensions=','.join(docs_file_ext), - page_count=7, - scan_history=scan_history - ) - - elif dork == 'db_files' : - file_ext = [ - 'sql', - 'db', - 'dbf', - 'mdb' - ] - results = get_and_save_dork_results( - lookup_target=host, - results_dir=results_dir, - type=dork, - lookup_extensions=','.join(file_ext), - page_count=1, - scan_history=scan_history - ) - - elif dork == 'git_exposed' : - file_ext = [ - 'git', - ] - results = get_and_save_dork_results( - lookup_target=host, - results_dir=results_dir, - type=dork, - lookup_extensions=','.join(file_ext), - page_count=1, - scan_history=scan_history - ) - - except Exception as e: - logger.exception(e) - return results + """Run Google dorks. + + Args: + config (dict): yaml_configuration + host (str): target name + scan_history_id (startScan.ScanHistory): Scan History ID + results_dir (str): Path to store scan results + + Returns: + list: Dorking results for each dork ran. 
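Each dork branch above differs only in the lookup target, the comma-joined keywords or extensions, and the page count handed to get_and_save_dork_results(), an internal reNgine helper whose signature is inferred from these calls. A short sketch of how the config_files branch assembles its arguments:

    # Illustration only: mirrors the 'config_files' branch. The reNgine helper
    # get_and_save_dork_results() is not redefined here; the sketch just shows
    # how its keyword arguments are built.
    config_file_exts = ['env', 'xml', 'conf', 'toml', 'yml', 'yaml',
                        'cnf', 'inf', 'rdp', 'ora', 'txt', 'cfg', 'ini']

    dork_kwargs = {
        'lookup_target': 'example.com',                   # the scanned host
        'type': 'config_files',
        'lookup_extensions': ','.join(config_file_exts),  # 'env,xml,conf,...'
        'page_count': 4,
    }
    print(dork_kwargs['lookup_extensions'])
    # results = get_and_save_dork_results(results_dir=results_dir,
    #                                     scan_history=scan_history, **dork_kwargs)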
+ """ + # Some dork sources: https://github.com/six2dez/degoogle_hunter/blob/master/degoogle_hunter.sh + scan_history = ScanHistory.objects.get(pk=scan_history_id) + dorks = config.get(OSINT_DORK, []) + custom_dorks = config.get(OSINT_CUSTOM_DORK, []) + results = [] + # custom dorking has higher priority + try: + for custom_dork in custom_dorks: + lookup_target = custom_dork.get('lookup_site') + # replace with original host if _target_ + lookup_target = host if lookup_target == '_target_' else lookup_target + if 'lookup_extensions' in custom_dork: + results = get_and_save_dork_results( + lookup_target=lookup_target, + results_dir=results_dir, + type='custom_dork', + lookup_extensions=custom_dork.get('lookup_extensions'), + scan_history=scan_history + ) + elif 'lookup_keywords' in custom_dork: + results = get_and_save_dork_results( + lookup_target=lookup_target, + results_dir=results_dir, + type='custom_dork', + lookup_keywords=custom_dork.get('lookup_keywords'), + scan_history=scan_history + ) + except Exception as e: + logger.exception(e) + + # default dorking + try: + for dork in dorks: + logger.info(f'Getting dork information for {dork}') + if dork == 'stackoverflow': + results = get_and_save_dork_results( + lookup_target='stackoverflow.com', + results_dir=results_dir, + type=dork, + lookup_keywords=host, + scan_history=scan_history + ) + + elif dork == 'login_pages': + results = get_and_save_dork_results( + lookup_target=host, + results_dir=results_dir, + type=dork, + lookup_keywords='/login/,login.html', + page_count=5, + scan_history=scan_history + ) + + elif dork == 'admin_panels': + results = get_and_save_dork_results( + lookup_target=host, + results_dir=results_dir, + type=dork, + lookup_keywords='/admin/,admin.html', + page_count=5, + scan_history=scan_history + ) + + elif dork == 'dashboard_pages': + results = get_and_save_dork_results( + lookup_target=host, + results_dir=results_dir, + type=dork, + lookup_keywords='/dashboard/,dashboard.html', + page_count=5, + scan_history=scan_history + ) + + elif dork == 'social_media' : + social_websites = [ + 'tiktok.com', + 'facebook.com', + 'twitter.com', + 'youtube.com', + 'reddit.com' + ] + for site in social_websites: + results = get_and_save_dork_results( + lookup_target=site, + results_dir=results_dir, + type=dork, + lookup_keywords=host, + scan_history=scan_history + ) + + elif dork == 'project_management' : + project_websites = [ + 'trello.com', + 'atlassian.net' + ] + for site in project_websites: + results = get_and_save_dork_results( + lookup_target=site, + results_dir=results_dir, + type=dork, + lookup_keywords=host, + scan_history=scan_history + ) + + elif dork == 'code_sharing' : + project_websites = [ + 'github.com', + 'gitlab.com', + 'bitbucket.org' + ] + for site in project_websites: + results = get_and_save_dork_results( + lookup_target=site, + results_dir=results_dir, + type=dork, + lookup_keywords=host, + scan_history=scan_history + ) + + elif dork == 'config_files' : + config_file_exts = [ + 'env', + 'xml', + 'conf', + 'toml', + 'yml', + 'yaml', + 'cnf', + 'inf', + 'rdp', + 'ora', + 'txt', + 'cfg', + 'ini' + ] + results = get_and_save_dork_results( + lookup_target=host, + results_dir=results_dir, + type=dork, + lookup_extensions=','.join(config_file_exts), + page_count=4, + scan_history=scan_history + ) + + elif dork == 'jenkins' : + lookup_keyword = 'Jenkins' + results = get_and_save_dork_results( + lookup_target=host, + results_dir=results_dir, + type=dork, + lookup_keywords=lookup_keyword, + page_count=1, + 
scan_history=scan_history + ) + + elif dork == 'wordpress_files' : + lookup_keywords = [ + '/wp-content/', + '/wp-includes/' + ] + results = get_and_save_dork_results( + lookup_target=host, + results_dir=results_dir, + type=dork, + lookup_keywords=','.join(lookup_keywords), + page_count=5, + scan_history=scan_history + ) + + elif dork == 'php_error' : + lookup_keywords = [ + 'PHP Parse error', + 'PHP Warning', + 'PHP Error' + ] + results = get_and_save_dork_results( + lookup_target=host, + results_dir=results_dir, + type=dork, + lookup_keywords=','.join(lookup_keywords), + page_count=5, + scan_history=scan_history + ) + + elif dork == 'jenkins' : + lookup_keywords = [ + 'PHP Parse error', + 'PHP Warning', + 'PHP Error' + ] + results = get_and_save_dork_results( + lookup_target=host, + results_dir=results_dir, + type=dork, + lookup_keywords=','.join(lookup_keywords), + page_count=5, + scan_history=scan_history + ) + + elif dork == 'exposed_documents' : + docs_file_ext = [ + 'doc', + 'docx', + 'odt', + 'pdf', + 'rtf', + 'sxw', + 'psw', + 'ppt', + 'pptx', + 'pps', + 'csv' + ] + results = get_and_save_dork_results( + lookup_target=host, + results_dir=results_dir, + type=dork, + lookup_extensions=','.join(docs_file_ext), + page_count=7, + scan_history=scan_history + ) + + elif dork == 'db_files' : + file_ext = [ + 'sql', + 'db', + 'dbf', + 'mdb' + ] + results = get_and_save_dork_results( + lookup_target=host, + results_dir=results_dir, + type=dork, + lookup_extensions=','.join(file_ext), + page_count=1, + scan_history=scan_history + ) + + elif dork == 'git_exposed' : + file_ext = [ + 'git', + ] + results = get_and_save_dork_results( + lookup_target=host, + results_dir=results_dir, + type=dork, + lookup_extensions=','.join(file_ext), + page_count=1, + scan_history=scan_history + ) + + except Exception as e: + logger.exception(e) + return results @app.task(name='theHarvester', queue='theHarvester_queue', bind=False) def theHarvester(config, host, scan_history_id, activity_id, results_dir, ctx={}): - """Run theHarvester to get save emails, hosts, employees found in domain. - - Args: - config (dict): yaml_configuration - host (str): target name - scan_history_id (startScan.ScanHistory): Scan History ID - activity_id: ScanActivity ID - results_dir (str): Path to store scan results - ctx (dict): context of scan - - Returns: - dict: Dict of emails, employees, hosts and ips found during crawling. 
- """ - scan_history = ScanHistory.objects.get(pk=scan_history_id) - enable_http_crawl = config.get(ENABLE_HTTP_CRAWL, DEFAULT_ENABLE_HTTP_CRAWL) - output_path_json = str(Path(results_dir) / 'theHarvester.json') - theHarvester_dir = str(Path(RENGINE_TOOL_GITHUB_PATH) / 'theHarvester') - history_file = str(Path(results_dir) / 'commands.txt') - cmd = f'theHarvester -d {host} -b all -f {output_path_json}' - - # Update proxies.yaml - proxy_query = Proxy.objects.all() - if proxy_query.exists(): - proxy = proxy_query.first() - if proxy.use_proxy: - proxy_list = proxy.proxies.splitlines() - yaml_data = {'http' : proxy_list} - with open(Path(theHarvester_dir) / 'proxies.yaml', 'w') as file: - yaml.dump(yaml_data, file) - - # Run cmd - run_command( - cmd, - shell=False, - cwd=theHarvester_dir, - history_file=history_file, - scan_id=scan_history_id, - activity_id=activity_id) - - # Get file location - if not os.path.isfile(output_path_json): - logger.error(f'Could not open {output_path_json}') - return {} - - # Load theHarvester results - with open(output_path_json, 'r') as f: - data = json.load(f) - - # Re-indent theHarvester JSON - with open(output_path_json, 'w') as f: - json.dump(data, f, indent=4) - - emails = data.get('emails', []) - for email_address in emails: - email, _ = save_email(email_address, scan_history=scan_history) - # if email: - # self.notify(fields={'Emails': f'• `{email.address}`'}) - - linkedin_people = data.get('linkedin_people', []) - for people in linkedin_people: - employee, _ = save_employee( - people, - designation='linkedin', - scan_history=scan_history) - # if employee: - # self.notify(fields={'LinkedIn people': f'• {employee.name}'}) - - twitter_people = data.get('twitter_people', []) - for people in twitter_people: - employee, _ = save_employee( - people, - designation='twitter', - scan_history=scan_history) - # if employee: - # self.notify(fields={'Twitter people': f'• {employee.name}'}) - - hosts = data.get('hosts', []) - urls = [] - for host in hosts: - split = tuple(host.split(':')) - http_url = split[0] - subdomain_name = get_subdomain_from_url(http_url) - subdomain, _ = save_subdomain(subdomain_name, ctx=ctx) - if not isinstance(subdomain, Subdomain): - logger.error(f"Invalid subdomain encountered: {subdomain}") - continue - endpoint, _ = save_endpoint( - http_url, - crawl=False, - ctx=ctx, - subdomain=subdomain) - # if endpoint: - # urls.append(endpoint.http_url) - # self.notify(fields={'Hosts': f'• {endpoint.http_url}'}) - - # if enable_http_crawl: - # ctx['track'] = False - # http_crawl(urls, ctx=ctx) - - # TODO: Lots of ips unrelated with our domain are found, disabling - # this for now. - # ips = data.get('ips', []) - # for ip_address in ips: - # ip, created = save_ip_address( - # ip_address, - # subscan=subscan) - # if ip: - # send_task_notif.delay( - # 'osint', - # scan_history_id=scan_history_id, - # subscan_id=subscan_id, - # severity='success', - # update_fields={'IPs': f'{ip.address}'}) - return data + """Run theHarvester to get save emails, hosts, employees found in domain. + + Args: + config (dict): yaml_configuration + host (str): target name + scan_history_id (startScan.ScanHistory): Scan History ID + activity_id: ScanActivity ID + results_dir (str): Path to store scan results + ctx (dict): context of scan + + Returns: + dict: Dict of emails, employees, hosts and ips found during crawling. 
+ """ + scan_history = ScanHistory.objects.get(pk=scan_history_id) + enable_http_crawl = config.get(ENABLE_HTTP_CRAWL, DEFAULT_ENABLE_HTTP_CRAWL) + output_path_json = str(Path(results_dir) / 'theHarvester.json') + theHarvester_dir = str(Path.home() / ".config" / 'theHarvester') + history_file = str(Path(results_dir) / 'commands.txt') + cmd = f'theHarvester -d {host} -b all -f {output_path_json}' + + # Update proxies.yaml + proxy_query = Proxy.objects.all() + if proxy_query.exists(): + proxy = proxy_query.first() + if proxy.use_proxy: + proxy_list = proxy.proxies.splitlines() + yaml_data = {'http' : proxy_list} + with open(Path(theHarvester_dir) / 'proxies.yaml', 'w') as file: + yaml.dump(yaml_data, file) + + # Run cmd + run_command( + cmd, + shell=False, + cwd=theHarvester_dir, + history_file=history_file, + scan_id=scan_history_id, + activity_id=activity_id) + + # Get file location + if not os.path.isfile(output_path_json): + logger.error(f'Could not open {output_path_json}') + return {} + + # Load theHarvester results + with open(output_path_json, 'r') as f: + data = json.load(f) + + # Re-indent theHarvester JSON + with open(output_path_json, 'w') as f: + json.dump(data, f, indent=4) + + emails = data.get('emails', []) + for email_address in emails: + email, _ = save_email(email_address, scan_history=scan_history) + # if email: + # self.notify(fields={'Emails': f'• `{email.address}`'}) + + linkedin_people = data.get('linkedin_people', []) + for people in linkedin_people: + employee, _ = save_employee( + people, + designation='linkedin', + scan_history=scan_history) + # if employee: + # self.notify(fields={'LinkedIn people': f'• {employee.name}'}) + + twitter_people = data.get('twitter_people', []) + for people in twitter_people: + employee, _ = save_employee( + people, + designation='twitter', + scan_history=scan_history) + # if employee: + # self.notify(fields={'Twitter people': f'• {employee.name}'}) + + hosts = data.get('hosts', []) + urls = [] + for host in hosts: + split = tuple(host.split(':')) + http_url = split[0] + subdomain_name = get_subdomain_from_url(http_url) + subdomain, _ = save_subdomain(subdomain_name, ctx=ctx) + if not isinstance(subdomain, Subdomain): + logger.error(f"Invalid subdomain encountered: {subdomain}") + continue + endpoint, _ = save_endpoint( + http_url, + crawl=False, + ctx=ctx, + subdomain=subdomain) + # if endpoint: + # urls.append(endpoint.http_url) + # self.notify(fields={'Hosts': f'• {endpoint.http_url}'}) + + # if enable_http_crawl: + # ctx['track'] = False + # http_crawl(urls, ctx=ctx) + + # TODO: Lots of ips unrelated with our domain are found, disabling + # this for now. + # ips = data.get('ips', []) + # for ip_address in ips: + # ip, created = save_ip_address( + # ip_address, + # subscan=subscan) + # if ip: + # send_task_notif.delay( + # 'osint', + # scan_history_id=scan_history_id, + # subscan_id=subscan_id, + # severity='success', + # update_fields={'IPs': f'{ip.address}'}) + return data @app.task(name='h8mail', queue='h8mail_queue', bind=False) def h8mail(config, host, scan_history_id, activity_id, results_dir, ctx={}): - """Run h8mail. - - Args: - config (dict): yaml_configuration - host (str): target name - scan_history_id (startScan.ScanHistory): Scan History ID - activity_id: ScanActivity ID - results_dir (str): Path to store scan results - ctx (dict): context of scan - - Returns: - list[dict]: List of credentials info. 
- """ - logger.warning('Getting leaked credentials') - scan_history = ScanHistory.objects.get(pk=scan_history_id) - input_path = str(Path(results_dir) / 'emails.txt') - output_file = str(Path(results_dir) / 'h8mail.json') - - cmd = f'h8mail -t {input_path} --json {output_file}' - history_file = str(Path(results_dir) / 'commands.txt') - - run_command( - cmd, - history_file=history_file, - scan_id=scan_history_id, - activity_id=activity_id) - - with open(output_file) as f: - data = json.load(f) - creds = data.get('targets', []) - - # TODO: go through h8mail output and save emails to DB - for cred in creds: - logger.warning(cred) - email_address = cred['target'] - pwn_num = cred['pwn_num'] - pwn_data = cred.get('data', []) - email, created = save_email(email_address, scan_history=scan) - # if email: - # self.notify(fields={'Emails': f'• `{email.address}`'}) - return creds + """Run h8mail. + + Args: + config (dict): yaml_configuration + host (str): target name + scan_history_id (startScan.ScanHistory): Scan History ID + activity_id: ScanActivity ID + results_dir (str): Path to store scan results + ctx (dict): context of scan + + Returns: + list[dict]: List of credentials info. + """ + logger.warning('Getting leaked credentials') + scan_history = ScanHistory.objects.get(pk=scan_history_id) + input_path = str(Path(results_dir) / 'emails.txt') + output_file = str(Path(results_dir) / 'h8mail.json') + + cmd = f'h8mail -t {input_path} --json {output_file}' + history_file = str(Path(results_dir) / 'commands.txt') + + run_command( + cmd, + history_file=history_file, + scan_id=scan_history_id, + activity_id=activity_id) + + with open(output_file) as f: + data = json.load(f) + creds = data.get('targets', []) + + # TODO: go through h8mail output and save emails to DB + for cred in creds: + logger.warning(cred) + email_address = cred['target'] + pwn_num = cred['pwn_num'] + pwn_data = cred.get('data', []) + email, created = save_email(email_address, scan_history=scan) + # if email: + # self.notify(fields={'Emails': f'• `{email.address}`'}) + return creds @app.task(name='screenshot', queue='main_scan_queue', base=RengineTask, bind=True) def screenshot(self, ctx={}, description=None): - """Uses EyeWitness to gather screenshot of a domain and/or url. - - Args: - description (str, optional): Task description shown in UI. 
- """ - - # Config - screenshots_path = str(Path(self.results_dir) / 'screenshots') - output_path = str(Path(self.results_dir) / 'screenshots' / self.filename) - alive_endpoints_file = str(Path(self.results_dir) / 'endpoints_alive.txt') - config = self.yaml_configuration.get(SCREENSHOT) or {} - enable_http_crawl = config.get(ENABLE_HTTP_CRAWL, DEFAULT_ENABLE_HTTP_CRAWL) - intensity = config.get(INTENSITY) or self.yaml_configuration.get(INTENSITY, DEFAULT_SCAN_INTENSITY) - timeout = config.get(TIMEOUT) or self.yaml_configuration.get(TIMEOUT, DEFAULT_HTTP_TIMEOUT + 5) - threads = config.get(THREADS) or self.yaml_configuration.get(THREADS, DEFAULT_THREADS) - - # If intensity is normal, grab only the root endpoints of each subdomain - strict = True if intensity == 'normal' else False - - # Get URLs to take screenshot of - get_http_urls( - is_alive=enable_http_crawl, - strict=strict, - write_filepath=alive_endpoints_file, - get_only_default_urls=True, - ctx=ctx - ) - - # Send start notif - notification = Notification.objects.first() - send_output_file = notification.send_scan_output_file if notification else False - - # Run cmd - cmd = f'EyeWitness -f {alive_endpoints_file} -d {screenshots_path} --no-prompt' - cmd += f' --timeout {timeout}' if timeout > 0 else '' - cmd += f' --threads {threads}' if threads > 0 else '' - run_command( - cmd, - shell=False, - history_file=self.history_file, - scan_id=self.scan_id, - activity_id=self.activity_id) - if not os.path.isfile(output_path): - logger.error(f'Could not load EyeWitness results at {output_path} for {self.domain.name}.') - return - - # Loop through results and save objects in DB - screenshot_paths = [] - with open(output_path, 'r') as file: - reader = csv.reader(file) - header = next(reader) # Skip header row - indices = [header.index(col) for col in ["Protocol", "Port", "Domain", "Request Status", "Screenshot Path", " Source Path"]] - for row in reader: - protocol, port, subdomain_name, status, screenshot_path, source_path = extract_columns(row, indices) - logger.info(f'{protocol}:{port}:{subdomain_name}:{status}') - subdomain_query = Subdomain.objects.filter(name=subdomain_name) - if self.scan: - subdomain_query = subdomain_query.filter(scan_history=self.scan) - if status == 'Successful' and subdomain_query.exists(): - subdomain = subdomain_query.first() - screenshot_paths.append(screenshot_path) - subdomain.screenshot_path = screenshot_path.replace(RENGINE_RESULTS, '') - subdomain.save() - logger.warning(f'Added screenshot for {subdomain.name} to DB') - - # Remove all db, html extra files in screenshot results - run_command( - f'rm -rf {screenshots_path}/*.csv {screenshots_path}/*.db {screenshots_path}/*.js {screenshots_path}/*.html {screenshots_path}/*.css', - shell=True, - history_file=self.history_file, - scan_id=self.scan_id, - activity_id=self.activity_id) - run_command( - f'rm -rf ' + str(Path(screenshots_path) / 'source'), - shell=True, - history_file=self.history_file, - scan_id=self.scan_id, - activity_id=self.activity_id) - - # Send finish notifs - screenshots_str = '• ' + '\n• '.join([f'`{path}`' for path in screenshot_paths]) - self.notify(fields={'Screenshots': screenshots_str}) - if send_output_file: - for path in screenshot_paths: - title = get_output_file_name( - self.scan_id, - self.subscan_id, - self.filename) - send_file_to_discord.delay(path, title) + """Uses EyeWitness to gather screenshot of a domain and/or url. + + Args: + description (str, optional): Task description shown in UI. 
+ """ + + # Config + screenshots_path = str(Path(self.results_dir) / 'screenshots') + output_path = str(Path(self.results_dir) / 'screenshots' / self.filename) + alive_endpoints_file = str(Path(self.results_dir) / 'endpoints_alive.txt') + config = self.yaml_configuration.get(SCREENSHOT) or {} + enable_http_crawl = config.get(ENABLE_HTTP_CRAWL, DEFAULT_ENABLE_HTTP_CRAWL) + intensity = config.get(INTENSITY) or self.yaml_configuration.get(INTENSITY, DEFAULT_SCAN_INTENSITY) + timeout = config.get(TIMEOUT) or self.yaml_configuration.get(TIMEOUT, DEFAULT_HTTP_TIMEOUT + 5) + threads = config.get(THREADS) or self.yaml_configuration.get(THREADS, DEFAULT_THREADS) + + # If intensity is normal, grab only the root endpoints of each subdomain + strict = True if intensity == 'normal' else False + + # Get URLs to take screenshot of + get_http_urls( + is_alive=enable_http_crawl, + strict=strict, + write_filepath=alive_endpoints_file, + get_only_default_urls=True, + ctx=ctx + ) + + # Send start notif + notification = Notification.objects.first() + send_output_file = notification.send_scan_output_file if notification else False + + # Run cmd + cmd = f'EyeWitness -f {alive_endpoints_file} -d {screenshots_path} --no-prompt' + cmd += f' --timeout {timeout}' if timeout > 0 else '' + cmd += f' --threads {threads}' if threads > 0 else '' + run_command( + cmd, + shell=False, + history_file=self.history_file, + scan_id=self.scan_id, + activity_id=self.activity_id) + if not os.path.isfile(output_path): + logger.error(f'Could not load EyeWitness results at {output_path} for {self.domain.name}.') + return + + # Loop through results and save objects in DB + screenshot_paths = [] + with open(output_path, 'r') as file: + reader = csv.reader(file) + header = next(reader) # Skip header row + indices = [header.index(col) for col in ["Protocol", "Port", "Domain", "Request Status", "Screenshot Path", " Source Path"]] + for row in reader: + protocol, port, subdomain_name, status, screenshot_path, source_path = extract_columns(row, indices) + logger.info(f'{protocol}:{port}:{subdomain_name}:{status}') + subdomain_query = Subdomain.objects.filter(name=subdomain_name) + if self.scan: + subdomain_query = subdomain_query.filter(scan_history=self.scan) + if status == 'Successful' and subdomain_query.exists(): + subdomain = subdomain_query.first() + screenshot_paths.append(screenshot_path) + subdomain.screenshot_path = screenshot_path.replace(RENGINE_RESULTS, '') + subdomain.save() + logger.warning(f'Added screenshot for {subdomain.name} to DB') + + # Remove all db, html extra files in screenshot results + run_command( + f'rm -rf {screenshots_path}/*.csv {screenshots_path}/*.db {screenshots_path}/*.js {screenshots_path}/*.html {screenshots_path}/*.css', + shell=True, + history_file=self.history_file, + scan_id=self.scan_id, + activity_id=self.activity_id) + run_command( + f'rm -rf ' + str(Path(screenshots_path) / 'source'), + shell=True, + history_file=self.history_file, + scan_id=self.scan_id, + activity_id=self.activity_id) + + # Send finish notifs + screenshots_str = '• ' + '\n• '.join([f'`{path}`' for path in screenshot_paths]) + self.notify(fields={'Screenshots': screenshots_str}) + if send_output_file: + for path in screenshot_paths: + title = get_output_file_name( + self.scan_id, + self.subscan_id, + self.filename) + send_file_to_discord.delay(path, title) @app.task(name='port_scan', queue='main_scan_queue', base=RengineTask, bind=True) def port_scan(self, hosts=[], ctx={}, description=None): - """Run port scan. 
- - Args: - hosts (list, optional): Hosts to run port scan on. - description (str, optional): Task description shown in UI. - - Returns: - list: List of open ports (dict). - """ - input_file = str(Path(self.results_dir) / 'input_subdomains_port_scan.txt') - proxy = get_random_proxy() - - # Config - config = self.yaml_configuration.get(PORT_SCAN) or {} - enable_http_crawl = config.get(ENABLE_HTTP_CRAWL, DEFAULT_ENABLE_HTTP_CRAWL) - timeout = config.get(TIMEOUT) or self.yaml_configuration.get(TIMEOUT, DEFAULT_HTTP_TIMEOUT) - exclude_ports = config.get(NAABU_EXCLUDE_PORTS, []) - exclude_subdomains = config.get(NAABU_EXCLUDE_SUBDOMAINS, False) - ports = config.get(PORTS, NAABU_DEFAULT_PORTS) - ports = [str(port) for port in ports] - rate_limit = config.get(NAABU_RATE) or self.yaml_configuration.get(RATE_LIMIT, DEFAULT_RATE_LIMIT) - threads = config.get(THREADS) or self.yaml_configuration.get(THREADS, DEFAULT_THREADS) - passive = config.get(NAABU_PASSIVE, False) - use_naabu_config = config.get(USE_NAABU_CONFIG, False) - exclude_ports_str = ','.join(return_iterable(exclude_ports)) - # nmap args - nmap_enabled = config.get(ENABLE_NMAP, False) - nmap_cmd = config.get(NMAP_COMMAND, '') - nmap_script = config.get(NMAP_SCRIPT, '') - nmap_script = ','.join(return_iterable(nmap_script)) - nmap_script_args = config.get(NMAP_SCRIPT_ARGS) - - if hosts: - with open(input_file, 'w') as f: - f.write('\n'.join(hosts)) - else: - hosts = get_subdomains( - write_filepath=input_file, - exclude_subdomains=exclude_subdomains, - ctx=ctx) - - # Build cmd - cmd = 'naabu -json -exclude-cdn' - cmd += f' -list {input_file}' if len(hosts) > 0 else f' -host {hosts[0]}' - if 'full' in ports or 'all' in ports: - ports_str = ' -p "-"' - elif 'top-100' in ports: - ports_str = ' -top-ports 100' - elif 'top-1000' in ports: - ports_str = ' -top-ports 1000' - else: - ports_str = ','.join(ports) - ports_str = f' -p {ports_str}' - cmd += ports_str - cmd += (' -config ' + str(Path.home() / '.config' / 'naabu' / 'config.yaml')) if use_naabu_config else '' - cmd += f' -proxy "{proxy}"' if proxy else '' - cmd += f' -c {threads}' if threads else '' - cmd += f' -rate {rate_limit}' if rate_limit > 0 else '' - cmd += f' -timeout {timeout*1000}' if timeout > 0 else '' - cmd += f' -passive' if passive else '' - cmd += f' -exclude-ports {exclude_ports_str}' if exclude_ports else '' - cmd += f' -silent' - - # Execute cmd and gather results - results = [] - urls = [] - ports_data = {} - for line in stream_command( - cmd, - shell=True, - history_file=self.history_file, - scan_id=self.scan_id, - activity_id=self.activity_id): - - if not isinstance(line, dict): - continue - results.append(line) - port_number = line['port'] - ip_address = line['ip'] - host = line.get('host') or ip_address - if port_number == 0: - continue - - # Grab subdomain - subdomain = Subdomain.objects.filter( - name=host, - target_domain=self.domain, - scan_history=self.scan - ).first() - - # Add IP DB - ip, _ = save_ip_address(ip_address, subdomain, subscan=self.subscan) - if self.subscan: - ip.ip_subscan_ids.add(self.subscan) - ip.save() - - # Add endpoint to DB - # port 80 and 443 not needed as http crawl already does that. 
- if port_number not in [80, 443]: - http_url = f'{host}:{port_number}' - endpoint, _ = save_endpoint( - http_url, - crawl=enable_http_crawl, - ctx=ctx, - subdomain=subdomain) - if endpoint: - http_url = endpoint.http_url - urls.append(http_url) - - # Add Port in DB - port_details = whatportis.get_ports(str(port_number)) - service_name = port_details[0].name if len(port_details) > 0 else 'unknown' - description = port_details[0].description if len(port_details) > 0 else '' - - # get or create port - port, created = Port.objects.get_or_create( - number=port_number, - service_name=service_name, - description=description - ) - if port_number in UNCOMMON_WEB_PORTS: - port.is_uncommon = True - port.save() - ip.ports.add(port) - ip.save() - if host in ports_data: - ports_data[host].append(port_number) - else: - ports_data[host] = [port_number] - - # Send notification - logger.warning(f'Found opened port {port_number} on {ip_address} ({host})') - - if len(ports_data) == 0: - logger.info('Finished running naabu port scan - No open ports found.') - if nmap_enabled: - logger.info('Nmap scans skipped') - return ports_data - - # Send notification - fields_str = '' - for host, ports in ports_data.items(): - ports_str = ', '.join([f'`{port}`' for port in ports]) - fields_str += f'• `{host}`: {ports_str}\n' - self.notify(fields={'Ports discovered': fields_str}) - - # Save output to file - with open(self.output_path, 'w') as f: - json.dump(results, f, indent=4) - - logger.info('Finished running naabu port scan.') - - # Process nmap results: 1 process per host - sigs = [] - if nmap_enabled: - logger.warning(f'Starting nmap scans ...') - logger.warning(ports_data) - for host, port_list in ports_data.items(): - ports_str = '_'.join([str(p) for p in port_list]) - ctx_nmap = ctx.copy() - ctx_nmap['description'] = get_task_title(f'nmap_{host}', self.scan_id, self.subscan_id) - ctx_nmap['track'] = False - sig = nmap.si( - cmd=nmap_cmd, - ports=port_list, - host=host, - script=nmap_script, - script_args=nmap_script_args, - max_rate=rate_limit, - ctx=ctx_nmap) - sigs.append(sig) - task = group(sigs).apply_async() - with allow_join_result(): - results = task.get() - - return ports_data + """Run port scan. + + Args: + hosts (list, optional): Hosts to run port scan on. + description (str, optional): Task description shown in UI. + + Returns: + list: List of open ports (dict). 
+ """ + input_file = str(Path(self.results_dir) / 'input_subdomains_port_scan.txt') + proxy = get_random_proxy() + + # Config + config = self.yaml_configuration.get(PORT_SCAN) or {} + enable_http_crawl = config.get(ENABLE_HTTP_CRAWL, DEFAULT_ENABLE_HTTP_CRAWL) + timeout = config.get(TIMEOUT) or self.yaml_configuration.get(TIMEOUT, DEFAULT_HTTP_TIMEOUT) + exclude_ports = config.get(NAABU_EXCLUDE_PORTS, []) + exclude_subdomains = config.get(NAABU_EXCLUDE_SUBDOMAINS, False) + ports = config.get(PORTS, NAABU_DEFAULT_PORTS) + ports = [str(port) for port in ports] + rate_limit = config.get(NAABU_RATE) or self.yaml_configuration.get(RATE_LIMIT, DEFAULT_RATE_LIMIT) + threads = config.get(THREADS) or self.yaml_configuration.get(THREADS, DEFAULT_THREADS) + passive = config.get(NAABU_PASSIVE, False) + use_naabu_config = config.get(USE_NAABU_CONFIG, False) + exclude_ports_str = ','.join(return_iterable(exclude_ports)) + # nmap args + nmap_enabled = config.get(ENABLE_NMAP, False) + nmap_cmd = config.get(NMAP_COMMAND, '') + nmap_script = config.get(NMAP_SCRIPT, '') + nmap_script = ','.join(return_iterable(nmap_script)) + nmap_script_args = config.get(NMAP_SCRIPT_ARGS) + + if hosts: + with open(input_file, 'w') as f: + f.write('\n'.join(hosts)) + else: + hosts = get_subdomains( + write_filepath=input_file, + exclude_subdomains=exclude_subdomains, + ctx=ctx) + + # Build cmd + cmd = 'naabu -json -exclude-cdn' + cmd += f' -list {input_file}' if len(hosts) > 0 else f' -host {hosts[0]}' + if 'full' in ports or 'all' in ports: + ports_str = ' -p "-"' + elif 'top-100' in ports: + ports_str = ' -top-ports 100' + elif 'top-1000' in ports: + ports_str = ' -top-ports 1000' + else: + ports_str = ','.join(ports) + ports_str = f' -p {ports_str}' + cmd += ports_str + cmd += (' -config ' + str(Path.home() / '.config' / 'naabu' / 'config.yaml')) if use_naabu_config else '' + cmd += f' -proxy "{proxy}"' if proxy else '' + cmd += f' -c {threads}' if threads else '' + cmd += f' -rate {rate_limit}' if rate_limit > 0 else '' + cmd += f' -timeout {timeout*1000}' if timeout > 0 else '' + cmd += f' -passive' if passive else '' + cmd += f' -exclude-ports {exclude_ports_str}' if exclude_ports else '' + cmd += f' -silent' + + # Execute cmd and gather results + results = [] + urls = [] + ports_data = {} + for line in stream_command( + cmd, + shell=True, + history_file=self.history_file, + scan_id=self.scan_id, + activity_id=self.activity_id): + + if not isinstance(line, dict): + continue + results.append(line) + port_number = line['port'] + ip_address = line['ip'] + host = line.get('host') or ip_address + if port_number == 0: + continue + + # Grab subdomain + subdomain = Subdomain.objects.filter( + name=host, + target_domain=self.domain, + scan_history=self.scan + ).first() + + # Add IP DB + ip, _ = save_ip_address(ip_address, subdomain, subscan=self.subscan) + if self.subscan: + ip.ip_subscan_ids.add(self.subscan) + ip.save() + + # Add endpoint to DB + # port 80 and 443 not needed as http crawl already does that. 
+ if port_number not in [80, 443]: + http_url = f'{host}:{port_number}' + endpoint, _ = save_endpoint( + http_url, + crawl=enable_http_crawl, + ctx=ctx, + subdomain=subdomain) + if endpoint: + http_url = endpoint.http_url + urls.append(http_url) + + # Add Port in DB + port_details = whatportis.get_ports(str(port_number)) + service_name = port_details[0].name if len(port_details) > 0 else 'unknown' + description = port_details[0].description if len(port_details) > 0 else '' + + # get or create port + port, created = Port.objects.get_or_create( + number=port_number, + service_name=service_name, + description=description + ) + if port_number in UNCOMMON_WEB_PORTS: + port.is_uncommon = True + port.save() + ip.ports.add(port) + ip.save() + if host in ports_data: + ports_data[host].append(port_number) + else: + ports_data[host] = [port_number] + + # Send notification + logger.warning(f'Found opened port {port_number} on {ip_address} ({host})') + + if len(ports_data) == 0: + logger.info('Finished running naabu port scan - No open ports found.') + if nmap_enabled: + logger.info('Nmap scans skipped') + return ports_data + + # Send notification + fields_str = '' + for host, ports in ports_data.items(): + ports_str = ', '.join([f'`{port}`' for port in ports]) + fields_str += f'• `{host}`: {ports_str}\n' + self.notify(fields={'Ports discovered': fields_str}) + + # Save output to file + with open(self.output_path, 'w') as f: + json.dump(results, f, indent=4) + + logger.info('Finished running naabu port scan.') + + # Process nmap results: 1 process per host + sigs = [] + if nmap_enabled: + logger.warning(f'Starting nmap scans ...') + logger.warning(ports_data) + for host, port_list in ports_data.items(): + ports_str = '_'.join([str(p) for p in port_list]) + ctx_nmap = ctx.copy() + ctx_nmap['description'] = get_task_title(f'nmap_{host}', self.scan_id, self.subscan_id) + ctx_nmap['track'] = False + sig = nmap.si( + cmd=nmap_cmd, + ports=port_list, + host=host, + script=nmap_script, + script_args=nmap_script_args, + max_rate=rate_limit, + ctx=ctx_nmap) + sigs.append(sig) + task = group(sigs).apply_async() + with allow_join_result(): + results = task.get() + + return ports_data @app.task(name='nmap', queue='main_scan_queue', base=RengineTask, bind=True) def nmap( - self, - cmd=None, - ports=[], - host=None, - input_file=None, - script=None, - script_args=None, - max_rate=None, - ctx={}, - description=None): - """Run nmap on a host. - - Args: - cmd (str, optional): Existing nmap command to complete. - ports (list, optional): List of ports to scan. - host (str, optional): Host to scan. - input_file (str, optional): Input hosts file. - script (str, optional): NSE script to run. - script_args (str, optional): NSE script args. - max_rate (int): Max rate. - description (str, optional): Task description shown in UI. 
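Service names and descriptions for each open port come from the whatportis package, and ports listed in UNCOMMON_WEB_PORTS are flagged separately. A minimal lookup sketch; the uncommon-port list below is an example, not reNgine's actual constant:

    import whatportis  # same package the task above relies on

    UNCOMMON_WEB_PORTS_EXAMPLE = [8080, 8443, 8888]  # illustrative values only

    def describe_port(port_number):
        details = whatportis.get_ports(str(port_number))
        service_name = details[0].name if details else 'unknown'
        description = details[0].description if details else ''
        is_uncommon = port_number in UNCOMMON_WEB_PORTS_EXAMPLE
        return service_name, description, is_uncommon

    print(describe_port(443))   # ('https', ..., False)
    print(describe_port(8443))  # flagged as uncommon in this example list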
- """ - notif = Notification.objects.first() - ports_str = ','.join(str(port) for port in ports) - self.filename = self.filename.replace('.txt', '.xml') - filename_vulns = self.filename.replace('.xml', '_vulns.json') - output_file = self.output_path - output_file_xml = f'{self.results_dir}/{host}_{self.filename}' - vulns_file = f'{self.results_dir}/{host}_{filename_vulns}' - logger.warning(f'Running nmap on {host}:{ports}') - - # Build cmd - nmap_cmd = get_nmap_cmd( - cmd=cmd, - ports=ports_str, - script=script, - script_args=script_args, - max_rate=max_rate, - host=host, - input_file=input_file, - output_file=output_file_xml) - - # Run cmd - run_command( - nmap_cmd, - shell=True, - history_file=self.history_file, - scan_id=self.scan_id, - activity_id=self.activity_id) - - # Get nmap XML results and convert to JSON - vulns = parse_nmap_results(output_file_xml, output_file) - with open(vulns_file, 'w') as f: - json.dump(vulns, f, indent=4) - - # Save vulnerabilities found by nmap - vulns_str = '' - for vuln_data in vulns: - # URL is not necessarily an HTTP URL when running nmap (can be any - # other vulnerable protocols). Look for existing endpoint and use its - # URL as vulnerability.http_url if it exists. - url = vuln_data['http_url'] - endpoint = EndPoint.objects.filter(http_url__contains=url).first() - if endpoint: - vuln_data['http_url'] = endpoint.http_url - vuln, created = save_vulnerability( - target_domain=self.domain, - subdomain=self.subdomain, - scan_history=self.scan, - subscan=self.subscan, - endpoint=endpoint, - **vuln_data) - vulns_str += f'• {str(vuln)}\n' - if created: - logger.warning(str(vuln)) - - # Send only 1 notif for all vulns to reduce number of notifs - if notif and notif.send_vuln_notif and vulns_str: - logger.warning(vulns_str) - self.notify(fields={'CVEs': vulns_str}) - return vulns + self, + cmd=None, + ports=[], + host=None, + input_file=None, + script=None, + script_args=None, + max_rate=None, + ctx={}, + description=None): + """Run nmap on a host. + + Args: + cmd (str, optional): Existing nmap command to complete. + ports (list, optional): List of ports to scan. + host (str, optional): Host to scan. + input_file (str, optional): Input hosts file. + script (str, optional): NSE script to run. + script_args (str, optional): NSE script args. + max_rate (int): Max rate. + description (str, optional): Task description shown in UI. + """ + notif = Notification.objects.first() + ports_str = ','.join(str(port) for port in ports) + self.filename = self.filename.replace('.txt', '.xml') + filename_vulns = self.filename.replace('.xml', '_vulns.json') + output_file = self.output_path + output_file_xml = f'{self.results_dir}/{host}_{self.filename}' + vulns_file = f'{self.results_dir}/{host}_{filename_vulns}' + logger.warning(f'Running nmap on {host}:{ports}') + + # Build cmd + nmap_cmd = get_nmap_cmd( + cmd=cmd, + ports=ports_str, + script=script, + script_args=script_args, + max_rate=max_rate, + host=host, + input_file=input_file, + output_file=output_file_xml) + + # Run cmd + run_command( + nmap_cmd, + shell=True, + history_file=self.history_file, + scan_id=self.scan_id, + activity_id=self.activity_id) + + # Get nmap XML results and convert to JSON + vulns = parse_nmap_results(output_file_xml, output_file) + with open(vulns_file, 'w') as f: + json.dump(vulns, f, indent=4) + + # Save vulnerabilities found by nmap + vulns_str = '' + for vuln_data in vulns: + # URL is not necessarily an HTTP URL when running nmap (can be any + # other vulnerable protocols). 
Look for existing endpoint and use its + # URL as vulnerability.http_url if it exists. + url = vuln_data['http_url'] + endpoint = EndPoint.objects.filter(http_url__contains=url).first() + if endpoint: + vuln_data['http_url'] = endpoint.http_url + vuln, created = save_vulnerability( + target_domain=self.domain, + subdomain=self.subdomain, + scan_history=self.scan, + subscan=self.subscan, + endpoint=endpoint, + **vuln_data) + vulns_str += f'• {str(vuln)}\n' + if created: + logger.warning(str(vuln)) + + # Send only 1 notif for all vulns to reduce number of notifs + if notif and notif.send_vuln_notif and vulns_str: + logger.warning(vulns_str) + self.notify(fields={'CVEs': vulns_str}) + return vulns @app.task(name='waf_detection', queue='main_scan_queue', base=RengineTask, bind=True) def waf_detection(self, ctx={}, description=None): - """ - Uses wafw00f to check for the presence of a WAF. - - Args: - description (str, optional): Task description shown in UI. - - Returns: - list: List of startScan.models.Waf objects. - """ - input_path = str(Path(self.results_dir) / 'input_endpoints_waf_detection.txt') - config = self.yaml_configuration.get(WAF_DETECTION) or {} - enable_http_crawl = config.get(ENABLE_HTTP_CRAWL, DEFAULT_ENABLE_HTTP_CRAWL) - - # Get alive endpoints from DB - get_http_urls( - is_alive=enable_http_crawl, - write_filepath=input_path, - get_only_default_urls=True, - ctx=ctx - ) - - cmd = f'wafw00f -i {input_path} -o {self.output_path}' - run_command( - cmd, - history_file=self.history_file, - scan_id=self.scan_id, - activity_id=self.activity_id) - if not os.path.isfile(self.output_path): - logger.error(f'Could not find {self.output_path}') - return - - with open(self.output_path) as file: - wafs = file.readlines() - - for line in wafs: - line = " ".join(line.split()) - splitted = line.split(' ', 1) - waf_info = splitted[1].strip() - waf_name = waf_info[:waf_info.find('(')].strip() - waf_manufacturer = waf_info[waf_info.find('(')+1:waf_info.find(')')].strip().replace('.', '') - http_url = sanitize_url(splitted[0].strip()) - if not waf_name or waf_name == 'None': - continue - - # Add waf to db - waf, _ = Waf.objects.get_or_create( - name=waf_name, - manufacturer=waf_manufacturer - ) - - # Add waf info to Subdomain in DB - subdomain_name = get_subdomain_from_url(http_url) - logger.info(f'Wafw00f Subdomain : {subdomain_name}') - - try: - subdomain = Subdomain.objects.get( - name=subdomain_name, - scan_history=self.scan, - ) - except: - logger.warning(f'Subdomain {subdomain_name} was not found in the db, skipping waf detection for this domain.') - continue - - subdomain.waf.add(waf) - subdomain.save() - return wafs + """ + Uses wafw00f to check for the presence of a WAF. + + Args: + description (str, optional): Task description shown in UI. + + Returns: + list: List of startScan.models.Waf objects. 
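wafw00f's -o report is parsed line by line above: after collapsing whitespace, the first token is the URL and the remainder is the WAF description, with the manufacturer taken from the trailing parentheses. A standalone sketch of that string handling on an example line:

    # Example line in the shape the loop above expects.
    line = 'https://www.example.com  CloudFront (Amazon)'

    line = ' '.join(line.split())        # collapse repeated whitespace
    url, waf_info = line.split(' ', 1)   # URL first, then the WAF description
    waf_name = waf_info[:waf_info.find('(')].strip()
    waf_manufacturer = (
        waf_info[waf_info.find('(') + 1:waf_info.find(')')].strip().replace('.', '')
    )

    print(url)               # https://www.example.com
    print(waf_name)          # CloudFront
    print(waf_manufacturer)  # Amazon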
+ """ + input_path = str(Path(self.results_dir) / 'input_endpoints_waf_detection.txt') + config = self.yaml_configuration.get(WAF_DETECTION) or {} + enable_http_crawl = config.get(ENABLE_HTTP_CRAWL, DEFAULT_ENABLE_HTTP_CRAWL) + + # Get alive endpoints from DB + get_http_urls( + is_alive=enable_http_crawl, + write_filepath=input_path, + get_only_default_urls=True, + ctx=ctx + ) + + cmd = f'wafw00f -i {input_path} -o {self.output_path}' + run_command( + cmd, + history_file=self.history_file, + scan_id=self.scan_id, + activity_id=self.activity_id) + if not os.path.isfile(self.output_path): + logger.error(f'Could not find {self.output_path}') + return + + with open(self.output_path) as file: + wafs = file.readlines() + + for line in wafs: + line = " ".join(line.split()) + splitted = line.split(' ', 1) + waf_info = splitted[1].strip() + waf_name = waf_info[:waf_info.find('(')].strip() + waf_manufacturer = waf_info[waf_info.find('(')+1:waf_info.find(')')].strip().replace('.', '') + http_url = sanitize_url(splitted[0].strip()) + if not waf_name or waf_name == 'None': + continue + + # Add waf to db + waf, _ = Waf.objects.get_or_create( + name=waf_name, + manufacturer=waf_manufacturer + ) + + # Add waf info to Subdomain in DB + subdomain_name = get_subdomain_from_url(http_url) + logger.info(f'Wafw00f Subdomain : {subdomain_name}') + + try: + subdomain = Subdomain.objects.get( + name=subdomain_name, + scan_history=self.scan, + ) + except: + logger.warning(f'Subdomain {subdomain_name} was not found in the db, skipping waf detection for this domain.') + continue + + subdomain.waf.add(waf) + subdomain.save() + return wafs @app.task(name='dir_file_fuzz', queue='main_scan_queue', base=RengineTask, bind=True) def dir_file_fuzz(self, ctx={}, description=None): - """Perform directory scan, and currently uses `ffuf` as a default tool. - - Args: - description (str, optional): Task description shown in UI. - - Returns: - list: List of URLs discovered. - """ - # Config - cmd = 'ffuf' - config = self.yaml_configuration.get(DIR_FILE_FUZZ) or {} - custom_header = config.get(CUSTOM_HEADER) or self.yaml_configuration.get(CUSTOM_HEADER) - if custom_header: - custom_header = generate_header_param(custom_header,'common') - auto_calibration = config.get(AUTO_CALIBRATION, True) - enable_http_crawl = config.get(ENABLE_HTTP_CRAWL, DEFAULT_ENABLE_HTTP_CRAWL) - rate_limit = config.get(RATE_LIMIT) or self.yaml_configuration.get(RATE_LIMIT, DEFAULT_RATE_LIMIT) - extensions = config.get(EXTENSIONS, DEFAULT_DIR_FILE_FUZZ_EXTENSIONS) - # prepend . on extensions - extensions = [ext if ext.startswith('.') else '.' 
+ ext for ext in extensions] - extensions_str = ','.join(map(str, extensions)) - follow_redirect = config.get(FOLLOW_REDIRECT, FFUF_DEFAULT_FOLLOW_REDIRECT) - max_time = config.get(MAX_TIME, 0) - match_http_status = config.get(MATCH_HTTP_STATUS, FFUF_DEFAULT_MATCH_HTTP_STATUS) - mc = ','.join([str(c) for c in match_http_status]) - recursive_level = config.get(RECURSIVE_LEVEL, FFUF_DEFAULT_RECURSIVE_LEVEL) - stop_on_error = config.get(STOP_ON_ERROR, False) - timeout = config.get(TIMEOUT) or self.yaml_configuration.get(TIMEOUT, DEFAULT_HTTP_TIMEOUT) - threads = config.get(THREADS) or self.yaml_configuration.get(THREADS, DEFAULT_THREADS) - wordlist_name = config.get(WORDLIST, 'dicc') - delay = rate_limit / (threads * 100) # calculate request pause delay from rate_limit and number of threads - input_path = str(Path(self.results_dir) / 'input_dir_file_fuzz.txt') - - # Get wordlist - wordlist_name = 'dicc' if wordlist_name == 'default' else wordlist_name - wordlist_path = str(Path(RENGINE_WORDLISTS) / f'{wordlist_name}.txt') - - # Build command - cmd += f' -w {wordlist_path}' - cmd += f' -e {extensions_str}' if extensions else '' - cmd += f' -maxtime {max_time}' if max_time > 0 else '' - cmd += f' -p {delay}' if delay > 0 else '' - cmd += f' -recursion -recursion-depth {recursive_level} ' if recursive_level > 0 else '' - cmd += f' -t {threads}' if threads and threads > 0 else '' - cmd += f' -timeout {timeout}' if timeout and timeout > 0 else '' - cmd += ' -se' if stop_on_error else '' - cmd += ' -fr' if follow_redirect else '' - cmd += ' -ac' if auto_calibration else '' - cmd += f' -mc {mc}' if mc else '' - cmd += f' {custom_header}' if custom_header else '' - - # Grab URLs to fuzz - urls = get_http_urls( - is_alive=True, - ignore_files=False, - write_filepath=input_path, - get_only_default_urls=True, - ctx=ctx - ) - logger.warning(urls) - - # Loop through URLs and run command - results = [] - for url in urls: - ''' - Above while fetching urls, we are not ignoring files, because some - default urls may redirect to https://example.com/login.php - so, ignore_files is set to False - but, during fuzzing, we will only need part of the path, in above example - it is still a good idea to ffuf base url https://example.com - so files from base url - ''' - url_parse = urlparse(url) - url = url_parse.scheme + '://' + url_parse.netloc - url += '/FUZZ' # TODO: fuzz not only URL but also POST / PUT / headers - proxy = get_random_proxy() - - # Build final cmd - fcmd = cmd - fcmd += f' -x {proxy}' if proxy else '' - fcmd += f' -u {url} -json' - - # Initialize DirectoryScan object - dirscan = DirectoryScan() - dirscan.scanned_date = timezone.now() - dirscan.command_line = fcmd - dirscan.save() - - # Loop through results and populate EndPoint and DirectoryFile in DB - results = [] - for line in stream_command( - fcmd, - shell=True, - history_file=self.history_file, - scan_id=self.scan_id, - activity_id=self.activity_id): - - # Empty line, continue to the next record - if not isinstance(line, dict): - continue - - # Append line to results - results.append(line) - - # Retrieve FFUF output - url = line['url'] - # Extract path and convert to base64 (need byte string encode & decode) - name = base64.b64encode(extract_path_from_url(url).encode()).decode() - length = line['length'] - status = line['status'] - words = line['words'] - lines = line['lines'] - content_type = line['content-type'] - duration = line['duration'] - - # If name empty log error and continue - if not name: - logger.error(f'FUZZ not found for 
"{url}"') - continue - - # Get or create endpoint from URL - endpoint, created = save_endpoint(url, crawl=False, ctx=ctx) - - # Continue to next line if endpoint returned is None - if endpoint == None: - continue - - # Save endpoint data from FFUF output - endpoint.http_status = status - endpoint.content_length = length - endpoint.response_time = duration / 1000000000 - endpoint.content_type = content_type - endpoint.content_length = length - endpoint.save() - - # Save directory file output from FFUF output - dfile, created = DirectoryFile.objects.get_or_create( - name=name, - length=length, - words=words, - lines=lines, - content_type=content_type, - url=url, - http_status=status) - - # Log newly created file or directory if debug activated - if created and CELERY_DEBUG: - logger.warning(f'Found new directory or file {url}') - - # Add file to current dirscan - dirscan.directory_files.add(dfile) - - # Add subscan relation to dirscan if exists - if self.subscan: - dirscan.dir_subscan_ids.add(self.subscan) - - # Save dirscan datas - dirscan.save() - - # Get subdomain and add dirscan - if ctx.get('subdomain_id') and ctx['subdomain_id'] > 0: - subdomain = Subdomain.objects.get(id=ctx['subdomain_id']) - else: - subdomain_name = get_subdomain_from_url(endpoint.http_url) - subdomain = Subdomain.objects.get(name=subdomain_name, scan_history=self.scan) - subdomain.directories.add(dirscan) - subdomain.save() - - # Crawl discovered URLs - if enable_http_crawl: - ctx['track'] = False - http_crawl(urls, ctx=ctx) - - return results + """Perform directory scan, and currently uses `ffuf` as a default tool. + + Args: + description (str, optional): Task description shown in UI. + + Returns: + list: List of URLs discovered. + """ + # Config + cmd = 'ffuf' + config = self.yaml_configuration.get(DIR_FILE_FUZZ) or {} + custom_header = config.get(CUSTOM_HEADER) or self.yaml_configuration.get(CUSTOM_HEADER) + if custom_header: + custom_header = generate_header_param(custom_header,'common') + auto_calibration = config.get(AUTO_CALIBRATION, True) + enable_http_crawl = config.get(ENABLE_HTTP_CRAWL, DEFAULT_ENABLE_HTTP_CRAWL) + rate_limit = config.get(RATE_LIMIT) or self.yaml_configuration.get(RATE_LIMIT, DEFAULT_RATE_LIMIT) + extensions = config.get(EXTENSIONS, DEFAULT_DIR_FILE_FUZZ_EXTENSIONS) + # prepend . on extensions + extensions = [ext if ext.startswith('.') else '.' 
+ ext for ext in extensions] + extensions_str = ','.join(map(str, extensions)) + follow_redirect = config.get(FOLLOW_REDIRECT, FFUF_DEFAULT_FOLLOW_REDIRECT) + max_time = config.get(MAX_TIME, 0) + match_http_status = config.get(MATCH_HTTP_STATUS, FFUF_DEFAULT_MATCH_HTTP_STATUS) + mc = ','.join([str(c) for c in match_http_status]) + recursive_level = config.get(RECURSIVE_LEVEL, FFUF_DEFAULT_RECURSIVE_LEVEL) + stop_on_error = config.get(STOP_ON_ERROR, False) + timeout = config.get(TIMEOUT) or self.yaml_configuration.get(TIMEOUT, DEFAULT_HTTP_TIMEOUT) + threads = config.get(THREADS) or self.yaml_configuration.get(THREADS, DEFAULT_THREADS) + wordlist_name = config.get(WORDLIST, 'dicc') + delay = rate_limit / (threads * 100) # calculate request pause delay from rate_limit and number of threads + input_path = str(Path(self.results_dir) / 'input_dir_file_fuzz.txt') + + # Get wordlist + wordlist_name = 'dicc' if wordlist_name == 'default' else wordlist_name + wordlist_path = str(Path(RENGINE_WORDLISTS) / f'{wordlist_name}.txt') + + # Build command + cmd += f' -w {wordlist_path}' + cmd += f' -e {extensions_str}' if extensions else '' + cmd += f' -maxtime {max_time}' if max_time > 0 else '' + cmd += f' -p {delay}' if delay > 0 else '' + cmd += f' -recursion -recursion-depth {recursive_level} ' if recursive_level > 0 else '' + cmd += f' -t {threads}' if threads and threads > 0 else '' + cmd += f' -timeout {timeout}' if timeout and timeout > 0 else '' + cmd += ' -se' if stop_on_error else '' + cmd += ' -fr' if follow_redirect else '' + cmd += ' -ac' if auto_calibration else '' + cmd += f' -mc {mc}' if mc else '' + cmd += f' {custom_header}' if custom_header else '' + + # Grab URLs to fuzz + urls = get_http_urls( + is_alive=True, + ignore_files=False, + write_filepath=input_path, + get_only_default_urls=True, + ctx=ctx + ) + logger.warning(urls) + + # Loop through URLs and run command + results = [] + for url in urls: + ''' + Above while fetching urls, we are not ignoring files, because some + default urls may redirect to https://example.com/login.php + so, ignore_files is set to False + but, during fuzzing, we will only need part of the path, in above example + it is still a good idea to ffuf base url https://example.com + so files from base url + ''' + url_parse = urlparse(url) + url = url_parse.scheme + '://' + url_parse.netloc + url += '/FUZZ' # TODO: fuzz not only URL but also POST / PUT / headers + proxy = get_random_proxy() + + # Build final cmd + fcmd = cmd + fcmd += f' -x {proxy}' if proxy else '' + fcmd += f' -u {url} -json' + + # Initialize DirectoryScan object + dirscan = DirectoryScan() + dirscan.scanned_date = timezone.now() + dirscan.command_line = fcmd + dirscan.save() + + # Loop through results and populate EndPoint and DirectoryFile in DB + results = [] + for line in stream_command( + fcmd, + shell=True, + history_file=self.history_file, + scan_id=self.scan_id, + activity_id=self.activity_id): + + # Empty line, continue to the next record + if not isinstance(line, dict): + continue + + # Append line to results + results.append(line) + + # Retrieve FFUF output + url = line['url'] + # Extract path and convert to base64 (need byte string encode & decode) + name = base64.b64encode(extract_path_from_url(url).encode()).decode() + length = line['length'] + status = line['status'] + words = line['words'] + lines = line['lines'] + content_type = line['content-type'] + duration = line['duration'] + + # If name empty log error and continue + if not name: + logger.error(f'FUZZ not found for 
"{url}"') + continue + + # Get or create endpoint from URL + endpoint, created = save_endpoint(url, crawl=False, ctx=ctx) + + # Continue to next line if endpoint returned is None + if endpoint == None: + continue + + # Save endpoint data from FFUF output + endpoint.http_status = status + endpoint.content_length = length + endpoint.response_time = duration / 1000000000 + endpoint.content_type = content_type + endpoint.content_length = length + endpoint.save() + + # Save directory file output from FFUF output + dfile, created = DirectoryFile.objects.get_or_create( + name=name, + length=length, + words=words, + lines=lines, + content_type=content_type, + url=url, + http_status=status) + + # Log newly created file or directory if debug activated + if created and CELERY_DEBUG: + logger.warning(f'Found new directory or file {url}') + + # Add file to current dirscan + dirscan.directory_files.add(dfile) + + # Add subscan relation to dirscan if exists + if self.subscan: + dirscan.dir_subscan_ids.add(self.subscan) + + # Save dirscan datas + dirscan.save() + + # Get subdomain and add dirscan + if ctx.get('subdomain_id') and ctx['subdomain_id'] > 0: + subdomain = Subdomain.objects.get(id=ctx['subdomain_id']) + else: + subdomain_name = get_subdomain_from_url(endpoint.http_url) + subdomain = Subdomain.objects.get(name=subdomain_name, scan_history=self.scan) + subdomain.directories.add(dirscan) + subdomain.save() + + # Crawl discovered URLs + if enable_http_crawl: + ctx['track'] = False + http_crawl(urls, ctx=ctx) + + return results @app.task(name='fetch_url', queue='main_scan_queue', base=RengineTask, bind=True) def fetch_url(self, urls=[], ctx={}, description=None): - """Fetch URLs using different tools like gauplus, gau, gospider, waybackurls ... - - Args: - urls (list): List of URLs to start from. - description (str, optional): Task description shown in UI. 
- """ - input_path = str(Path(self.results_dir) / 'input_endpoints_fetch_url.txt') - proxy = get_random_proxy() - - # Config - config = self.yaml_configuration.get(FETCH_URL) or {} - should_remove_duplicate_endpoints = config.get(REMOVE_DUPLICATE_ENDPOINTS, True) - duplicate_removal_fields = config.get(DUPLICATE_REMOVAL_FIELDS, ENDPOINT_SCAN_DEFAULT_DUPLICATE_FIELDS) - enable_http_crawl = config.get(ENABLE_HTTP_CRAWL, DEFAULT_ENABLE_HTTP_CRAWL) - gf_patterns = config.get(GF_PATTERNS, DEFAULT_GF_PATTERNS) - ignore_file_extension = config.get(IGNORE_FILE_EXTENSION, DEFAULT_IGNORE_FILE_EXTENSIONS) - tools = config.get(USES_TOOLS, ENDPOINT_SCAN_DEFAULT_TOOLS) - threads = config.get(THREADS) or self.yaml_configuration.get(THREADS, DEFAULT_THREADS) - domain_request_headers = self.domain.request_headers if self.domain else None - custom_header = config.get(CUSTOM_HEADER) or self.yaml_configuration.get(CUSTOM_HEADER) - follow_redirect = config.get(FOLLOW_REDIRECT, False) # Get follow redirect setting - if domain_request_headers or custom_header: - custom_header = domain_request_headers or custom_header - exclude_subdomains = config.get(EXCLUDED_SUBDOMAINS, False) - - # Initialize the URLs - if urls and is_iterable(urls): - with open(input_path, 'w') as f: - f.write('\n'.join(urls)) - else: - urls = get_http_urls( - is_alive=enable_http_crawl, - write_filepath=input_path, - exclude_subdomains=exclude_subdomains, - get_only_default_urls=True, - ctx=ctx - ) - - # Log initial URLs - logger.debug(f'Initial URLs: {urls}') - - # Initialize command map for tools - cmd_map = { - 'gau': f'gau', - 'hakrawler': 'hakrawler -subs -u', - 'waybackurls': 'waybackurls', - 'gospider': f'gospider --js -d 2 --sitemap --robots -w -r -a', - 'katana': f'katana -silent -jc -kf all -d 3 -fs rdn', - } - if proxy: - cmd_map['gau'] += f' --proxy "{proxy}"' - cmd_map['gospider'] += f' -p {proxy}' - cmd_map['hakrawler'] += f' -proxy {proxy}' - cmd_map['katana'] += f' -proxy {proxy}' - if threads > 0: - cmd_map['gau'] += f' --threads {threads}' - cmd_map['gospider'] += f' -t {threads}' - cmd_map['hakrawler'] += f' -t {threads}' - cmd_map['katana'] += f' -c {threads}' - if custom_header: - cmd_map['gospider'] += generate_header_param(custom_header, 'gospider') - cmd_map['hakrawler'] += generate_header_param(custom_header, 'hakrawler') - cmd_map['katana'] += generate_header_param(custom_header, 'common') - - # Add follow_redirect option to tools that support it - if follow_redirect is False: - cmd_map['gospider'] += f' --no-redirect' - cmd_map['hakrawler'] += f' -dr' - cmd_map['katana'] += f' -dr' - - tasks = [] - - # Iterate over each URL and generate commands for each tool - for url in urls: - parsed_url = urlparse(url) - base_domain = parsed_url.netloc.split(':')[0] # Remove port if present - host_regex = f"'https?://{re.escape(base_domain)}(:[0-9]+)?(/.*)?$'" - - # Log the generated regex for the current URL - logger.debug(f'Generated regex for domain {base_domain}: {host_regex}') - - cat_input = f'echo "{url}"' - - # Generate commands for each tool for the current URL - for tool in tools: # Only use tools specified in the config - if tool in cmd_map: - cmd = cmd_map[tool] - tool_cmd = f'{cat_input} | {cmd} | grep -Eo {host_regex} > {self.results_dir}/urls_{tool}_{base_domain}.txt' - tasks.append(run_command.si( - tool_cmd, - shell=True, - scan_id=self.scan_id, - activity_id=self.activity_id) - ) - logger.debug(f'Generated command for tool {tool}: {tool_cmd}') - - # Group the tasks - task_group = group(tasks) - - # Cleanup 
task - sort_output = [ - f'cat ' + str(Path(self.results_dir) / 'urls_*') + f' > {self.output_path}', - f'cat {input_path} >> {self.output_path}', - f'sort -u {self.output_path} -o {self.output_path}', - ] - if ignore_file_extension and is_iterable(ignore_file_extension): - ignore_exts = '|'.join(ignore_file_extension) - grep_ext_filtered_output = [ - f'cat {self.output_path} | grep -Eiv "\\.({ignore_exts}).*" > ' + str(Path(self.results_dir) / 'urls_filtered.txt'), - f'mv ' + str(Path(self.results_dir) / 'urls_filtered.txt') + f' {self.output_path}' - ] - sort_output.extend(grep_ext_filtered_output) - cleanup = chain( - run_command.si( - cmd, - shell=True, - scan_id=self.scan_id, - activity_id=self.activity_id) - for cmd in sort_output - ) - - # Run all commands - task = chord(task_group)(cleanup) - with allow_join_result(): - task.get() - - # Store all the endpoints and run httpx - all_urls = [] - tool_mapping = {} # New dictionary to map URLs to tools - for tool in tools: - for url in urls: - parsed_url = urlparse(url) - base_domain = parsed_url.netloc.split(':')[0] # Remove port if present - tool_output_file = f'{self.results_dir}/urls_{tool}_{base_domain}.txt' - if os.path.exists(tool_output_file): - with open(tool_output_file, 'r') as f: - discovered_urls = f.readlines() - for url in discovered_urls: - url = url.strip() - urlpath = None - base_url = None - if '] ' in url: # found JS scraped endpoint e.g from gospider - split = tuple(url.split('] ')) - if not len(split) == 2: - logger.warning(f'URL format not recognized for "{url}". Skipping.') - continue - base_url, urlpath = split - urlpath = urlpath.lstrip('- ') - elif ' - ' in url: # found JS scraped endpoint e.g from gospider - base_url, urlpath = tuple(url.split(' - ')) - - if base_url and urlpath: - subdomain = urlparse(base_url) - url = f'{subdomain.scheme}://{subdomain.netloc}{urlpath}' - - if not validators.url(url): - logger.warning(f'Invalid URL "{url}". 
Skipping.') - continue - - if url not in tool_mapping: - tool_mapping[url] = set() - tool_mapping[url].add(tool) # Use a set to ensure uniqueness - - all_urls = list(tool_mapping.keys()) - for url, found_tools in tool_mapping.items(): - unique_tools = ', '.join(found_tools) - logger.info(f'URL {url} found by tools: {unique_tools}') - - # Filter out URLs if a path filter was passed - if self.url_filter: - all_urls = [url for url in all_urls if self.url_filter in url] - - # Write result to output path - with open(self.output_path, 'w') as f: - f.write('\n'.join(all_urls)) - logger.warning(f'Found {len(all_urls)} usable URLs') - - # Crawl discovered URLs - if enable_http_crawl: - ctx['track'] = False - http_crawl( - all_urls, - ctx=ctx, - should_remove_duplicate_endpoints=should_remove_duplicate_endpoints, - duplicate_removal_fields=duplicate_removal_fields - ) - - #-------------------# - # GF PATTERNS MATCH # - #-------------------# - - # Combine old gf patterns with new ones - if gf_patterns and is_iterable(gf_patterns): - self.scan.used_gf_patterns = ','.join(gf_patterns) - self.scan.save() - - # Run gf patterns on saved endpoints - # TODO: refactor to Celery task - for gf_pattern in gf_patterns: - # TODO: js var is causing issues, removing for now - if gf_pattern == 'jsvar': - logger.info('Ignoring jsvar as it is causing issues.') - continue - - # Run gf on current pattern - logger.warning(f'Running gf on pattern "{gf_pattern}"') - gf_output_file = str(Path(self.results_dir) / f'gf_patterns_{gf_pattern}.txt') - cmd = f'cat {self.output_path} | gf {gf_pattern} | grep -Eo {host_regex} >> {gf_output_file}' - run_command( - cmd, - shell=True, - history_file=self.history_file, - scan_id=self.scan_id, - activity_id=self.activity_id) - - # Check output file - if not os.path.exists(gf_output_file): - logger.error(f'Could not find GF output file {gf_output_file}. Skipping GF pattern "{gf_pattern}"') - continue - - # Read output file line by line and - with open(gf_output_file, 'r') as f: - lines = f.readlines() - - # Add endpoints / subdomains to DB - for url in lines: - http_url = sanitize_url(url) - subdomain_name = get_subdomain_from_url(http_url) - subdomain, _ = save_subdomain(subdomain_name, ctx=ctx) - if not isinstance(subdomain, Subdomain): - logger.error(f"Invalid subdomain encountered: {subdomain}") - continue - endpoint, created = save_endpoint( - http_url, - crawl=False, - subdomain=subdomain, - ctx=ctx) - if not endpoint: - continue - earlier_pattern = None - if not created: - earlier_pattern = endpoint.matched_gf_patterns - pattern = f'{earlier_pattern},{gf_pattern}' if earlier_pattern else gf_pattern - endpoint.matched_gf_patterns = pattern - # TODO Add tool that found the URL to the db (need to update db model) - # endpoint.found_by_tools = ','.join(tool_mapping.get(url, [])) # Save tools in the endpoint - endpoint.save() - - return all_urls + """Fetch URLs using different tools like gauplus, gau, gospider, waybackurls ... + + Args: + urls (list): List of URLs to start from. + description (str, optional): Task description shown in UI. 
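Each per-tool command in fetch_url is piped through grep -Eo with a host regex so only URLs belonging to the current base domain survive. A rough Python equivalent of that filter (the real task builds the pattern as a shell-quoted string and lets grep extract matches from arbitrary output; this sketch simply filters whole URLs):

    import re

    def urls_for_domain(candidate_urls, base_domain):
        """Keep only URLs on base_domain (any port, any path),
        mirroring the grep -Eo host filter used per tool above."""
        host_regex = re.compile(
            rf'https?://{re.escape(base_domain)}(:[0-9]+)?(/.*)?$')
        return [u for u in candidate_urls if host_regex.match(u)]

    # Hypothetical tool output
    found = [
        'https://example.com/login',
        'https://example.com:8443/api',
        'https://evil.example.net/phish',
    ]
    print(urls_for_domain(found, 'example.com'))
    # ['https://example.com/login', 'https://example.com:8443/api']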
+ """ + input_path = str(Path(self.results_dir) / 'input_endpoints_fetch_url.txt') + proxy = get_random_proxy() + + # Config + config = self.yaml_configuration.get(FETCH_URL) or {} + should_remove_duplicate_endpoints = config.get(REMOVE_DUPLICATE_ENDPOINTS, True) + duplicate_removal_fields = config.get(DUPLICATE_REMOVAL_FIELDS, ENDPOINT_SCAN_DEFAULT_DUPLICATE_FIELDS) + enable_http_crawl = config.get(ENABLE_HTTP_CRAWL, DEFAULT_ENABLE_HTTP_CRAWL) + gf_patterns = config.get(GF_PATTERNS, DEFAULT_GF_PATTERNS) + ignore_file_extension = config.get(IGNORE_FILE_EXTENSION, DEFAULT_IGNORE_FILE_EXTENSIONS) + tools = config.get(USES_TOOLS, ENDPOINT_SCAN_DEFAULT_TOOLS) + threads = config.get(THREADS) or self.yaml_configuration.get(THREADS, DEFAULT_THREADS) + domain_request_headers = self.domain.request_headers if self.domain else None + custom_header = config.get(CUSTOM_HEADER) or self.yaml_configuration.get(CUSTOM_HEADER) + follow_redirect = config.get(FOLLOW_REDIRECT, False) # Get follow redirect setting + if domain_request_headers or custom_header: + custom_header = domain_request_headers or custom_header + exclude_subdomains = config.get(EXCLUDED_SUBDOMAINS, False) + + # Initialize the URLs + if urls and is_iterable(urls): + with open(input_path, 'w') as f: + f.write('\n'.join(urls)) + else: + urls = get_http_urls( + is_alive=enable_http_crawl, + write_filepath=input_path, + exclude_subdomains=exclude_subdomains, + get_only_default_urls=True, + ctx=ctx + ) + + # Log initial URLs + logger.debug(f'Initial URLs: {urls}') + + # Initialize command map for tools + cmd_map = { + 'gau': f'gau', + 'hakrawler': 'hakrawler -subs -u', + 'waybackurls': 'waybackurls', + 'gospider': f'gospider --js -d 2 --sitemap --robots -w -r -a', + 'katana': f'katana -silent -jc -kf all -d 3 -fs rdn', + } + if proxy: + cmd_map['gau'] += f' --proxy "{proxy}"' + cmd_map['gospider'] += f' -p {proxy}' + cmd_map['hakrawler'] += f' -proxy {proxy}' + cmd_map['katana'] += f' -proxy {proxy}' + if threads > 0: + cmd_map['gau'] += f' --threads {threads}' + cmd_map['gospider'] += f' -t {threads}' + cmd_map['hakrawler'] += f' -t {threads}' + cmd_map['katana'] += f' -c {threads}' + if custom_header: + cmd_map['gospider'] += generate_header_param(custom_header, 'gospider') + cmd_map['hakrawler'] += generate_header_param(custom_header, 'hakrawler') + cmd_map['katana'] += generate_header_param(custom_header, 'common') + + # Add follow_redirect option to tools that support it + if follow_redirect is False: + cmd_map['gospider'] += f' --no-redirect' + cmd_map['hakrawler'] += f' -dr' + cmd_map['katana'] += f' -dr' + + tasks = [] + + # Iterate over each URL and generate commands for each tool + for url in urls: + parsed_url = urlparse(url) + base_domain = parsed_url.netloc.split(':')[0] # Remove port if present + host_regex = f"'https?://{re.escape(base_domain)}(:[0-9]+)?(/.*)?$'" + + # Log the generated regex for the current URL + logger.debug(f'Generated regex for domain {base_domain}: {host_regex}') + + cat_input = f'echo "{url}"' + + # Generate commands for each tool for the current URL + for tool in tools: # Only use tools specified in the config + if tool in cmd_map: + cmd = cmd_map[tool] + tool_cmd = f'{cat_input} | {cmd} | grep -Eo {host_regex} > {self.results_dir}/urls_{tool}_{base_domain}.txt' + tasks.append(run_command.si( + tool_cmd, + shell=True, + scan_id=self.scan_id, + activity_id=self.activity_id) + ) + logger.debug(f'Generated command for tool {tool}: {tool_cmd}') + + # Group the tasks + task_group = group(tasks) + + # Cleanup 
task + sort_output = [ + f'cat ' + str(Path(self.results_dir) / 'urls_*') + f' > {self.output_path}', + f'cat {input_path} >> {self.output_path}', + f'sort -u {self.output_path} -o {self.output_path}', + ] + if ignore_file_extension and is_iterable(ignore_file_extension): + ignore_exts = '|'.join(ignore_file_extension) + grep_ext_filtered_output = [ + f'cat {self.output_path} | grep -Eiv "\\.({ignore_exts}).*" > ' + str(Path(self.results_dir) / 'urls_filtered.txt'), + f'mv ' + str(Path(self.results_dir) / 'urls_filtered.txt') + f' {self.output_path}' + ] + sort_output.extend(grep_ext_filtered_output) + cleanup = chain( + run_command.si( + cmd, + shell=True, + scan_id=self.scan_id, + activity_id=self.activity_id) + for cmd in sort_output + ) + + # Run all commands + task = chord(task_group)(cleanup) + with allow_join_result(): + task.get() + + # Store all the endpoints and run httpx + all_urls = [] + tool_mapping = {} # New dictionary to map URLs to tools + for tool in tools: + for url in urls: + parsed_url = urlparse(url) + base_domain = parsed_url.netloc.split(':')[0] # Remove port if present + tool_output_file = f'{self.results_dir}/urls_{tool}_{base_domain}.txt' + if os.path.exists(tool_output_file): + with open(tool_output_file, 'r') as f: + discovered_urls = f.readlines() + for url in discovered_urls: + url = url.strip() + urlpath = None + base_url = None + if '] ' in url: # found JS scraped endpoint e.g from gospider + split = tuple(url.split('] ')) + if not len(split) == 2: + logger.warning(f'URL format not recognized for "{url}". Skipping.') + continue + base_url, urlpath = split + urlpath = urlpath.lstrip('- ') + elif ' - ' in url: # found JS scraped endpoint e.g from gospider + base_url, urlpath = tuple(url.split(' - ')) + + if base_url and urlpath: + subdomain = urlparse(base_url) + url = f'{subdomain.scheme}://{subdomain.netloc}{urlpath}' + + if not validators.url(url): + logger.warning(f'Invalid URL "{url}". 
Skipping.') + continue + + if url not in tool_mapping: + tool_mapping[url] = set() + tool_mapping[url].add(tool) # Use a set to ensure uniqueness + + all_urls = list(tool_mapping.keys()) + for url, found_tools in tool_mapping.items(): + unique_tools = ', '.join(found_tools) + logger.info(f'URL {url} found by tools: {unique_tools}') + + # Filter out URLs if a path filter was passed + if self.url_filter: + all_urls = [url for url in all_urls if self.url_filter in url] + + # Write result to output path + with open(self.output_path, 'w') as f: + f.write('\n'.join(all_urls)) + logger.warning(f'Found {len(all_urls)} usable URLs') + + # Crawl discovered URLs + if enable_http_crawl: + ctx['track'] = False + http_crawl( + all_urls, + ctx=ctx, + should_remove_duplicate_endpoints=should_remove_duplicate_endpoints, + duplicate_removal_fields=duplicate_removal_fields + ) + + #-------------------# + # GF PATTERNS MATCH # + #-------------------# + + # Combine old gf patterns with new ones + if gf_patterns and is_iterable(gf_patterns): + self.scan.used_gf_patterns = ','.join(gf_patterns) + self.scan.save() + + # Run gf patterns on saved endpoints + # TODO: refactor to Celery task + for gf_pattern in gf_patterns: + # TODO: js var is causing issues, removing for now + if gf_pattern == 'jsvar': + logger.info('Ignoring jsvar as it is causing issues.') + continue + + # Run gf on current pattern + logger.warning(f'Running gf on pattern "{gf_pattern}"') + gf_output_file = str(Path(self.results_dir) / f'gf_patterns_{gf_pattern}.txt') + cmd = f'cat {self.output_path} | gf {gf_pattern} | grep -Eo {host_regex} >> {gf_output_file}' + run_command( + cmd, + shell=True, + history_file=self.history_file, + scan_id=self.scan_id, + activity_id=self.activity_id) + + # Check output file + if not os.path.exists(gf_output_file): + logger.error(f'Could not find GF output file {gf_output_file}. Skipping GF pattern "{gf_pattern}"') + continue + + # Read output file line by line and + with open(gf_output_file, 'r') as f: + lines = f.readlines() + + # Add endpoints / subdomains to DB + for url in lines: + http_url = sanitize_url(url) + subdomain_name = get_subdomain_from_url(http_url) + subdomain, _ = save_subdomain(subdomain_name, ctx=ctx) + if not isinstance(subdomain, Subdomain): + logger.error(f"Invalid subdomain encountered: {subdomain}") + continue + endpoint, created = save_endpoint( + http_url, + crawl=False, + subdomain=subdomain, + ctx=ctx) + if not endpoint: + continue + earlier_pattern = None + if not created: + earlier_pattern = endpoint.matched_gf_patterns + pattern = f'{earlier_pattern},{gf_pattern}' if earlier_pattern else gf_pattern + endpoint.matched_gf_patterns = pattern + # TODO Add tool that found the URL to the db (need to update db model) + # endpoint.found_by_tools = ','.join(tool_mapping.get(url, [])) # Save tools in the endpoint + endpoint.save() + + return all_urls def parse_curl_output(response): - # TODO: Enrich from other cURL fields. - CURL_REGEX_HTTP_STATUS = f'HTTP\/(?:(?:\d\.?)+)\s(\d+)\s(?:\w+)' - http_status = 0 - if response: - failed = False - regex = re.compile(CURL_REGEX_HTTP_STATUS, re.MULTILINE) - try: - http_status = int(regex.findall(response)[0]) - except (KeyError, TypeError, IndexError): - pass - return { - 'http_status': http_status, - } + # TODO: Enrich from other cURL fields. 
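parse_curl_output only recovers the HTTP status code from a curl-style response dump: the regex grabs the numeric code from the status line and anything else is ignored. A quick demonstration on a hypothetical response:

    import re

    # Same pattern as parse_curl_output: pull the status code out of
    # a status line such as "HTTP/1.1 403 Forbidden".
    CURL_REGEX_HTTP_STATUS = r'HTTP\/(?:(?:\d\.?)+)\s(\d+)\s(?:\w+)'

    sample = 'HTTP/1.1 403 Forbidden\r\nServer: nginx\r\n\r\n<html>...</html>'  # made-up response
    matches = re.findall(CURL_REGEX_HTTP_STATUS, sample, re.MULTILINE)
    http_status = int(matches[0]) if matches else 0
    print(http_status)  # 403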
+ CURL_REGEX_HTTP_STATUS = f'HTTP\/(?:(?:\d\.?)+)\s(\d+)\s(?:\w+)' + http_status = 0 + if response: + failed = False + regex = re.compile(CURL_REGEX_HTTP_STATUS, re.MULTILINE) + try: + http_status = int(regex.findall(response)[0]) + except (KeyError, TypeError, IndexError): + pass + return { + 'http_status': http_status, + } @app.task(name='vulnerability_scan', queue='main_scan_queue', bind=True, base=RengineTask) def vulnerability_scan(self, urls=[], ctx={}, description=None): - """ - This function will serve as an entrypoint to vulnerability scan. - All other vulnerability scan will be run from here including nuclei, crlfuzz, etc - """ - logger.info('Running Vulnerability Scan Queue') - config = self.yaml_configuration.get(VULNERABILITY_SCAN) or {} - should_run_nuclei = config.get(RUN_NUCLEI, True) - should_run_crlfuzz = config.get(RUN_CRLFUZZ, False) - should_run_dalfox = config.get(RUN_DALFOX, False) - should_run_s3scanner = config.get(RUN_S3SCANNER, True) - - grouped_tasks = [] - if should_run_nuclei: - _task = nuclei_scan.si( - urls=urls, - ctx=ctx, - description=f'Nuclei Scan' - ) - grouped_tasks.append(_task) - - if should_run_crlfuzz: - _task = crlfuzz_scan.si( - urls=urls, - ctx=ctx, - description=f'CRLFuzz Scan' - ) - grouped_tasks.append(_task) - - if should_run_dalfox: - _task = dalfox_xss_scan.si( - urls=urls, - ctx=ctx, - description=f'Dalfox XSS Scan' - ) - grouped_tasks.append(_task) - - if should_run_s3scanner: - _task = s3scanner.si( - ctx=ctx, - description=f'Misconfigured S3 Buckets Scanner' - ) - grouped_tasks.append(_task) - - celery_group = group(grouped_tasks) - job = celery_group.apply_async() - - while not job.ready(): - # wait for all jobs to complete - time.sleep(5) - - logger.info('Vulnerability scan completed...') - - # return results - return None + """ + This function will serve as an entrypoint to vulnerability scan. + All other vulnerability scan will be run from here including nuclei, crlfuzz, etc + """ + logger.info('Running Vulnerability Scan Queue') + config = self.yaml_configuration.get(VULNERABILITY_SCAN) or {} + should_run_nuclei = config.get(RUN_NUCLEI, True) + should_run_crlfuzz = config.get(RUN_CRLFUZZ, False) + should_run_dalfox = config.get(RUN_DALFOX, False) + should_run_s3scanner = config.get(RUN_S3SCANNER, True) + + grouped_tasks = [] + if should_run_nuclei: + _task = nuclei_scan.si( + urls=urls, + ctx=ctx, + description=f'Nuclei Scan' + ) + grouped_tasks.append(_task) + + if should_run_crlfuzz: + _task = crlfuzz_scan.si( + urls=urls, + ctx=ctx, + description=f'CRLFuzz Scan' + ) + grouped_tasks.append(_task) + + if should_run_dalfox: + _task = dalfox_xss_scan.si( + urls=urls, + ctx=ctx, + description=f'Dalfox XSS Scan' + ) + grouped_tasks.append(_task) + + if should_run_s3scanner: + _task = s3scanner.si( + ctx=ctx, + description=f'Misconfigured S3 Buckets Scanner' + ) + grouped_tasks.append(_task) + + celery_group = group(grouped_tasks) + job = celery_group.apply_async() + + while not job.ready(): + # wait for all jobs to complete + time.sleep(5) + + logger.info('Vulnerability scan completed...') + + # return results + return None @app.task(name='nuclei_individual_severity_module', queue='main_scan_queue', base=RengineTask, bind=True) def nuclei_individual_severity_module(self, cmd, severity, enable_http_crawl, should_fetch_gpt_report, ctx={}, description=None): - ''' - This celery task will run vulnerability scan in parallel. - All severities supplied should run in parallel as grouped tasks. 
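The orchestration in vulnerability_scan is the usual Celery fan-out: build one immutable signature per enabled scanner, run them as a group, and poll until the group is done. Stripped of the reNgine specifics, the pattern looks roughly like this (the commented usage assumes the module's own nuclei_scan and dalfox_xss_scan tasks):

    import time
    from celery import group

    def run_scanners(signatures):
        """Run a list of Celery task signatures in parallel and block until all
        finish, mirroring the group/apply_async polling loop in vulnerability_scan."""
        job = group(signatures).apply_async()
        while not job.ready():   # poll instead of calling .get() inside a worker
            time.sleep(5)

    # e.g. run_scanners([nuclei_scan.si(urls=urls, ctx=ctx),
    #                    dalfox_xss_scan.si(urls=urls, ctx=ctx)])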
- ''' - results = [] - logger.info(f'Running vulnerability scan with severity: {severity}') - cmd += f' -severity {severity}' - # Send start notification - notif = Notification.objects.first() - send_status = notif.send_scan_status_notif if notif else False - - for line in stream_command( - cmd, - history_file=self.history_file, - scan_id=self.scan_id, - activity_id=self.activity_id): - - if not isinstance(line, dict): - continue - - results.append(line) - - # Gather nuclei results - vuln_data = parse_nuclei_result(line) - - # Get corresponding subdomain - http_url = sanitize_url(line.get('matched-at')) - subdomain_name = get_subdomain_from_url(http_url) - - try: - subdomain = Subdomain.objects.get( - name=subdomain_name, - scan_history=self.scan, - target_domain=self.domain - ) - except: - logger.warning(f'Subdomain {subdomain_name} was not found in the db, skipping vulnerability scan for this subdomain.') - continue - - # Look for duplicate vulnerabilities by excluding records that might change but are irrelevant. - object_comparison_exclude = ['response', 'curl_command', 'tags', 'references', 'cve_ids', 'cwe_ids'] - - # Add subdomain and target domain to the duplicate check - vuln_data_copy = vuln_data.copy() - vuln_data_copy['subdomain'] = subdomain - vuln_data_copy['target_domain'] = self.domain - - # Check if record exists, if exists do not save it - if record_exists(Vulnerability, data=vuln_data_copy, exclude_keys=object_comparison_exclude): - logger.warning(f'Nuclei vulnerability of severity {severity} : {vuln_data_copy["name"]} for {subdomain_name} already exists') - continue - - # Get or create EndPoint object - response = line.get('response') - httpx_crawl = False if response else enable_http_crawl # avoid yet another httpx crawl - endpoint, _ = save_endpoint( - http_url, - crawl=httpx_crawl, - subdomain=subdomain, - ctx=ctx) - if endpoint: - http_url = endpoint.http_url - if not httpx_crawl: - output = parse_curl_output(response) - endpoint.http_status = output['http_status'] - endpoint.save() - - # Get or create Vulnerability object - vuln, _ = save_vulnerability( - target_domain=self.domain, - http_url=http_url, - scan_history=self.scan, - subscan=self.subscan, - subdomain=subdomain, - **vuln_data) - if not vuln: - continue - - # Print vuln - severity = line['info'].get('severity', 'unknown') - logger.warning(str(vuln)) - - - # Send notification for all vulnerabilities except info - url = vuln.http_url or vuln.subdomain - send_vuln = ( - notif and - notif.send_vuln_notif and - vuln and - severity in ['low', 'medium', 'high', 'critical']) - if send_vuln: - fields = { - 'Severity': f'**{severity.upper()}**', - 'URL': http_url, - 'Subdomain': subdomain_name, - 'Name': vuln.name, - 'Type': vuln.type, - 'Description': vuln.description, - 'Template': vuln.template_url, - 'Tags': vuln.get_tags_str(), - 'CVEs': vuln.get_cve_str(), - 'CWEs': vuln.get_cwe_str(), - 'References': vuln.get_refs_str() - } - severity_map = { - 'low': 'info', - 'medium': 'warning', - 'high': 'error', - 'critical': 'error' - } - self.notify( - f'vulnerability_scan_#{vuln.id}', - severity_map[severity], - fields, - add_meta_info=False) - - # Send report to hackerone - hackerone_query = Hackerone.objects.all() - send_report = ( - hackerone_query.exists() and - severity not in ('info', 'low') and - vuln.target_domain.h1_team_handle - ) - if send_report: - hackerone = hackerone_query.first() - if hackerone.send_critical and severity == 'critical': - send_hackerone_report.delay(vuln.id) - elif hackerone.send_high 
and severity == 'high': - send_hackerone_report.delay(vuln.id) - elif hackerone.send_medium and severity == 'medium': - send_hackerone_report.delay(vuln.id) - - # Write results to JSON file - with open(self.output_path, 'w') as f: - json.dump(results, f, indent=4) - - # Send finish notif - if send_status: - vulns = Vulnerability.objects.filter(scan_history__id=self.scan_id) - info_count = vulns.filter(severity=0).count() - low_count = vulns.filter(severity=1).count() - medium_count = vulns.filter(severity=2).count() - high_count = vulns.filter(severity=3).count() - critical_count = vulns.filter(severity=4).count() - unknown_count = vulns.filter(severity=-1).count() - vulnerability_count = info_count + low_count + medium_count + high_count + critical_count + unknown_count - fields = { - 'Total': vulnerability_count, - 'Critical': critical_count, - 'High': high_count, - 'Medium': medium_count, - 'Low': low_count, - 'Info': info_count, - 'Unknown': unknown_count - } - self.notify(fields=fields) - - # after vulnerability scan is done, we need to run gpt if - # should_fetch_gpt_report and openapi key exists - - if should_fetch_gpt_report and OpenAiAPIKey.objects.all().first(): - logger.info('Getting Vulnerability GPT Report') - vulns = Vulnerability.objects.filter( - scan_history__id=self.scan_id - ).filter( - source=NUCLEI - ).exclude( - severity=0 - ) - # find all unique vulnerabilities based on path and title - # all unique vulnerability will go thru gpt function and get report - # once report is got, it will be matched with other vulnerabilities and saved - unique_vulns = set() - for vuln in vulns: - unique_vulns.add((vuln.name, vuln.get_path())) - - unique_vulns = list(unique_vulns) - - with concurrent.futures.ThreadPoolExecutor(max_workers=DEFAULT_THREADS) as executor: - future_to_gpt = {executor.submit(get_vulnerability_gpt_report, vuln): vuln for vuln in unique_vulns} - - # Wait for all tasks to complete - for future in concurrent.futures.as_completed(future_to_gpt): - gpt = future_to_gpt[future] - try: - future.result() - except Exception as e: - logger.error(f"Exception for Vulnerability {vuln}: {e}") - - return None + ''' + This celery task will run vulnerability scan in parallel. + All severities supplied should run in parallel as grouped tasks. + ''' + results = [] + logger.info(f'Running vulnerability scan with severity: {severity}') + cmd += f' -severity {severity}' + # Send start notification + notif = Notification.objects.first() + send_status = notif.send_scan_status_notif if notif else False + + for line in stream_command( + cmd, + history_file=self.history_file, + scan_id=self.scan_id, + activity_id=self.activity_id): + + if not isinstance(line, dict): + continue + + results.append(line) + + # Gather nuclei results + vuln_data = parse_nuclei_result(line) + + # Get corresponding subdomain + http_url = sanitize_url(line.get('matched-at')) + subdomain_name = get_subdomain_from_url(http_url) + + try: + subdomain = Subdomain.objects.get( + name=subdomain_name, + scan_history=self.scan, + target_domain=self.domain + ) + except: + logger.warning(f'Subdomain {subdomain_name} was not found in the db, skipping vulnerability scan for this subdomain.') + continue + + # Look for duplicate vulnerabilities by excluding records that might change but are irrelevant. 
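The duplicate check referenced in that comment compares the parsed finding against what is already stored while ignoring fields that legitimately differ between runs (raw response, curl command, tag and reference lists). The real helper is reNgine's record_exists, which queries the ORM; the sketch below only illustrates the idea with plain dictionaries:

    def is_duplicate(existing_records, candidate, exclude_keys):
        """Return True if a record matching the candidate's stable fields already exists."""
        significant = {k: v for k, v in candidate.items() if k not in exclude_keys}
        for record in existing_records:
            if all(record.get(k) == v for k, v in significant.items()):
                return True
        return False

    exclude = ['response', 'curl_command', 'tags', 'references', 'cve_ids', 'cwe_ids']
    # Two findings that differ only in the raw response count as the same vulnerability
    a = {'name': 'Exposed .git', 'severity': 2, 'response': 'HTTP/1.1 200 ...'}
    b = {'name': 'Exposed .git', 'severity': 2, 'response': 'HTTP/1.1 200 <other bytes>'}
    print(is_duplicate([a], b, exclude))  # True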
+ object_comparison_exclude = ['response', 'curl_command', 'tags', 'references', 'cve_ids', 'cwe_ids'] + + # Add subdomain and target domain to the duplicate check + vuln_data_copy = vuln_data.copy() + vuln_data_copy['subdomain'] = subdomain + vuln_data_copy['target_domain'] = self.domain + + # Check if record exists, if exists do not save it + if record_exists(Vulnerability, data=vuln_data_copy, exclude_keys=object_comparison_exclude): + logger.warning(f'Nuclei vulnerability of severity {severity} : {vuln_data_copy["name"]} for {subdomain_name} already exists') + continue + + # Get or create EndPoint object + response = line.get('response') + httpx_crawl = False if response else enable_http_crawl # avoid yet another httpx crawl + endpoint, _ = save_endpoint( + http_url, + crawl=httpx_crawl, + subdomain=subdomain, + ctx=ctx) + if endpoint: + http_url = endpoint.http_url + if not httpx_crawl: + output = parse_curl_output(response) + endpoint.http_status = output['http_status'] + endpoint.save() + + # Get or create Vulnerability object + vuln, _ = save_vulnerability( + target_domain=self.domain, + http_url=http_url, + scan_history=self.scan, + subscan=self.subscan, + subdomain=subdomain, + **vuln_data) + if not vuln: + continue + + # Print vuln + severity = line['info'].get('severity', 'unknown') + logger.warning(str(vuln)) + + + # Send notification for all vulnerabilities except info + url = vuln.http_url or vuln.subdomain + send_vuln = ( + notif and + notif.send_vuln_notif and + vuln and + severity in ['low', 'medium', 'high', 'critical']) + if send_vuln: + fields = { + 'Severity': f'**{severity.upper()}**', + 'URL': http_url, + 'Subdomain': subdomain_name, + 'Name': vuln.name, + 'Type': vuln.type, + 'Description': vuln.description, + 'Template': vuln.template_url, + 'Tags': vuln.get_tags_str(), + 'CVEs': vuln.get_cve_str(), + 'CWEs': vuln.get_cwe_str(), + 'References': vuln.get_refs_str() + } + severity_map = { + 'low': 'info', + 'medium': 'warning', + 'high': 'error', + 'critical': 'error' + } + self.notify( + f'vulnerability_scan_#{vuln.id}', + severity_map[severity], + fields, + add_meta_info=False) + + # Send report to hackerone + hackerone_query = Hackerone.objects.all() + send_report = ( + hackerone_query.exists() and + severity not in ('info', 'low') and + vuln.target_domain.h1_team_handle + ) + if send_report: + hackerone = hackerone_query.first() + if hackerone.send_critical and severity == 'critical': + send_hackerone_report.delay(vuln.id) + elif hackerone.send_high and severity == 'high': + send_hackerone_report.delay(vuln.id) + elif hackerone.send_medium and severity == 'medium': + send_hackerone_report.delay(vuln.id) + + # Write results to JSON file + with open(self.output_path, 'w') as f: + json.dump(results, f, indent=4) + + # Send finish notif + if send_status: + vulns = Vulnerability.objects.filter(scan_history__id=self.scan_id) + info_count = vulns.filter(severity=0).count() + low_count = vulns.filter(severity=1).count() + medium_count = vulns.filter(severity=2).count() + high_count = vulns.filter(severity=3).count() + critical_count = vulns.filter(severity=4).count() + unknown_count = vulns.filter(severity=-1).count() + vulnerability_count = info_count + low_count + medium_count + high_count + critical_count + unknown_count + fields = { + 'Total': vulnerability_count, + 'Critical': critical_count, + 'High': high_count, + 'Medium': medium_count, + 'Low': low_count, + 'Info': info_count, + 'Unknown': unknown_count + } + self.notify(fields=fields) + + # after vulnerability 
scan is done, we need to run gpt if + # should_fetch_gpt_report and openapi key exists + + if should_fetch_gpt_report and OpenAiAPIKey.objects.all().first(): + logger.info('Getting Vulnerability GPT Report') + vulns = Vulnerability.objects.filter( + scan_history__id=self.scan_id + ).filter( + source=NUCLEI + ).exclude( + severity=0 + ) + # find all unique vulnerabilities based on path and title + # all unique vulnerability will go thru gpt function and get report + # once report is got, it will be matched with other vulnerabilities and saved + unique_vulns = set() + for vuln in vulns: + unique_vulns.add((vuln.name, vuln.get_path())) + + unique_vulns = list(unique_vulns) + + with concurrent.futures.ThreadPoolExecutor(max_workers=DEFAULT_THREADS) as executor: + future_to_gpt = {executor.submit(get_vulnerability_gpt_report, vuln): vuln for vuln in unique_vulns} + + # Wait for all tasks to complete + for future in concurrent.futures.as_completed(future_to_gpt): + gpt = future_to_gpt[future] + try: + future.result() + except Exception as e: + logger.error(f"Exception for Vulnerability {vuln}: {e}") + + return None def get_vulnerability_gpt_report(vuln): - title = vuln[0] - path = vuln[1] - logger.info(f'Getting GPT Report for {title}, PATH: {path}') - # check if in db already exists - stored = GPTVulnerabilityReport.objects.filter( - url_path=path - ).filter( - title=title - ).first() - if stored: - response = { - 'description': stored.description, - 'impact': stored.impact, - 'remediation': stored.remediation, - 'references': [url.url for url in stored.references.all()] - } - else: - report = GPTVulnerabilityReportGenerator() - vulnerability_description = get_gpt_vuln_input_description( - title, - path - ) - response = report.get_vulnerability_description(vulnerability_description) - add_gpt_description_db( - title, - path, - response.get('description'), - response.get('impact'), - response.get('remediation'), - response.get('references', []) - ) - - - for vuln in Vulnerability.objects.filter(name=title, http_url__icontains=path): - vuln.description = response.get('description', vuln.description) - vuln.impact = response.get('impact') - vuln.remediation = response.get('remediation') - vuln.is_gpt_used = True - vuln.save() - - for url in response.get('references', []): - ref, created = VulnerabilityReference.objects.get_or_create(url=url) - vuln.references.add(ref) - vuln.save() + title = vuln[0] + path = vuln[1] + logger.info(f'Getting GPT Report for {title}, PATH: {path}') + # check if in db already exists + stored = GPTVulnerabilityReport.objects.filter( + url_path=path + ).filter( + title=title + ).first() + if stored: + response = { + 'description': stored.description, + 'impact': stored.impact, + 'remediation': stored.remediation, + 'references': [url.url for url in stored.references.all()] + } + else: + report = GPTVulnerabilityReportGenerator() + vulnerability_description = get_gpt_vuln_input_description( + title, + path + ) + response = report.get_vulnerability_description(vulnerability_description) + add_gpt_description_db( + title, + path, + response.get('description'), + response.get('impact'), + response.get('remediation'), + response.get('references', []) + ) + + + for vuln in Vulnerability.objects.filter(name=title, http_url__icontains=path): + vuln.description = response.get('description', vuln.description) + vuln.impact = response.get('impact') + vuln.remediation = response.get('remediation') + vuln.is_gpt_used = True + vuln.save() + + for url in response.get('references', []): 
+ ref, created = VulnerabilityReference.objects.get_or_create(url=url) + vuln.references.add(ref) + vuln.save() def add_gpt_description_db(title, path, description, impact, remediation, references): - gpt_report = GPTVulnerabilityReport() - gpt_report.url_path = path - gpt_report.title = title - gpt_report.description = description - gpt_report.impact = impact - gpt_report.remediation = remediation - gpt_report.save() - - for url in references: - ref, created = VulnerabilityReference.objects.get_or_create(url=url) - gpt_report.references.add(ref) - gpt_report.save() + gpt_report = GPTVulnerabilityReport() + gpt_report.url_path = path + gpt_report.title = title + gpt_report.description = description + gpt_report.impact = impact + gpt_report.remediation = remediation + gpt_report.save() + + for url in references: + ref, created = VulnerabilityReference.objects.get_or_create(url=url) + gpt_report.references.add(ref) + gpt_report.save() @app.task(name='nuclei_scan', queue='main_scan_queue', base=RengineTask, bind=True) def nuclei_scan(self, urls=[], ctx={}, description=None): - """HTTP vulnerability scan using Nuclei - - Args: - urls (list, optional): If passed, filter on those URLs. - description (str, optional): Task description shown in UI. - - Notes: - Unfurl the urls to keep only domain and path, will be sent to vuln scan and - ignore certain file extensions. Thanks: https://github.com/six2dez/reconftw - """ - # Config - config = self.yaml_configuration.get(VULNERABILITY_SCAN) or {} - input_path = str(Path(self.results_dir) / 'input_endpoints_vulnerability_scan.txt') - enable_http_crawl = config.get(ENABLE_HTTP_CRAWL, DEFAULT_ENABLE_HTTP_CRAWL) - concurrency = config.get(NUCLEI_CONCURRENCY) or self.yaml_configuration.get(THREADS, DEFAULT_THREADS) - intensity = config.get(INTENSITY) or self.yaml_configuration.get(INTENSITY, DEFAULT_SCAN_INTENSITY) - rate_limit = config.get(RATE_LIMIT) or self.yaml_configuration.get(RATE_LIMIT, DEFAULT_RATE_LIMIT) - retries = config.get(RETRIES) or self.yaml_configuration.get(RETRIES, DEFAULT_RETRIES) - timeout = config.get(TIMEOUT) or self.yaml_configuration.get(TIMEOUT, DEFAULT_HTTP_TIMEOUT) - custom_header = config.get(CUSTOM_HEADER) or self.yaml_configuration.get(CUSTOM_HEADER) - if custom_header: - custom_header = generate_header_param(custom_header, 'common') - should_fetch_gpt_report = config.get(FETCH_GPT_REPORT, DEFAULT_GET_GPT_REPORT) - proxy = get_random_proxy() - nuclei_specific_config = config.get('nuclei', {}) - use_nuclei_conf = nuclei_specific_config.get(USE_NUCLEI_CONFIG, False) - severities = nuclei_specific_config.get(NUCLEI_SEVERITY, NUCLEI_DEFAULT_SEVERITIES) - tags = nuclei_specific_config.get(NUCLEI_TAGS, []) - tags = ','.join(tags) - nuclei_templates = nuclei_specific_config.get(NUCLEI_TEMPLATE) - custom_nuclei_templates = nuclei_specific_config.get(NUCLEI_CUSTOM_TEMPLATE) - # severities_str = ','.join(severities) - - # Get alive endpoints - if urls and is_iterable(urls): - with open(input_path, 'w') as f: - f.write('\n'.join(urls)) - else: - get_http_urls( - is_alive=enable_http_crawl, - ignore_files=True, - write_filepath=input_path, - ctx=ctx - ) - - if intensity == 'normal': # reduce number of endpoints to scan - unfurl_filter = str(Path(self.results_dir) / 'urls_unfurled.txt') - run_command( - f"cat {input_path} | unfurl -u format %s://%d%p |uro > {unfurl_filter}", - shell=True, - history_file=self.history_file, - scan_id=self.scan_id, - activity_id=self.activity_id) - run_command( - f'sort -u {unfurl_filter} -o 
{unfurl_filter}', - shell=True, - history_file=self.history_file, - scan_id=self.scan_id, - activity_id=self.activity_id) - input_path = unfurl_filter - - # Build templates - # logger.info('Updating Nuclei templates ...') - run_command( - 'nuclei -update-templates', - shell=True, - history_file=self.history_file, - scan_id=self.scan_id, - activity_id=self.activity_id) - templates = [] - if not (nuclei_templates or custom_nuclei_templates): - templates.append(NUCLEI_DEFAULT_TEMPLATES_PATH) - - if nuclei_templates: - if ALL in nuclei_templates: - template = NUCLEI_DEFAULT_TEMPLATES_PATH - templates.append(template) - else: - templates.extend(nuclei_templates) - - if custom_nuclei_templates: - custom_nuclei_template_paths = [f'{str(elem)}.yaml' for elem in custom_nuclei_templates] - template = templates.extend(custom_nuclei_template_paths) - - # Build CMD - cmd = 'nuclei -j' - cmd += (' -config ' + str(Path.home() / '.config' / 'nuclei' / 'config.yaml')) if use_nuclei_conf else '' - cmd += f' -irr' - cmd += f' {custom_header}' if custom_header else '' - cmd += f' -l {input_path}' - cmd += f' -c {str(concurrency)}' if concurrency > 0 else '' - cmd += f' -proxy {proxy} ' if proxy else '' - cmd += f' -retries {retries}' if retries > 0 else '' - cmd += f' -rl {rate_limit}' if rate_limit > 0 else '' - # cmd += f' -severity {severities_str}' - cmd += f' -timeout {str(timeout)}' if timeout and timeout > 0 else '' - cmd += f' -tags {tags}' if tags else '' - cmd += f' -silent' - for tpl in templates: - cmd += f' -t {tpl}' - - - grouped_tasks = [] - custom_ctx = ctx - for severity in severities: - custom_ctx['track'] = True - _task = nuclei_individual_severity_module.si( - cmd, - severity, - enable_http_crawl, - should_fetch_gpt_report, - ctx=custom_ctx, - description=f'Nuclei Scan with severity {severity}' - ) - grouped_tasks.append(_task) - - celery_group = group(grouped_tasks) - job = celery_group.apply_async() - - while not job.ready(): - # wait for all jobs to complete - time.sleep(5) - - logger.info('Vulnerability scan with all severities completed...') - - return None + """HTTP vulnerability scan using Nuclei + + Args: + urls (list, optional): If passed, filter on those URLs. + description (str, optional): Task description shown in UI. + + Notes: + Unfurl the urls to keep only domain and path, will be sent to vuln scan and + ignore certain file extensions. 
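When intensity is 'normal', nuclei_scan shrinks the target list by shelling out to unfurl and uro: every URL is reduced to scheme://host/path, near-duplicate paths are collapsed, and the result is de-duplicated with sort -u. A rough Python approximation of that reduction (the task itself relies on the external tools, and uro does more aggressive similarity collapsing than this):

    from urllib.parse import urlparse

    def reduce_endpoints(urls):
        """Approximate the unfurl '%s://%d%p' | uro | sort -u pipeline:
        drop query strings and fragments, then de-duplicate what remains."""
        seen = set()
        for url in urls:
            parsed = urlparse(url)
            seen.add(f'{parsed.scheme}://{parsed.netloc}{parsed.path}')
        return sorted(seen)

    print(reduce_endpoints([
        'https://example.com/search?q=1',
        'https://example.com/search?q=2',
        'https://example.com/about',
    ]))
    # ['https://example.com/about', 'https://example.com/search']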
Thanks: https://github.com/six2dez/reconftw + """ + # Config + config = self.yaml_configuration.get(VULNERABILITY_SCAN) or {} + input_path = str(Path(self.results_dir) / 'input_endpoints_vulnerability_scan.txt') + enable_http_crawl = config.get(ENABLE_HTTP_CRAWL, DEFAULT_ENABLE_HTTP_CRAWL) + concurrency = config.get(NUCLEI_CONCURRENCY) or self.yaml_configuration.get(THREADS, DEFAULT_THREADS) + intensity = config.get(INTENSITY) or self.yaml_configuration.get(INTENSITY, DEFAULT_SCAN_INTENSITY) + rate_limit = config.get(RATE_LIMIT) or self.yaml_configuration.get(RATE_LIMIT, DEFAULT_RATE_LIMIT) + retries = config.get(RETRIES) or self.yaml_configuration.get(RETRIES, DEFAULT_RETRIES) + timeout = config.get(TIMEOUT) or self.yaml_configuration.get(TIMEOUT, DEFAULT_HTTP_TIMEOUT) + custom_header = config.get(CUSTOM_HEADER) or self.yaml_configuration.get(CUSTOM_HEADER) + if custom_header: + custom_header = generate_header_param(custom_header, 'common') + should_fetch_gpt_report = config.get(FETCH_GPT_REPORT, DEFAULT_GET_GPT_REPORT) + proxy = get_random_proxy() + nuclei_specific_config = config.get('nuclei', {}) + use_nuclei_conf = nuclei_specific_config.get(USE_NUCLEI_CONFIG, False) + severities = nuclei_specific_config.get(NUCLEI_SEVERITY, NUCLEI_DEFAULT_SEVERITIES) + tags = nuclei_specific_config.get(NUCLEI_TAGS, []) + tags = ','.join(tags) + nuclei_templates = nuclei_specific_config.get(NUCLEI_TEMPLATE) + custom_nuclei_templates = nuclei_specific_config.get(NUCLEI_CUSTOM_TEMPLATE) + # severities_str = ','.join(severities) + + # Get alive endpoints + if urls and is_iterable(urls): + with open(input_path, 'w') as f: + f.write('\n'.join(urls)) + else: + get_http_urls( + is_alive=enable_http_crawl, + ignore_files=True, + write_filepath=input_path, + ctx=ctx + ) + + if intensity == 'normal': # reduce number of endpoints to scan + unfurl_filter = str(Path(self.results_dir) / 'urls_unfurled.txt') + run_command( + f"cat {input_path} | unfurl -u format %s://%d%p |uro > {unfurl_filter}", + shell=True, + history_file=self.history_file, + scan_id=self.scan_id, + activity_id=self.activity_id) + run_command( + f'sort -u {unfurl_filter} -o {unfurl_filter}', + shell=True, + history_file=self.history_file, + scan_id=self.scan_id, + activity_id=self.activity_id) + input_path = unfurl_filter + + # Build templates + # logger.info('Updating Nuclei templates ...') + run_command( + 'nuclei -update-templates', + shell=True, + history_file=self.history_file, + scan_id=self.scan_id, + activity_id=self.activity_id) + templates = [] + if not (nuclei_templates or custom_nuclei_templates): + templates.append(NUCLEI_DEFAULT_TEMPLATES_PATH) + + if nuclei_templates: + if ALL in nuclei_templates: + template = NUCLEI_DEFAULT_TEMPLATES_PATH + templates.append(template) + else: + templates.extend(nuclei_templates) + + if custom_nuclei_templates: + custom_nuclei_template_paths = [f'{str(elem)}.yaml' for elem in custom_nuclei_templates] + template = templates.extend(custom_nuclei_template_paths) + + # Build CMD + cmd = 'nuclei -j' + cmd += (' -config ' + str(Path.home() / '.config' / 'nuclei' / 'config.yaml')) if use_nuclei_conf else '' + cmd += f' -irr' + cmd += f' {custom_header}' if custom_header else '' + cmd += f' -l {input_path}' + cmd += f' -c {str(concurrency)}' if concurrency > 0 else '' + cmd += f' -proxy {proxy} ' if proxy else '' + cmd += f' -retries {retries}' if retries > 0 else '' + cmd += f' -rl {rate_limit}' if rate_limit > 0 else '' + # cmd += f' -severity {severities_str}' + cmd += f' -timeout {str(timeout)}' if 
timeout and timeout > 0 else '' + cmd += f' -tags {tags}' if tags else '' + cmd += f' -silent' + for tpl in templates: + cmd += f' -t {tpl}' + + + grouped_tasks = [] + custom_ctx = ctx + for severity in severities: + custom_ctx['track'] = True + _task = nuclei_individual_severity_module.si( + cmd, + severity, + enable_http_crawl, + should_fetch_gpt_report, + ctx=custom_ctx, + description=f'Nuclei Scan with severity {severity}' + ) + grouped_tasks.append(_task) + + celery_group = group(grouped_tasks) + job = celery_group.apply_async() + + while not job.ready(): + # wait for all jobs to complete + time.sleep(5) + + logger.info('Vulnerability scan with all severities completed...') + + return None @app.task(name='dalfox_xss_scan', queue='main_scan_queue', base=RengineTask, bind=True) def dalfox_xss_scan(self, urls=[], ctx={}, description=None): - """XSS Scan using dalfox - - Args: - urls (list, optional): If passed, filter on those URLs. - description (str, optional): Task description shown in UI. - """ - vuln_config = self.yaml_configuration.get(VULNERABILITY_SCAN) or {} - should_fetch_gpt_report = vuln_config.get(FETCH_GPT_REPORT, DEFAULT_GET_GPT_REPORT) - dalfox_config = vuln_config.get(DALFOX) or {} - custom_header = dalfox_config.get(CUSTOM_HEADER) or self.yaml_configuration.get(CUSTOM_HEADER) - if custom_header: - custom_header = generate_header_param(custom_header, 'dalfox') - proxy = get_random_proxy() - is_waf_evasion = dalfox_config.get(WAF_EVASION, False) - blind_xss_server = dalfox_config.get(BLIND_XSS_SERVER) - user_agent = dalfox_config.get(USER_AGENT) or self.yaml_configuration.get(USER_AGENT) - timeout = dalfox_config.get(TIMEOUT) - delay = dalfox_config.get(DELAY) - threads = dalfox_config.get(THREADS) or self.yaml_configuration.get(THREADS, DEFAULT_THREADS) - input_path = str(Path(self.results_dir) / 'input_endpoints_dalfox_xss.txt') - - if urls and is_iterable(urls): - with open(input_path, 'w') as f: - f.write('\n'.join(urls)) - else: - get_http_urls( - is_alive=False, - ignore_files=False, - write_filepath=input_path, - ctx=ctx - ) - - notif = Notification.objects.first() - send_status = notif.send_scan_status_notif if notif else False - - # command builder - cmd = 'dalfox --silence --no-color --no-spinner' - cmd += f' --only-poc r ' - cmd += f' --ignore-return 302,404,403' - cmd += f' --skip-bav' - cmd += f' file {input_path}' - cmd += f' --proxy {proxy}' if proxy else '' - cmd += f' --waf-evasion' if is_waf_evasion else '' - cmd += f' -b {blind_xss_server}' if blind_xss_server else '' - cmd += f' --delay {delay}' if delay else '' - cmd += f' --timeout {timeout}' if timeout else '' - cmd += f' --user-agent {user_agent}' if user_agent else '' - cmd += f' {custom_header}' if custom_header else '' - cmd += f' --worker {threads}' if threads else '' - cmd += f' --format json' - - results = [] - for line in stream_command( - cmd, - history_file=self.history_file, - scan_id=self.scan_id, - activity_id=self.activity_id, - trunc_char=',' - ): - if not isinstance(line, dict): - continue - - results.append(line) - - vuln_data = parse_dalfox_result(line) - - http_url = sanitize_url(line.get('data')) - subdomain_name = get_subdomain_from_url(http_url) - - try: - subdomain = Subdomain.objects.get( - name=subdomain_name, - scan_history=self.scan, - target_domain=self.domain - ) - except: - logger.warning(f'Subdomain {subdomain_name} was not found in the db, skipping dalfox scan for this subdomain.') - continue - - endpoint, _ = save_endpoint( - http_url, - crawl=True, - 
subdomain=subdomain, - ctx=ctx - ) - if endpoint: - http_url = endpoint.http_url - endpoint.save() - - vuln, _ = save_vulnerability( - target_domain=self.domain, - http_url=http_url, - scan_history=self.scan, - subscan=self.subscan, - **vuln_data - ) - - if not vuln: - continue - - # after vulnerability scan is done, we need to run gpt if - # should_fetch_gpt_report and openapi key exists - - if should_fetch_gpt_report and OpenAiAPIKey.objects.all().first(): - logger.info('Getting Dalfox Vulnerability GPT Report') - vulns = Vulnerability.objects.filter( - scan_history__id=self.scan_id - ).filter( - source=DALFOX - ).exclude( - severity=0 - ) - - _vulns = [] - for vuln in vulns: - _vulns.append((vuln.name, vuln.http_url)) - - with concurrent.futures.ThreadPoolExecutor(max_workers=DEFAULT_THREADS) as executor: - future_to_gpt = {executor.submit(get_vulnerability_gpt_report, vuln): vuln for vuln in _vulns} - - # Wait for all tasks to complete - for future in concurrent.futures.as_completed(future_to_gpt): - gpt = future_to_gpt[future] - try: - future.result() - except Exception as e: - logger.error(f"Exception for Vulnerability {vuln}: {e}") - return results + """XSS Scan using dalfox + + Args: + urls (list, optional): If passed, filter on those URLs. + description (str, optional): Task description shown in UI. + """ + vuln_config = self.yaml_configuration.get(VULNERABILITY_SCAN) or {} + should_fetch_gpt_report = vuln_config.get(FETCH_GPT_REPORT, DEFAULT_GET_GPT_REPORT) + dalfox_config = vuln_config.get(DALFOX) or {} + custom_header = dalfox_config.get(CUSTOM_HEADER) or self.yaml_configuration.get(CUSTOM_HEADER) + if custom_header: + custom_header = generate_header_param(custom_header, 'dalfox') + proxy = get_random_proxy() + is_waf_evasion = dalfox_config.get(WAF_EVASION, False) + blind_xss_server = dalfox_config.get(BLIND_XSS_SERVER) + user_agent = dalfox_config.get(USER_AGENT) or self.yaml_configuration.get(USER_AGENT) + timeout = dalfox_config.get(TIMEOUT) + delay = dalfox_config.get(DELAY) + threads = dalfox_config.get(THREADS) or self.yaml_configuration.get(THREADS, DEFAULT_THREADS) + input_path = str(Path(self.results_dir) / 'input_endpoints_dalfox_xss.txt') + + if urls and is_iterable(urls): + with open(input_path, 'w') as f: + f.write('\n'.join(urls)) + else: + get_http_urls( + is_alive=False, + ignore_files=False, + write_filepath=input_path, + ctx=ctx + ) + + notif = Notification.objects.first() + send_status = notif.send_scan_status_notif if notif else False + + # command builder + cmd = 'dalfox --silence --no-color --no-spinner' + cmd += f' --only-poc r ' + cmd += f' --ignore-return 302,404,403' + cmd += f' --skip-bav' + cmd += f' file {input_path}' + cmd += f' --proxy {proxy}' if proxy else '' + cmd += f' --waf-evasion' if is_waf_evasion else '' + cmd += f' -b {blind_xss_server}' if blind_xss_server else '' + cmd += f' --delay {delay}' if delay else '' + cmd += f' --timeout {timeout}' if timeout else '' + cmd += f' --user-agent {user_agent}' if user_agent else '' + cmd += f' {custom_header}' if custom_header else '' + cmd += f' --worker {threads}' if threads else '' + cmd += f' --format json' + + results = [] + for line in stream_command( + cmd, + history_file=self.history_file, + scan_id=self.scan_id, + activity_id=self.activity_id, + trunc_char=',' + ): + if not isinstance(line, dict): + continue + + results.append(line) + + vuln_data = parse_dalfox_result(line) + + http_url = sanitize_url(line.get('data')) + subdomain_name = get_subdomain_from_url(http_url) + + try: + 
subdomain = Subdomain.objects.get( + name=subdomain_name, + scan_history=self.scan, + target_domain=self.domain + ) + except: + logger.warning(f'Subdomain {subdomain_name} was not found in the db, skipping dalfox scan for this subdomain.') + continue + + endpoint, _ = save_endpoint( + http_url, + crawl=True, + subdomain=subdomain, + ctx=ctx + ) + if endpoint: + http_url = endpoint.http_url + endpoint.save() + + vuln, _ = save_vulnerability( + target_domain=self.domain, + http_url=http_url, + scan_history=self.scan, + subscan=self.subscan, + **vuln_data + ) + + if not vuln: + continue + + # after vulnerability scan is done, we need to run gpt if + # should_fetch_gpt_report and openapi key exists + + if should_fetch_gpt_report and OpenAiAPIKey.objects.all().first(): + logger.info('Getting Dalfox Vulnerability GPT Report') + vulns = Vulnerability.objects.filter( + scan_history__id=self.scan_id + ).filter( + source=DALFOX + ).exclude( + severity=0 + ) + + _vulns = [] + for vuln in vulns: + _vulns.append((vuln.name, vuln.http_url)) + + with concurrent.futures.ThreadPoolExecutor(max_workers=DEFAULT_THREADS) as executor: + future_to_gpt = {executor.submit(get_vulnerability_gpt_report, vuln): vuln for vuln in _vulns} + + # Wait for all tasks to complete + for future in concurrent.futures.as_completed(future_to_gpt): + gpt = future_to_gpt[future] + try: + future.result() + except Exception as e: + logger.error(f"Exception for Vulnerability {vuln}: {e}") + return results @app.task(name='crlfuzz_scan', queue='main_scan_queue', base=RengineTask, bind=True) def crlfuzz_scan(self, urls=[], ctx={}, description=None): - """CRLF Fuzzing with CRLFuzz - - Args: - urls (list, optional): If passed, filter on those URLs. - description (str, optional): Task description shown in UI. 
- """ - vuln_config = self.yaml_configuration.get(VULNERABILITY_SCAN) or {} - should_fetch_gpt_report = vuln_config.get(FETCH_GPT_REPORT, DEFAULT_GET_GPT_REPORT) - custom_header = vuln_config.get(CUSTOM_HEADER) or self.yaml_configuration.get(CUSTOM_HEADER) - if custom_header: - custom_header = generate_header_param(custom_header, 'common') - proxy = get_random_proxy() - user_agent = vuln_config.get(USER_AGENT) or self.yaml_configuration.get(USER_AGENT) - threads = vuln_config.get(THREADS) or self.yaml_configuration.get(THREADS, DEFAULT_THREADS) - input_path = str(Path(self.results_dir) / 'input_endpoints_crlf.txt') - output_path = str(Path(self.results_dir) / f'{self.filename}') - - if urls and is_iterable(urls): - with open(input_path, 'w') as f: - f.write('\n'.join(urls)) - else: - get_http_urls( - is_alive=False, - ignore_files=True, - write_filepath=input_path, - ctx=ctx - ) - - notif = Notification.objects.first() - send_status = notif.send_scan_status_notif if notif else False - - # command builder - cmd = 'crlfuzz -s' - cmd += f' -l {input_path}' - cmd += f' -x {proxy}' if proxy else '' - cmd += f' {custom_header}' if custom_header else '' - cmd += f' -o {output_path}' - - run_command( - cmd, - shell=False, - history_file=self.history_file, - scan_id=self.scan_id, - activity_id=self.activity_id - ) - - if not os.path.isfile(output_path): - logger.info('No Results from CRLFuzz') - return - - crlfs = [] - results = [] - with open(output_path, 'r') as file: - crlfs = file.readlines() - - for crlf in crlfs: - url = crlf.strip() - - vuln_data = parse_crlfuzz_result(url) - - http_url = sanitize_url(url) - subdomain_name = get_subdomain_from_url(http_url) - - try: - subdomain = Subdomain.objects.get( - name=subdomain_name, - scan_history=self.scan, - target_domain=self.domain - ) - except: - logger.warning(f'Subdomain {subdomain_name} was not found in the db, skipping crlfuzz scan for this subdomain.') - continue - - endpoint, _ = save_endpoint( - http_url, - crawl=True, - subdomain=subdomain, - ctx=ctx - ) - if endpoint: - http_url = endpoint.http_url - endpoint.save() - - vuln, _ = save_vulnerability( - target_domain=self.domain, - http_url=http_url, - scan_history=self.scan, - subscan=self.subscan, - **vuln_data - ) - - if not vuln: - continue - - # after vulnerability scan is done, we need to run gpt if - # should_fetch_gpt_report and openapi key exists - - if should_fetch_gpt_report and OpenAiAPIKey.objects.all().first(): - logger.info('Getting CRLFuzz Vulnerability GPT Report') - vulns = Vulnerability.objects.filter( - scan_history__id=self.scan_id - ).filter( - source=CRLFUZZ - ).exclude( - severity=0 - ) - - _vulns = [] - for vuln in vulns: - _vulns.append((vuln.name, vuln.http_url)) - - with concurrent.futures.ThreadPoolExecutor(max_workers=DEFAULT_THREADS) as executor: - future_to_gpt = {executor.submit(get_vulnerability_gpt_report, vuln): vuln for vuln in _vulns} - - # Wait for all tasks to complete - for future in concurrent.futures.as_completed(future_to_gpt): - gpt = future_to_gpt[future] - try: - future.result() - except Exception as e: - logger.error(f"Exception for Vulnerability {vuln}: {e}") - - return results + """CRLF Fuzzing with CRLFuzz + + Args: + urls (list, optional): If passed, filter on those URLs. + description (str, optional): Task description shown in UI. 
+ """ + vuln_config = self.yaml_configuration.get(VULNERABILITY_SCAN) or {} + should_fetch_gpt_report = vuln_config.get(FETCH_GPT_REPORT, DEFAULT_GET_GPT_REPORT) + custom_header = vuln_config.get(CUSTOM_HEADER) or self.yaml_configuration.get(CUSTOM_HEADER) + if custom_header: + custom_header = generate_header_param(custom_header, 'common') + proxy = get_random_proxy() + user_agent = vuln_config.get(USER_AGENT) or self.yaml_configuration.get(USER_AGENT) + threads = vuln_config.get(THREADS) or self.yaml_configuration.get(THREADS, DEFAULT_THREADS) + input_path = str(Path(self.results_dir) / 'input_endpoints_crlf.txt') + output_path = str(Path(self.results_dir) / f'{self.filename}') + + if urls and is_iterable(urls): + with open(input_path, 'w') as f: + f.write('\n'.join(urls)) + else: + get_http_urls( + is_alive=False, + ignore_files=True, + write_filepath=input_path, + ctx=ctx + ) + + notif = Notification.objects.first() + send_status = notif.send_scan_status_notif if notif else False + + # command builder + cmd = 'crlfuzz -s' + cmd += f' -l {input_path}' + cmd += f' -x {proxy}' if proxy else '' + cmd += f' {custom_header}' if custom_header else '' + cmd += f' -o {output_path}' + + run_command( + cmd, + shell=False, + history_file=self.history_file, + scan_id=self.scan_id, + activity_id=self.activity_id + ) + + if not os.path.isfile(output_path): + logger.info('No Results from CRLFuzz') + return + + crlfs = [] + results = [] + with open(output_path, 'r') as file: + crlfs = file.readlines() + + for crlf in crlfs: + url = crlf.strip() + + vuln_data = parse_crlfuzz_result(url) + + http_url = sanitize_url(url) + subdomain_name = get_subdomain_from_url(http_url) + + try: + subdomain = Subdomain.objects.get( + name=subdomain_name, + scan_history=self.scan, + target_domain=self.domain + ) + except: + logger.warning(f'Subdomain {subdomain_name} was not found in the db, skipping crlfuzz scan for this subdomain.') + continue + + endpoint, _ = save_endpoint( + http_url, + crawl=True, + subdomain=subdomain, + ctx=ctx + ) + if endpoint: + http_url = endpoint.http_url + endpoint.save() + + vuln, _ = save_vulnerability( + target_domain=self.domain, + http_url=http_url, + scan_history=self.scan, + subscan=self.subscan, + **vuln_data + ) + + if not vuln: + continue + + # after vulnerability scan is done, we need to run gpt if + # should_fetch_gpt_report and openapi key exists + + if should_fetch_gpt_report and OpenAiAPIKey.objects.all().first(): + logger.info('Getting CRLFuzz Vulnerability GPT Report') + vulns = Vulnerability.objects.filter( + scan_history__id=self.scan_id + ).filter( + source=CRLFUZZ + ).exclude( + severity=0 + ) + + _vulns = [] + for vuln in vulns: + _vulns.append((vuln.name, vuln.http_url)) + + with concurrent.futures.ThreadPoolExecutor(max_workers=DEFAULT_THREADS) as executor: + future_to_gpt = {executor.submit(get_vulnerability_gpt_report, vuln): vuln for vuln in _vulns} + + # Wait for all tasks to complete + for future in concurrent.futures.as_completed(future_to_gpt): + gpt = future_to_gpt[future] + try: + future.result() + except Exception as e: + logger.error(f"Exception for Vulnerability {vuln}: {e}") + + return results @app.task(name='s3scanner', queue='main_scan_queue', base=RengineTask, bind=True) def s3scanner(self, ctx={}, description=None): - """Bucket Scanner - - Args: - ctx (dict): Context - description (str, optional): Task description shown in UI. 
- """ - input_path = str(Path(self.results_dir) / f'#{self.scan_id}_subdomain_discovery.txt') - vuln_config = self.yaml_configuration.get(VULNERABILITY_SCAN) or {} - s3_config = vuln_config.get(S3SCANNER) or {} - threads = s3_config.get(THREADS) or self.yaml_configuration.get(THREADS, DEFAULT_THREADS) - providers = s3_config.get(PROVIDERS, S3SCANNER_DEFAULT_PROVIDERS) - scan_history = ScanHistory.objects.filter(pk=self.scan_id).first() - for provider in providers: - cmd = f's3scanner -bucket-file {input_path} -enumerate -provider {provider} -threads {threads} -json' - for line in stream_command( - cmd, - history_file=self.history_file, - scan_id=self.scan_id, - activity_id=self.activity_id): - - if not isinstance(line, dict): - continue - - if line.get('bucket', {}).get('exists', 0) == 1: - result = parse_s3scanner_result(line) - s3bucket, created = S3Bucket.objects.get_or_create(**result) - scan_history.buckets.add(s3bucket) - logger.info(f"s3 bucket added {result['provider']}-{result['name']}-{result['region']}") + """Bucket Scanner + + Args: + ctx (dict): Context + description (str, optional): Task description shown in UI. + """ + input_path = str(Path(self.results_dir) / f'#{self.scan_id}_subdomain_discovery.txt') + vuln_config = self.yaml_configuration.get(VULNERABILITY_SCAN) or {} + s3_config = vuln_config.get(S3SCANNER) or {} + threads = s3_config.get(THREADS) or self.yaml_configuration.get(THREADS, DEFAULT_THREADS) + providers = s3_config.get(PROVIDERS, S3SCANNER_DEFAULT_PROVIDERS) + scan_history = ScanHistory.objects.filter(pk=self.scan_id).first() + for provider in providers: + cmd = f's3scanner -bucket-file {input_path} -enumerate -provider {provider} -threads {threads} -json' + for line in stream_command( + cmd, + history_file=self.history_file, + scan_id=self.scan_id, + activity_id=self.activity_id): + + if not isinstance(line, dict): + continue + + if line.get('bucket', {}).get('exists', 0) == 1: + result = parse_s3scanner_result(line) + s3bucket, created = S3Bucket.objects.get_or_create(**result) + scan_history.buckets.add(s3bucket) + logger.info(f"s3 bucket added {result['provider']}-{result['name']}-{result['region']}") @app.task(name='http_crawl', queue='main_scan_queue', base=RengineTask, bind=True) def http_crawl( - self, - urls=[], - method=None, - recrawl=False, - ctx={}, - track=True, - description=None, - update_subdomain_metadatas=False, - should_remove_duplicate_endpoints=True, - duplicate_removal_fields=[]): - """Use httpx to query HTTP URLs for important info like page titles, http - status, etc... - - Args: - urls (list, optional): A set of URLs to check. Overrides default - behavior which queries all endpoints related to this scan. - method (str): HTTP method to use (GET, HEAD, POST, PUT, DELETE). - recrawl (bool, optional): If False, filter out URLs that have already - been crawled. - should_remove_duplicate_endpoints (bool): Whether to remove duplicate endpoints - duplicate_removal_fields (list): List of Endpoint model fields to check for duplicates - - Returns: - list: httpx results. 
- """ - logger.info('Initiating HTTP Crawl') - cmd = 'httpx' - config = self.yaml_configuration.get(HTTP_CRAWL) or {} - custom_header = config.get(CUSTOM_HEADER) or self.yaml_configuration.get(CUSTOM_HEADER) - if custom_header: - custom_header = generate_header_param(custom_header, 'common') - threads = config.get(THREADS, DEFAULT_THREADS) - follow_redirect = config.get(FOLLOW_REDIRECT, False) - self.output_path = None - input_path = f'{self.results_dir}/httpx_input.txt' - history_file = f'{self.results_dir}/commands.txt' - if urls and is_iterable(urls): # direct passing URLs to check - if self.url_filter: - urls = [u for u in urls if self.url_filter in u] - with open(input_path, 'w') as f: - f.write('\n'.join(urls)) - else: - # No url provided, so it's a subscan launched from subdomain list - update_subdomain_metadatas = True - - # Append the base subdomain to get subdomain info if task is launched directly from subscan - subdomain_id = ctx.get('subdomain_id') - if subdomain_id: - subdomain = Subdomain.objects.filter(id=ctx.get('subdomain_id')).first() - urls.append(subdomain.name) - - # Get subdomain endpoints to crawl the entire list - http_urls = get_http_urls( - is_uncrawled=not recrawl, - write_filepath=input_path, - ctx=ctx - ) - - # Append endpoints - if http_urls: - urls.append() - - logger.debug(urls) - - # If no URLs found, skip it - if not urls: - return - - # Re-adjust thread number if few URLs to avoid spinning up a monster to - # kill a fly. - if len(urls) < threads: - threads = len(urls) - - # Get random proxy - proxy = get_random_proxy() - - # Run command - cmd += f' -cl -ct -rt -location -td -websocket -cname -asn -cdn -probe -random-agent' - cmd += f' -t {threads}' if threads > 0 else '' - cmd += f' --http-proxy {proxy}' if proxy else '' - cmd += f' {custom_header}' if custom_header else '' - cmd += f' -json' - cmd += f' -u {urls[0]}' if len(urls) == 1 else f' -l {input_path}' - cmd += f' -x {method}' if method else '' - cmd += f' -silent' - if follow_redirect: - cmd += ' -fr' - results = [] - endpoint_ids = [] - for line in stream_command( - cmd, - history_file=history_file, - scan_id=self.scan_id, - activity_id=self.activity_id): - - if not line or not isinstance(line, dict): - continue - - # Check if the http request has an error - if 'error' in line: - logger.error(line) - continue - - logger.debug(line) - - # No response from endpoint - if line.get('failed', False): - continue - - # Parse httpx output - host = line.get('host', '') - content_length = line.get('content_length', 0) - http_status = line.get('status_code') - http_url, is_redirect = extract_httpx_url(line, follow_redirect) - page_title = line.get('title') - webserver = line.get('webserver') - cdn = line.get('cdn', False) - rt = line.get('time') - techs = line.get('tech', []) - cname = line.get('cname', '') - content_type = line.get('content_type', '') - response_time = -1 - if rt: - response_time = float(''.join(ch for ch in rt if not ch.isalpha())) - if rt[-2:] == 'ms': - response_time = response_time / 1000 - - # Create/get Subdomain object in DB - subdomain_name = get_subdomain_from_url(http_url) - subdomain, _ = save_subdomain(subdomain_name, ctx=ctx) - if not isinstance(subdomain, Subdomain): - logger.error(f"Invalid subdomain encountered: {subdomain}") - continue - - # Save default HTTP URL to endpoint object in DB - endpoint, created = save_endpoint( - http_url, - crawl=False, - ctx=ctx, - subdomain=subdomain, - is_default=update_subdomain_metadatas - ) - if not endpoint: - continue - 
endpoint.http_status = http_status - endpoint.page_title = page_title - endpoint.content_length = content_length - endpoint.webserver = webserver - endpoint.response_time = response_time - endpoint.content_type = content_type - endpoint.save() - endpoint_str = f'{http_url} [{http_status}] `{content_length}B` `{webserver}` `{rt}`' - logger.warning(endpoint_str) - if endpoint and endpoint.is_alive and endpoint.http_status != 403: - self.notify( - fields={'Alive endpoint': f'• {endpoint_str}'}, - add_meta_info=False) - - # Add endpoint to results - line['_cmd'] = cmd - line['final_url'] = http_url - line['endpoint_id'] = endpoint.id - line['endpoint_created'] = created - line['is_redirect'] = is_redirect - results.append(line) - - # Add technology objects to DB - for technology in techs: - tech, _ = Technology.objects.get_or_create(name=technology) - endpoint.techs.add(tech) - endpoint.save() - techs_str = ', '.join([f'`{tech}`' for tech in techs]) - self.notify( - fields={'Technologies': techs_str}, - add_meta_info=False) - - # Add IP objects for 'a' records to DB - a_records = line.get('a', []) - for ip_address in a_records: - ip, created = save_ip_address( - ip_address, - subdomain, - subscan=self.subscan, - cdn=cdn) - ips_str = '• ' + '\n• '.join([f'`{ip}`' for ip in a_records]) - self.notify( - fields={'IPs': ips_str}, - add_meta_info=False) - - # Add IP object for host in DB - if host: - ip, created = save_ip_address( - host, - subdomain, - subscan=self.subscan, - cdn=cdn) - self.notify( - fields={'IPs': f'• `{ip.address}`'}, - add_meta_info=False) - - # Save subdomain metadatas - if update_subdomain_metadatas: - save_subdomain_metadata(subdomain, endpoint, line) - - endpoint_ids.append(endpoint.id) - - if should_remove_duplicate_endpoints: - # Remove 'fake' alive endpoints that are just redirects to the same page - remove_duplicate_endpoints( - self.scan_id, - self.domain_id, - self.subdomain_id, - filter_ids=endpoint_ids - ) - - # Remove input file - run_command( - f'rm {input_path}', - shell=True, - history_file=self.history_file, - scan_id=self.scan_id, - activity_id=self.activity_id) - - return results + self, + urls=[], + method=None, + recrawl=False, + ctx={}, + track=True, + description=None, + update_subdomain_metadatas=False, + should_remove_duplicate_endpoints=True, + duplicate_removal_fields=[]): + """Use httpx to query HTTP URLs for important info like page titles, http + status, etc... + + Args: + urls (list, optional): A set of URLs to check. Overrides default + behavior which queries all endpoints related to this scan. + method (str): HTTP method to use (GET, HEAD, POST, PUT, DELETE). + recrawl (bool, optional): If False, filter out URLs that have already + been crawled. + should_remove_duplicate_endpoints (bool): Whether to remove duplicate endpoints + duplicate_removal_fields (list): List of Endpoint model fields to check for duplicates + + Returns: + list: httpx results. 
+ """ + logger.info('Initiating HTTP Crawl') + cmd = 'httpx' + config = self.yaml_configuration.get(HTTP_CRAWL) or {} + custom_header = config.get(CUSTOM_HEADER) or self.yaml_configuration.get(CUSTOM_HEADER) + if custom_header: + custom_header = generate_header_param(custom_header, 'common') + threads = config.get(THREADS, DEFAULT_THREADS) + follow_redirect = config.get(FOLLOW_REDIRECT, False) + self.output_path = None + input_path = f'{self.results_dir}/httpx_input.txt' + history_file = f'{self.results_dir}/commands.txt' + if urls and is_iterable(urls): # direct passing URLs to check + if self.url_filter: + urls = [u for u in urls if self.url_filter in u] + with open(input_path, 'w') as f: + f.write('\n'.join(urls)) + else: + # No url provided, so it's a subscan launched from subdomain list + update_subdomain_metadatas = True + + # Append the base subdomain to get subdomain info if task is launched directly from subscan + subdomain_id = ctx.get('subdomain_id') + if subdomain_id: + subdomain = Subdomain.objects.filter(id=ctx.get('subdomain_id')).first() + urls.append(subdomain.name) + + # Get subdomain endpoints to crawl the entire list + http_urls = get_http_urls( + is_uncrawled=not recrawl, + write_filepath=input_path, + ctx=ctx + ) + + # Append endpoints + if http_urls: + urls.append() + + logger.debug(urls) + + # If no URLs found, skip it + if not urls: + return + + # Re-adjust thread number if few URLs to avoid spinning up a monster to + # kill a fly. + if len(urls) < threads: + threads = len(urls) + + # Get random proxy + proxy = get_random_proxy() + + # Run command + cmd += f' -cl -ct -rt -location -td -websocket -cname -asn -cdn -probe -random-agent' + cmd += f' -t {threads}' if threads > 0 else '' + cmd += f' --http-proxy {proxy}' if proxy else '' + cmd += f' {custom_header}' if custom_header else '' + cmd += f' -json' + cmd += f' -u {urls[0]}' if len(urls) == 1 else f' -l {input_path}' + cmd += f' -x {method}' if method else '' + cmd += f' -silent' + if follow_redirect: + cmd += ' -fr' + results = [] + endpoint_ids = [] + for line in stream_command( + cmd, + history_file=history_file, + scan_id=self.scan_id, + activity_id=self.activity_id): + + if not line or not isinstance(line, dict): + continue + + # Check if the http request has an error + if 'error' in line: + logger.error(line) + continue + + logger.debug(line) + + # No response from endpoint + if line.get('failed', False): + continue + + # Parse httpx output + host = line.get('host', '') + content_length = line.get('content_length', 0) + http_status = line.get('status_code') + http_url, is_redirect = extract_httpx_url(line, follow_redirect) + page_title = line.get('title') + webserver = line.get('webserver') + cdn = line.get('cdn', False) + rt = line.get('time') + techs = line.get('tech', []) + cname = line.get('cname', '') + content_type = line.get('content_type', '') + response_time = -1 + if rt: + response_time = float(''.join(ch for ch in rt if not ch.isalpha())) + if rt[-2:] == 'ms': + response_time = response_time / 1000 + + # Create/get Subdomain object in DB + subdomain_name = get_subdomain_from_url(http_url) + subdomain, _ = save_subdomain(subdomain_name, ctx=ctx) + if not isinstance(subdomain, Subdomain): + logger.error(f"Invalid subdomain encountered: {subdomain}") + continue + + # Save default HTTP URL to endpoint object in DB + endpoint, created = save_endpoint( + http_url, + crawl=False, + ctx=ctx, + subdomain=subdomain, + is_default=update_subdomain_metadatas + ) + if not endpoint: + continue + 
endpoint.http_status = http_status + endpoint.page_title = page_title + endpoint.content_length = content_length + endpoint.webserver = webserver + endpoint.response_time = response_time + endpoint.content_type = content_type + endpoint.save() + endpoint_str = f'{http_url} [{http_status}] `{content_length}B` `{webserver}` `{rt}`' + logger.warning(endpoint_str) + if endpoint and endpoint.is_alive and endpoint.http_status != 403: + self.notify( + fields={'Alive endpoint': f'• {endpoint_str}'}, + add_meta_info=False) + + # Add endpoint to results + line['_cmd'] = cmd + line['final_url'] = http_url + line['endpoint_id'] = endpoint.id + line['endpoint_created'] = created + line['is_redirect'] = is_redirect + results.append(line) + + # Add technology objects to DB + for technology in techs: + tech, _ = Technology.objects.get_or_create(name=technology) + endpoint.techs.add(tech) + endpoint.save() + techs_str = ', '.join([f'`{tech}`' for tech in techs]) + self.notify( + fields={'Technologies': techs_str}, + add_meta_info=False) + + # Add IP objects for 'a' records to DB + a_records = line.get('a', []) + for ip_address in a_records: + ip, created = save_ip_address( + ip_address, + subdomain, + subscan=self.subscan, + cdn=cdn) + ips_str = '• ' + '\n• '.join([f'`{ip}`' for ip in a_records]) + self.notify( + fields={'IPs': ips_str}, + add_meta_info=False) + + # Add IP object for host in DB + if host: + ip, created = save_ip_address( + host, + subdomain, + subscan=self.subscan, + cdn=cdn) + self.notify( + fields={'IPs': f'• `{ip.address}`'}, + add_meta_info=False) + + # Save subdomain metadatas + if update_subdomain_metadatas: + save_subdomain_metadata(subdomain, endpoint, line) + + endpoint_ids.append(endpoint.id) + + if should_remove_duplicate_endpoints: + # Remove 'fake' alive endpoints that are just redirects to the same page + remove_duplicate_endpoints( + self.scan_id, + self.domain_id, + self.subdomain_id, + filter_ids=endpoint_ids + ) + + # Remove input file + run_command( + f'rm {input_path}', + shell=True, + history_file=self.history_file, + scan_id=self.scan_id, + activity_id=self.activity_id) + + return results #---------------------# @@ -2988,245 +2989,245 @@ def http_crawl( @app.task(name='send_notif', bind=False, queue='send_notif_queue') def send_notif( - message, - scan_history_id=None, - subscan_id=None, - **options): - if not 'title' in options: - message = enrich_notification(message, scan_history_id, subscan_id) - send_discord_message(message, **options) - send_slack_message(message) - send_lark_message(message) - send_telegram_message(message) + message, + scan_history_id=None, + subscan_id=None, + **options): + if not 'title' in options: + message = enrich_notification(message, scan_history_id, subscan_id) + send_discord_message(message, **options) + send_slack_message(message) + send_lark_message(message) + send_telegram_message(message) @app.task(name='send_scan_notif', bind=False, queue='send_scan_notif_queue') def send_scan_notif( - scan_history_id, - subscan_id=None, - engine_id=None, - status='RUNNING'): - """Send scan status notification. Works for scan or a subscan if subscan_id - is passed. - - Args: - scan_history_id (int, optional): ScanHistory id. - subscan_id (int, optional): SuScan id. - engine_id (int, optional): EngineType id. 
- """ - - # Skip send if notification settings are not configured - notif = Notification.objects.first() - if not (notif and notif.send_scan_status_notif): - return - - # Get domain, engine, scan_history objects - engine = EngineType.objects.filter(pk=engine_id).first() - scan = ScanHistory.objects.filter(pk=scan_history_id).first() - subscan = SubScan.objects.filter(pk=subscan_id).first() - tasks = ScanActivity.objects.filter(scan_of=scan) if scan else 0 - - # Build notif options - url = get_scan_url(scan_history_id, subscan_id) - title = get_scan_title(scan_history_id, subscan_id) - fields = get_scan_fields(engine, scan, subscan, status, tasks) - severity = None - msg = f'{title} {status}\n' - msg += '\n🡆 '.join(f'**{k}:** {v}' for k, v in fields.items()) - if status: - severity = STATUS_TO_SEVERITIES.get(status) - opts = { - 'title': title, - 'url': url, - 'fields': fields, - 'severity': severity - } - logger.warning(f'Sending notification "{title}" [{severity}]') - - # Send notification - send_notif( - msg, - scan_history_id, - subscan_id, - **opts) + scan_history_id, + subscan_id=None, + engine_id=None, + status='RUNNING'): + """Send scan status notification. Works for scan or a subscan if subscan_id + is passed. + + Args: + scan_history_id (int, optional): ScanHistory id. + subscan_id (int, optional): SuScan id. + engine_id (int, optional): EngineType id. + """ + + # Skip send if notification settings are not configured + notif = Notification.objects.first() + if not (notif and notif.send_scan_status_notif): + return + + # Get domain, engine, scan_history objects + engine = EngineType.objects.filter(pk=engine_id).first() + scan = ScanHistory.objects.filter(pk=scan_history_id).first() + subscan = SubScan.objects.filter(pk=subscan_id).first() + tasks = ScanActivity.objects.filter(scan_of=scan) if scan else 0 + + # Build notif options + url = get_scan_url(scan_history_id, subscan_id) + title = get_scan_title(scan_history_id, subscan_id) + fields = get_scan_fields(engine, scan, subscan, status, tasks) + severity = None + msg = f'{title} {status}\n' + msg += '\n🡆 '.join(f'**{k}:** {v}' for k, v in fields.items()) + if status: + severity = STATUS_TO_SEVERITIES.get(status) + opts = { + 'title': title, + 'url': url, + 'fields': fields, + 'severity': severity + } + logger.warning(f'Sending notification "{title}" [{severity}]') + + # Send notification + send_notif( + msg, + scan_history_id, + subscan_id, + **opts) @app.task(name='send_task_notif', bind=False, queue='send_task_notif_queue') def send_task_notif( - task_name, - status=None, - result=None, - output_path=None, - traceback=None, - scan_history_id=None, - engine_id=None, - subscan_id=None, - severity=None, - add_meta_info=True, - update_fields={}): - """Send task status notification. - - Args: - task_name (str): Task name. - status (str, optional): Task status. - result (str, optional): Task result. - output_path (str, optional): Task output path. - traceback (str, optional): Task traceback. - scan_history_id (int, optional): ScanHistory id. - subscan_id (int, optional): SuScan id. - engine_id (int, optional): EngineType id. - severity (str, optional): Severity (will be mapped to notif colors) - add_meta_info (bool, optional): Wheter to add scan / subscan info to notif. - update_fields (dict, optional): Fields key / value to update. 
- """ - - # Skip send if notification settings are not configured - notif = Notification.objects.first() - if not (notif and notif.send_scan_status_notif): - return - - # Build fields - url = None - fields = {} - if add_meta_info: - engine = EngineType.objects.filter(pk=engine_id).first() - scan = ScanHistory.objects.filter(pk=scan_history_id).first() - subscan = SubScan.objects.filter(pk=subscan_id).first() - url = get_scan_url(scan_history_id) - if status: - fields['Status'] = f'**{status}**' - if engine: - fields['Engine'] = engine.engine_name - if scan: - fields['Scan ID'] = f'[#{scan.id}]({url})' - if subscan: - url = get_scan_url(scan_history_id, subscan_id) - fields['Subscan ID'] = f'[#{subscan.id}]({url})' - title = get_task_title(task_name, scan_history_id, subscan_id) - if status: - severity = STATUS_TO_SEVERITIES.get(status) - - msg = f'{title} {status}\n' - msg += '\n🡆 '.join(f'**{k}:** {v}' for k, v in fields.items()) - - # Add fields to update - for k, v in update_fields.items(): - fields[k] = v - - # Add traceback to notif - if traceback and notif.send_scan_tracebacks: - fields['Traceback'] = f'```\n{traceback}\n```' - - # Add files to notif - files = [] - attach_file = ( - notif.send_scan_output_file and - output_path and - result and - not traceback - ) - if attach_file: - output_title = output_path.split('/')[-1] - files = [(output_path, output_title)] - - # Send notif - opts = { - 'title': title, - 'url': url, - 'files': files, - 'severity': severity, - 'fields': fields, - 'fields_append': update_fields.keys() - } - send_notif( - msg, - scan_history_id=scan_history_id, - subscan_id=subscan_id, - **opts) + task_name, + status=None, + result=None, + output_path=None, + traceback=None, + scan_history_id=None, + engine_id=None, + subscan_id=None, + severity=None, + add_meta_info=True, + update_fields={}): + """Send task status notification. + + Args: + task_name (str): Task name. + status (str, optional): Task status. + result (str, optional): Task result. + output_path (str, optional): Task output path. + traceback (str, optional): Task traceback. + scan_history_id (int, optional): ScanHistory id. + subscan_id (int, optional): SuScan id. + engine_id (int, optional): EngineType id. + severity (str, optional): Severity (will be mapped to notif colors) + add_meta_info (bool, optional): Wheter to add scan / subscan info to notif. + update_fields (dict, optional): Fields key / value to update. 
+ """ + + # Skip send if notification settings are not configured + notif = Notification.objects.first() + if not (notif and notif.send_scan_status_notif): + return + + # Build fields + url = None + fields = {} + if add_meta_info: + engine = EngineType.objects.filter(pk=engine_id).first() + scan = ScanHistory.objects.filter(pk=scan_history_id).first() + subscan = SubScan.objects.filter(pk=subscan_id).first() + url = get_scan_url(scan_history_id) + if status: + fields['Status'] = f'**{status}**' + if engine: + fields['Engine'] = engine.engine_name + if scan: + fields['Scan ID'] = f'[#{scan.id}]({url})' + if subscan: + url = get_scan_url(scan_history_id, subscan_id) + fields['Subscan ID'] = f'[#{subscan.id}]({url})' + title = get_task_title(task_name, scan_history_id, subscan_id) + if status: + severity = STATUS_TO_SEVERITIES.get(status) + + msg = f'{title} {status}\n' + msg += '\n🡆 '.join(f'**{k}:** {v}' for k, v in fields.items()) + + # Add fields to update + for k, v in update_fields.items(): + fields[k] = v + + # Add traceback to notif + if traceback and notif.send_scan_tracebacks: + fields['Traceback'] = f'```\n{traceback}\n```' + + # Add files to notif + files = [] + attach_file = ( + notif.send_scan_output_file and + output_path and + result and + not traceback + ) + if attach_file: + output_title = output_path.split('/')[-1] + files = [(output_path, output_title)] + + # Send notif + opts = { + 'title': title, + 'url': url, + 'files': files, + 'severity': severity, + 'fields': fields, + 'fields_append': update_fields.keys() + } + send_notif( + msg, + scan_history_id=scan_history_id, + subscan_id=subscan_id, + **opts) @app.task(name='send_file_to_discord', bind=False, queue='send_file_to_discord_queue') def send_file_to_discord(file_path, title=None): - notif = Notification.objects.first() - do_send = notif and notif.send_to_discord and notif.discord_hook_url - if not do_send: - return False - - webhook = DiscordWebhook( - url=notif.discord_hook_url, - rate_limit_retry=True, - username=title or "reNgine Discord Plugin" - ) - with open(file_path, "rb") as f: - head, tail = os.path.split(file_path) - webhook.add_file(file=f.read(), filename=tail) - webhook.execute() + notif = Notification.objects.first() + do_send = notif and notif.send_to_discord and notif.discord_hook_url + if not do_send: + return False + + webhook = DiscordWebhook( + url=notif.discord_hook_url, + rate_limit_retry=True, + username=title or "reNgine Discord Plugin" + ) + with open(file_path, "rb") as f: + head, tail = os.path.split(file_path) + webhook.add_file(file=f.read(), filename=tail) + webhook.execute() @app.task(name='send_hackerone_report', bind=False, queue='send_hackerone_report_queue') def send_hackerone_report(vulnerability_id): - """Send HackerOne vulnerability report. - - Args: - vulnerability_id (int): Vulnerability id. - - Returns: - int: HTTP response status code. 
- """ - vulnerability = Vulnerability.objects.get(id=vulnerability_id) - severities = {v: k for k,v in NUCLEI_SEVERITY_MAP.items()} - headers = { - 'Content-Type': 'application/json', - 'Accept': 'application/json' - } - - # can only send vulnerability report if team_handle exists - if len(vulnerability.target_domain.h1_team_handle) !=0: - hackerone_query = Hackerone.objects.all() - if hackerone_query.exists(): - hackerone = Hackerone.objects.first() - severity_value = severities[vulnerability.severity] - tpl = hackerone.report_template - - # Replace syntax of report template with actual content - tpl = tpl.replace('{vulnerability_name}', vulnerability.name) - tpl = tpl.replace('{vulnerable_url}', vulnerability.http_url) - tpl = tpl.replace('{vulnerability_severity}', severity_value) - tpl = tpl.replace('{vulnerability_description}', vulnerability.description if vulnerability.description else '') - tpl = tpl.replace('{vulnerability_extracted_results}', vulnerability.extracted_results if vulnerability.extracted_results else '') - tpl = tpl.replace('{vulnerability_reference}', vulnerability.reference if vulnerability.reference else '') - - data = { - "data": { - "type": "report", - "attributes": { - "team_handle": vulnerability.target_domain.h1_team_handle, - "title": f'{vulnerability.name} found in {vulnerability.http_url}', - "vulnerability_information": tpl, - "severity_rating": severity_value, - "impact": "More information about the impact and vulnerability can be found here: \n" + vulnerability.reference if vulnerability.reference else "NA", - } - } - } - - r = requests.post( - 'https://api.hackerone.com/v1/hackers/reports', - auth=(hackerone.username, hackerone.api_key), - json=data, - headers=headers - ) - response = r.json() - status_code = r.status_code - if status_code == 201: - vulnerability.hackerone_report_id = response['data']["id"] - vulnerability.open_status = False - vulnerability.save() - return status_code - - else: - logger.error('No team handle found.') - status_code = 111 - return status_code + """Send HackerOne vulnerability report. + + Args: + vulnerability_id (int): Vulnerability id. + + Returns: + int: HTTP response status code. 
+ """ + vulnerability = Vulnerability.objects.get(id=vulnerability_id) + severities = {v: k for k,v in NUCLEI_SEVERITY_MAP.items()} + headers = { + 'Content-Type': 'application/json', + 'Accept': 'application/json' + } + + # can only send vulnerability report if team_handle exists + if len(vulnerability.target_domain.h1_team_handle) !=0: + hackerone_query = Hackerone.objects.all() + if hackerone_query.exists(): + hackerone = Hackerone.objects.first() + severity_value = severities[vulnerability.severity] + tpl = hackerone.report_template + + # Replace syntax of report template with actual content + tpl = tpl.replace('{vulnerability_name}', vulnerability.name) + tpl = tpl.replace('{vulnerable_url}', vulnerability.http_url) + tpl = tpl.replace('{vulnerability_severity}', severity_value) + tpl = tpl.replace('{vulnerability_description}', vulnerability.description if vulnerability.description else '') + tpl = tpl.replace('{vulnerability_extracted_results}', vulnerability.extracted_results if vulnerability.extracted_results else '') + tpl = tpl.replace('{vulnerability_reference}', vulnerability.reference if vulnerability.reference else '') + + data = { + "data": { + "type": "report", + "attributes": { + "team_handle": vulnerability.target_domain.h1_team_handle, + "title": f'{vulnerability.name} found in {vulnerability.http_url}', + "vulnerability_information": tpl, + "severity_rating": severity_value, + "impact": "More information about the impact and vulnerability can be found here: \n" + vulnerability.reference if vulnerability.reference else "NA", + } + } + } + + r = requests.post( + 'https://api.hackerone.com/v1/hackers/reports', + auth=(hackerone.username, hackerone.api_key), + json=data, + headers=headers + ) + response = r.json() + status_code = r.status_code + if status_code == 201: + vulnerability.hackerone_report_id = response['data']["id"] + vulnerability.open_status = False + vulnerability.save() + return status_code + + else: + logger.error('No team handle found.') + status_code = 111 + return status_code #-------------# @@ -3236,985 +3237,985 @@ def send_hackerone_report(vulnerability_id): @app.task(name='parse_nmap_results', bind=False, queue='parse_nmap_results_queue') def parse_nmap_results(xml_file, output_file=None): - """Parse results from nmap output file. - - Args: - xml_file (str): nmap XML report file path. - - Returns: - list: List of vulnerabilities found from nmap results. - """ - with open(xml_file, encoding='utf8') as f: - content = f.read() - try: - nmap_results = xmltodict.parse(content) # parse XML to dict - except Exception as e: - logger.exception(e) - logger.error(f'Cannot parse {xml_file} to valid JSON. 
Skipping.') - return [] - - # Write JSON to output file - if output_file: - with open(output_file, 'w') as f: - json.dump(nmap_results, f, indent=4) - logger.warning(json.dumps(nmap_results, indent=4)) - hosts = ( - nmap_results - .get('nmaprun', {}) - .get('host', {}) - ) - all_vulns = [] - if isinstance(hosts, dict): - hosts = [hosts] - - for host in hosts: - # Grab hostname / IP from output - hostnames_dict = host.get('hostnames', {}) - if hostnames_dict: - # Ensure that hostnames['hostname'] is a list for consistency - hostnames_list = hostnames_dict['hostname'] if isinstance(hostnames_dict['hostname'], list) else [hostnames_dict['hostname']] - - # Extract all the @name values from the list of dictionaries - hostnames = [entry.get('@name') for entry in hostnames_list] - else: - hostnames = [host.get('address')['@addr']] - - # Iterate over each hostname for each port - for hostname in hostnames: - - # Grab ports from output - ports = host.get('ports', {}).get('port', []) - if isinstance(ports, dict): - ports = [ports] - - for port in ports: - url_vulns = [] - port_number = port['@portid'] - url = sanitize_url(f'{hostname}:{port_number}') - logger.info(f'Parsing nmap results for {hostname}:{port_number} ...') - if not port_number or not port_number.isdigit(): - continue - port_protocol = port['@protocol'] - scripts = port.get('script', []) - if isinstance(scripts, dict): - scripts = [scripts] - - for script in scripts: - script_id = script['@id'] - script_output = script['@output'] - script_output_table = script.get('table', []) - logger.debug(f'Ran nmap script "{script_id}" on {port_number}/{port_protocol}:\n{script_output}\n') - if script_id == 'vulscan': - vulns = parse_nmap_vulscan_output(script_output) - url_vulns.extend(vulns) - elif script_id == 'vulners': - vulns = parse_nmap_vulners_output(script_output) - url_vulns.extend(vulns) - # elif script_id == 'http-server-header': - # TODO: nmap can help find technologies as well using the http-server-header script - # regex = r'(\w+)/([\d.]+)\s?(?:\((\w+)\))?' - # tech_name, tech_version, tech_os = re.match(regex, test_string).groups() - # Technology.objects.get_or_create(...) - # elif script_id == 'http_csrf': - # vulns = parse_nmap_http_csrf_output(script_output) - # url_vulns.extend(vulns) - else: - logger.warning(f'Script output parsing for script "{script_id}" is not supported yet.') - - # Add URL & source to vuln - for vuln in url_vulns: - vuln['source'] = NMAP - # TODO: This should extend to any URL, not just HTTP - vuln['http_url'] = url - if 'http_path' in vuln: - vuln['http_url'] += vuln['http_path'] - all_vulns.append(vuln) - - return all_vulns + """Parse results from nmap output file. + + Args: + xml_file (str): nmap XML report file path. + + Returns: + list: List of vulnerabilities found from nmap results. + """ + with open(xml_file, encoding='utf8') as f: + content = f.read() + try: + nmap_results = xmltodict.parse(content) # parse XML to dict + except Exception as e: + logger.exception(e) + logger.error(f'Cannot parse {xml_file} to valid JSON. 
Skipping.') + return [] + + # Write JSON to output file + if output_file: + with open(output_file, 'w') as f: + json.dump(nmap_results, f, indent=4) + logger.warning(json.dumps(nmap_results, indent=4)) + hosts = ( + nmap_results + .get('nmaprun', {}) + .get('host', {}) + ) + all_vulns = [] + if isinstance(hosts, dict): + hosts = [hosts] + + for host in hosts: + # Grab hostname / IP from output + hostnames_dict = host.get('hostnames', {}) + if hostnames_dict: + # Ensure that hostnames['hostname'] is a list for consistency + hostnames_list = hostnames_dict['hostname'] if isinstance(hostnames_dict['hostname'], list) else [hostnames_dict['hostname']] + + # Extract all the @name values from the list of dictionaries + hostnames = [entry.get('@name') for entry in hostnames_list] + else: + hostnames = [host.get('address')['@addr']] + + # Iterate over each hostname for each port + for hostname in hostnames: + + # Grab ports from output + ports = host.get('ports', {}).get('port', []) + if isinstance(ports, dict): + ports = [ports] + + for port in ports: + url_vulns = [] + port_number = port['@portid'] + url = sanitize_url(f'{hostname}:{port_number}') + logger.info(f'Parsing nmap results for {hostname}:{port_number} ...') + if not port_number or not port_number.isdigit(): + continue + port_protocol = port['@protocol'] + scripts = port.get('script', []) + if isinstance(scripts, dict): + scripts = [scripts] + + for script in scripts: + script_id = script['@id'] + script_output = script['@output'] + script_output_table = script.get('table', []) + logger.debug(f'Ran nmap script "{script_id}" on {port_number}/{port_protocol}:\n{script_output}\n') + if script_id == 'vulscan': + vulns = parse_nmap_vulscan_output(script_output) + url_vulns.extend(vulns) + elif script_id == 'vulners': + vulns = parse_nmap_vulners_output(script_output) + url_vulns.extend(vulns) + # elif script_id == 'http-server-header': + # TODO: nmap can help find technologies as well using the http-server-header script + # regex = r'(\w+)/([\d.]+)\s?(?:\((\w+)\))?' + # tech_name, tech_version, tech_os = re.match(regex, test_string).groups() + # Technology.objects.get_or_create(...) + # elif script_id == 'http_csrf': + # vulns = parse_nmap_http_csrf_output(script_output) + # url_vulns.extend(vulns) + else: + logger.warning(f'Script output parsing for script "{script_id}" is not supported yet.') + + # Add URL & source to vuln + for vuln in url_vulns: + vuln['source'] = NMAP + # TODO: This should extend to any URL, not just HTTP + vuln['http_url'] = url + if 'http_path' in vuln: + vuln['http_url'] += vuln['http_path'] + all_vulns.append(vuln) + + return all_vulns def parse_nmap_http_csrf_output(script_output): - pass + pass def parse_nmap_vulscan_output(script_output): - """Parse nmap vulscan script output. - - Args: - script_output (str): Vulscan script output. - - Returns: - list: List of Vulnerability dicts. - """ - data = {} - vulns = [] - provider_name = '' - - # Sort all vulns found by provider so that we can match each provider with - # a function that pulls from its API to get more info about the - # vulnerability. 
- for line in script_output.splitlines(): - if not line: - continue - if not line.startswith('['): # provider line - if "No findings" in line: - logger.info(f"No findings: {line}") - continue - elif ' - ' in line: - provider_name, provider_url = tuple(line.split(' - ')) - data[provider_name] = {'url': provider_url.rstrip(':'), 'entries': []} - continue - else: - # Log a warning - logger.warning(f"Unexpected line format: {line}") - continue - reg = r'\[(.*)\] (.*)' - matches = re.match(reg, line) - id, title = matches.groups() - entry = {'id': id, 'title': title} - data[provider_name]['entries'].append(entry) - - logger.warning('Vulscan parsed output:') - logger.warning(pprint.pformat(data)) - - for provider_name in data: - if provider_name == 'Exploit-DB': - logger.error(f'Provider {provider_name} is not supported YET.') - pass - elif provider_name == 'IBM X-Force': - logger.error(f'Provider {provider_name} is not supported YET.') - pass - elif provider_name == 'MITRE CVE': - logger.error(f'Provider {provider_name} is not supported YET.') - for entry in data[provider_name]['entries']: - cve_id = entry['id'] - vuln = cve_to_vuln(cve_id) - vulns.append(vuln) - elif provider_name == 'OSVDB': - logger.error(f'Provider {provider_name} is not supported YET.') - pass - elif provider_name == 'OpenVAS (Nessus)': - logger.error(f'Provider {provider_name} is not supported YET.') - pass - elif provider_name == 'SecurityFocus': - logger.error(f'Provider {provider_name} is not supported YET.') - pass - elif provider_name == 'VulDB': - logger.error(f'Provider {provider_name} is not supported YET.') - pass - else: - logger.error(f'Provider {provider_name} is not supported.') - return vulns + """Parse nmap vulscan script output. + + Args: + script_output (str): Vulscan script output. + + Returns: + list: List of Vulnerability dicts. + """ + data = {} + vulns = [] + provider_name = '' + + # Sort all vulns found by provider so that we can match each provider with + # a function that pulls from its API to get more info about the + # vulnerability. 
+ for line in script_output.splitlines(): + if not line: + continue + if not line.startswith('['): # provider line + if "No findings" in line: + logger.info(f"No findings: {line}") + continue + elif ' - ' in line: + provider_name, provider_url = tuple(line.split(' - ')) + data[provider_name] = {'url': provider_url.rstrip(':'), 'entries': []} + continue + else: + # Log a warning + logger.warning(f"Unexpected line format: {line}") + continue + reg = r'\[(.*)\] (.*)' + matches = re.match(reg, line) + id, title = matches.groups() + entry = {'id': id, 'title': title} + data[provider_name]['entries'].append(entry) + + logger.warning('Vulscan parsed output:') + logger.warning(pprint.pformat(data)) + + for provider_name in data: + if provider_name == 'Exploit-DB': + logger.error(f'Provider {provider_name} is not supported YET.') + pass + elif provider_name == 'IBM X-Force': + logger.error(f'Provider {provider_name} is not supported YET.') + pass + elif provider_name == 'MITRE CVE': + logger.error(f'Provider {provider_name} is not supported YET.') + for entry in data[provider_name]['entries']: + cve_id = entry['id'] + vuln = cve_to_vuln(cve_id) + vulns.append(vuln) + elif provider_name == 'OSVDB': + logger.error(f'Provider {provider_name} is not supported YET.') + pass + elif provider_name == 'OpenVAS (Nessus)': + logger.error(f'Provider {provider_name} is not supported YET.') + pass + elif provider_name == 'SecurityFocus': + logger.error(f'Provider {provider_name} is not supported YET.') + pass + elif provider_name == 'VulDB': + logger.error(f'Provider {provider_name} is not supported YET.') + pass + else: + logger.error(f'Provider {provider_name} is not supported.') + return vulns def parse_nmap_vulners_output(script_output, url=''): - """Parse nmap vulners script output. + """Parse nmap vulners script output. - TODO: Rework this as it's currently matching all CVEs no matter the - confidence. + TODO: Rework this as it's currently matching all CVEs no matter the + confidence. - Args: - script_output (str): Script output. + Args: + script_output (str): Script output. - Returns: - list: List of found vulnerabilities. - """ - vulns = [] - # Check for CVE in script output - CVE_REGEX = re.compile(r'.*(CVE-\d\d\d\d-\d+).*') - matches = CVE_REGEX.findall(script_output) - matches = list(dict.fromkeys(matches)) - for cve_id in matches: # get CVE info - vuln = cve_to_vuln(cve_id, vuln_type='nmap-vulners-nse') - if vuln: - vulns.append(vuln) - return vulns + Returns: + list: List of found vulnerabilities. + """ + vulns = [] + # Check for CVE in script output + CVE_REGEX = re.compile(r'.*(CVE-\d\d\d\d-\d+).*') + matches = CVE_REGEX.findall(script_output) + matches = list(dict.fromkeys(matches)) + for cve_id in matches: # get CVE info + vuln = cve_to_vuln(cve_id, vuln_type='nmap-vulners-nse') + if vuln: + vulns.append(vuln) + return vulns def cve_to_vuln(cve_id, vuln_type=''): - """Search for a CVE using CVESearch and return Vulnerability data. - - Args: - cve_id (str): CVE ID in the form CVE-* - - Returns: - dict: Vulnerability dict. - """ - cve_info = CVESearch('https://cve.circl.lu').id(cve_id) - if not cve_info: - logger.error(f'Could not fetch CVE info for cve {cve_id}. 
Skipping.') - return None - vuln_cve_id = cve_info['id'] - vuln_name = vuln_cve_id - vuln_description = cve_info.get('summary', 'none').replace(vuln_cve_id, '').strip() - try: - vuln_cvss = float(cve_info.get('cvss', -1)) - except (ValueError, TypeError): - vuln_cvss = -1 - vuln_cwe_id = cve_info.get('cwe', '') - exploit_ids = cve_info.get('refmap', {}).get('exploit-db', []) - osvdb_ids = cve_info.get('refmap', {}).get('osvdb', []) - references = cve_info.get('references', []) - capec_objects = cve_info.get('capec', []) - - # Parse ovals for a better vuln name / type - ovals = cve_info.get('oval', []) - if ovals: - vuln_name = ovals[0]['title'] - vuln_type = ovals[0]['family'] - - # Set vulnerability severity based on CVSS score - vuln_severity = 'info' - if vuln_cvss < 4: - vuln_severity = 'low' - elif vuln_cvss < 7: - vuln_severity = 'medium' - elif vuln_cvss < 9: - vuln_severity = 'high' - else: - vuln_severity = 'critical' - - # Build console warning message - msg = f'{vuln_name} | {vuln_severity.upper()} | {vuln_cve_id} | {vuln_cwe_id} | {vuln_cvss}' - for id in osvdb_ids: - msg += f'\n\tOSVDB: {id}' - for exploit_id in exploit_ids: - msg += f'\n\tEXPLOITDB: {exploit_id}' - logger.warning(msg) - vuln = { - 'name': vuln_name, - 'type': vuln_type, - 'severity': NUCLEI_SEVERITY_MAP[vuln_severity], - 'description': vuln_description, - 'cvss_score': vuln_cvss, - 'references': references, - 'cve_ids': [vuln_cve_id], - 'cwe_ids': [vuln_cwe_id] - } - return vuln + """Search for a CVE using CVESearch and return Vulnerability data. + + Args: + cve_id (str): CVE ID in the form CVE-* + + Returns: + dict: Vulnerability dict. + """ + cve_info = CVESearch('https://cve.circl.lu').id(cve_id) + if not cve_info: + logger.error(f'Could not fetch CVE info for cve {cve_id}. 
Skipping.') + return None + vuln_cve_id = cve_info['id'] + vuln_name = vuln_cve_id + vuln_description = cve_info.get('summary', 'none').replace(vuln_cve_id, '').strip() + try: + vuln_cvss = float(cve_info.get('cvss', -1)) + except (ValueError, TypeError): + vuln_cvss = -1 + vuln_cwe_id = cve_info.get('cwe', '') + exploit_ids = cve_info.get('refmap', {}).get('exploit-db', []) + osvdb_ids = cve_info.get('refmap', {}).get('osvdb', []) + references = cve_info.get('references', []) + capec_objects = cve_info.get('capec', []) + + # Parse ovals for a better vuln name / type + ovals = cve_info.get('oval', []) + if ovals: + vuln_name = ovals[0]['title'] + vuln_type = ovals[0]['family'] + + # Set vulnerability severity based on CVSS score + vuln_severity = 'info' + if vuln_cvss < 4: + vuln_severity = 'low' + elif vuln_cvss < 7: + vuln_severity = 'medium' + elif vuln_cvss < 9: + vuln_severity = 'high' + else: + vuln_severity = 'critical' + + # Build console warning message + msg = f'{vuln_name} | {vuln_severity.upper()} | {vuln_cve_id} | {vuln_cwe_id} | {vuln_cvss}' + for id in osvdb_ids: + msg += f'\n\tOSVDB: {id}' + for exploit_id in exploit_ids: + msg += f'\n\tEXPLOITDB: {exploit_id}' + logger.warning(msg) + vuln = { + 'name': vuln_name, + 'type': vuln_type, + 'severity': NUCLEI_SEVERITY_MAP[vuln_severity], + 'description': vuln_description, + 'cvss_score': vuln_cvss, + 'references': references, + 'cve_ids': [vuln_cve_id], + 'cwe_ids': [vuln_cwe_id] + } + return vuln def parse_s3scanner_result(line): - ''' - Parses and returns s3Scanner Data - ''' - bucket = line['bucket'] - return { - 'name': bucket['name'], - 'region': bucket['region'], - 'provider': bucket['provider'], - 'owner_display_name': bucket['owner_display_name'], - 'owner_id': bucket['owner_id'], - 'perm_auth_users_read': bucket['perm_auth_users_read'], - 'perm_auth_users_write': bucket['perm_auth_users_write'], - 'perm_auth_users_read_acl': bucket['perm_auth_users_read_acl'], - 'perm_auth_users_write_acl': bucket['perm_auth_users_write_acl'], - 'perm_auth_users_full_control': bucket['perm_auth_users_full_control'], - 'perm_all_users_read': bucket['perm_all_users_read'], - 'perm_all_users_write': bucket['perm_all_users_write'], - 'perm_all_users_read_acl': bucket['perm_all_users_read_acl'], - 'perm_all_users_write_acl': bucket['perm_all_users_write_acl'], - 'perm_all_users_full_control': bucket['perm_all_users_full_control'], - 'num_objects': bucket['num_objects'], - 'size': bucket['bucket_size'] - } + ''' + Parses and returns s3Scanner Data + ''' + bucket = line['bucket'] + return { + 'name': bucket['name'], + 'region': bucket['region'], + 'provider': bucket['provider'], + 'owner_display_name': bucket['owner_display_name'], + 'owner_id': bucket['owner_id'], + 'perm_auth_users_read': bucket['perm_auth_users_read'], + 'perm_auth_users_write': bucket['perm_auth_users_write'], + 'perm_auth_users_read_acl': bucket['perm_auth_users_read_acl'], + 'perm_auth_users_write_acl': bucket['perm_auth_users_write_acl'], + 'perm_auth_users_full_control': bucket['perm_auth_users_full_control'], + 'perm_all_users_read': bucket['perm_all_users_read'], + 'perm_all_users_write': bucket['perm_all_users_write'], + 'perm_all_users_read_acl': bucket['perm_all_users_read_acl'], + 'perm_all_users_write_acl': bucket['perm_all_users_write_acl'], + 'perm_all_users_full_control': bucket['perm_all_users_full_control'], + 'num_objects': bucket['num_objects'], + 'size': bucket['bucket_size'] + } def parse_nuclei_result(line): - """Parse results from nuclei JSON output. 
- - Args: - line (dict): Nuclei JSON line output. - - Returns: - dict: Vulnerability data. - """ - return { - 'name': line['info'].get('name', ''), - 'type': line['type'], - 'severity': NUCLEI_SEVERITY_MAP[line['info'].get('severity', 'unknown')], - 'template': line['template'], - 'template_url': line['template-url'], - 'template_id': line['template-id'], - 'description': line['info'].get('description', ''), - 'matcher_name': line.get('matcher-name', ''), - 'curl_command': line.get('curl-command'), - 'request': html.escape(line.get('request')), - 'response': html.escape(line.get('response')), - 'extracted_results': line.get('extracted-results', []), - 'cvss_metrics': line['info'].get('classification', {}).get('cvss-metrics', ''), - 'cvss_score': line['info'].get('classification', {}).get('cvss-score'), - 'cve_ids': line['info'].get('classification', {}).get('cve_id', []) or [], - 'cwe_ids': line['info'].get('classification', {}).get('cwe_id', []) or [], - 'references': line['info'].get('reference', []) or [], - 'tags': line['info'].get('tags', []), - 'source': NUCLEI, - } + """Parse results from nuclei JSON output. + + Args: + line (dict): Nuclei JSON line output. + + Returns: + dict: Vulnerability data. + """ + return { + 'name': line['info'].get('name', ''), + 'type': line['type'], + 'severity': NUCLEI_SEVERITY_MAP[line['info'].get('severity', 'unknown')], + 'template': line['template'], + 'template_url': line['template-url'], + 'template_id': line['template-id'], + 'description': line['info'].get('description', ''), + 'matcher_name': line.get('matcher-name', ''), + 'curl_command': line.get('curl-command'), + 'request': html.escape(line.get('request')), + 'response': html.escape(line.get('response')), + 'extracted_results': line.get('extracted-results', []), + 'cvss_metrics': line['info'].get('classification', {}).get('cvss-metrics', ''), + 'cvss_score': line['info'].get('classification', {}).get('cvss-score'), + 'cve_ids': line['info'].get('classification', {}).get('cve_id', []) or [], + 'cwe_ids': line['info'].get('classification', {}).get('cwe_id', []) or [], + 'references': line['info'].get('reference', []) or [], + 'tags': line['info'].get('tags', []), + 'source': NUCLEI, + } def parse_dalfox_result(line): - """Parse results from nuclei JSON output. + """Parse results from nuclei JSON output. - Args: - line (dict): Nuclei JSON line output. + Args: + line (dict): Nuclei JSON line output. - Returns: - dict: Vulnerability data. - """ + Returns: + dict: Vulnerability data. + """ - description = '' - description += f" Evidence: {line.get('evidence')}
" if line.get('evidence') else '' - description += f" Message: {line.get('message')}
" if line.get('message') else '' - description += f" Payload: {line.get('message_str')}
" if line.get('message_str') else '' - description += f" Vulnerable Parameter: {line.get('param')}
" if line.get('param') else '' + description = '' + description += f" Evidence: {line.get('evidence')}
" if line.get('evidence') else '' + description += f" Message: {line.get('message')}
" if line.get('message') else '' + description += f" Payload: {line.get('message_str')}
" if line.get('message_str') else '' + description += f" Vulnerable Parameter: {line.get('param')}
" if line.get('param') else '' - return { - 'name': 'XSS (Cross Site Scripting)', - 'type': 'XSS', - 'severity': DALFOX_SEVERITY_MAP[line.get('severity', 'unknown')], - 'description': description, - 'source': DALFOX, - 'cwe_ids': [line.get('cwe')] - } + return { + 'name': 'XSS (Cross Site Scripting)', + 'type': 'XSS', + 'severity': DALFOX_SEVERITY_MAP[line.get('severity', 'unknown')], + 'description': description, + 'source': DALFOX, + 'cwe_ids': [line.get('cwe')] + } def parse_crlfuzz_result(url): - """Parse CRLF results + """Parse CRLF results - Args: - url (str): CRLF Vulnerable URL + Args: + url (str): CRLF Vulnerable URL - Returns: - dict: Vulnerability data. - """ + Returns: + dict: Vulnerability data. + """ - return { - 'name': 'CRLF (HTTP Response Splitting)', - 'type': 'CRLF', - 'severity': 2, - 'description': 'A CRLF (HTTP Response Splitting) vulnerability has been discovered.', - 'source': CRLFUZZ, - } + return { + 'name': 'CRLF (HTTP Response Splitting)', + 'type': 'CRLF', + 'severity': 2, + 'description': 'A CRLF (HTTP Response Splitting) vulnerability has been discovered.', + 'source': CRLFUZZ, + } def record_exists(model, data, exclude_keys=[]): - """ - Check if a record already exists in the database based on the given data. + """ + Check if a record already exists in the database based on the given data. - Args: - model (django.db.models.Model): The Django model to check against. - data (dict): Data dictionary containing fields and values. - exclude_keys (list): List of keys to exclude from the lookup. + Args: + model (django.db.models.Model): The Django model to check against. + data (dict): Data dictionary containing fields and values. + exclude_keys (list): List of keys to exclude from the lookup. - Returns: - bool: True if the record exists, False otherwise. - """ + Returns: + bool: True if the record exists, False otherwise. + """ - # Extract the keys that will be used for the lookup - lookup_fields = {key: data[key] for key in data if key not in exclude_keys} + # Extract the keys that will be used for the lookup + lookup_fields = {key: data[key] for key in data if key not in exclude_keys} - # Return True if a record exists based on the lookup fields, False otherwise - return model.objects.filter(**lookup_fields).exists() + # Return True if a record exists based on the lookup fields, False otherwise + return model.objects.filter(**lookup_fields).exists() @app.task(name='geo_localize', bind=False, queue='geo_localize_queue') def geo_localize(host, ip_id=None): - """Uses geoiplookup to find location associated with host. - - Args: - host (str): Hostname. - ip_id (int): IpAddress object id. - - Returns: - startScan.models.CountryISO: CountryISO object from DB or None. - """ - if validators.ipv6(host): - logger.info(f'Ipv6 "{host}" is not supported by geoiplookup. Skipping.') - return None - cmd = f'geoiplookup {host}' - _, out = run_command(cmd) - if 'IP Address not found' not in out and "can't resolve hostname" not in out: - country_iso = out.split(':')[1].strip().split(',')[0] - country_name = out.split(':')[1].strip().split(',')[1].strip() - geo_object, _ = CountryISO.objects.get_or_create( - iso=country_iso, - name=country_name - ) - geo_json = { - 'iso': country_iso, - 'name': country_name - } - if ip_id: - ip = IpAddress.objects.get(pk=ip_id) - ip.geo_iso = geo_object - ip.save() - return geo_json - logger.info(f'Geo IP lookup failed for host "{host}"') - return None + """Uses geoiplookup to find location associated with host. + + Args: + host (str): Hostname. 
+ ip_id (int): IpAddress object id. + + Returns: + startScan.models.CountryISO: CountryISO object from DB or None. + """ + if validators.ipv6(host): + logger.info(f'Ipv6 "{host}" is not supported by geoiplookup. Skipping.') + return None + cmd = f'geoiplookup {host}' + _, out = run_command(cmd) + if 'IP Address not found' not in out and "can't resolve hostname" not in out: + country_iso = out.split(':')[1].strip().split(',')[0] + country_name = out.split(':')[1].strip().split(',')[1].strip() + geo_object, _ = CountryISO.objects.get_or_create( + iso=country_iso, + name=country_name + ) + geo_json = { + 'iso': country_iso, + 'name': country_name + } + if ip_id: + ip = IpAddress.objects.get(pk=ip_id) + ip.geo_iso = geo_object + ip.save() + return geo_json + logger.info(f'Geo IP lookup failed for host "{host}"') + return None @app.task(name='query_whois', bind=False, queue='query_whois_queue') def query_whois(ip_domain, force_reload_whois=False): - """Query WHOIS information for an IP or a domain name. - - Args: - ip_domain (str): IP address or domain name. - save_domain (bool): Whether to save domain or not, default False - Returns: - dict: WHOIS information. - """ - if not force_reload_whois and Domain.objects.filter(name=ip_domain).exists() and Domain.objects.get(name=ip_domain).domain_info: - domain = Domain.objects.get(name=ip_domain) - if not domain.insert_date: - domain.insert_date = timezone.now() - domain.save() - domain_info_db = domain.domain_info - domain_info = DottedDict( - dnssec=domain_info_db.dnssec, - created=domain_info_db.created, - updated=domain_info_db.updated, - expires=domain_info_db.expires, - geolocation_iso=domain_info_db.geolocation_iso, - status=[status['name'] for status in DomainWhoisStatusSerializer(domain_info_db.status, many=True).data], - whois_server=domain_info_db.whois_server, - ns_records=[ns['name'] for ns in NameServersSerializer(domain_info_db.name_servers, many=True).data], - registrar_name=domain_info_db.registrar.name, - registrar_phone=domain_info_db.registrar.phone, - registrar_email=domain_info_db.registrar.email, - registrar_url=domain_info_db.registrar.url, - registrant_name=domain_info_db.registrant.name, - registrant_id=domain_info_db.registrant.id_str, - registrant_organization=domain_info_db.registrant.organization, - registrant_city=domain_info_db.registrant.city, - registrant_state=domain_info_db.registrant.state, - registrant_zip_code=domain_info_db.registrant.zip_code, - registrant_country=domain_info_db.registrant.country, - registrant_phone=domain_info_db.registrant.phone, - registrant_fax=domain_info_db.registrant.fax, - registrant_email=domain_info_db.registrant.email, - registrant_address=domain_info_db.registrant.address, - admin_name=domain_info_db.admin.name, - admin_id=domain_info_db.admin.id_str, - admin_organization=domain_info_db.admin.organization, - admin_city=domain_info_db.admin.city, - admin_state=domain_info_db.admin.state, - admin_zip_code=domain_info_db.admin.zip_code, - admin_country=domain_info_db.admin.country, - admin_phone=domain_info_db.admin.phone, - admin_fax=domain_info_db.admin.fax, - admin_email=domain_info_db.admin.email, - admin_address=domain_info_db.admin.address, - tech_name=domain_info_db.tech.name, - tech_id=domain_info_db.tech.id_str, - tech_organization=domain_info_db.tech.organization, - tech_city=domain_info_db.tech.city, - tech_state=domain_info_db.tech.state, - tech_zip_code=domain_info_db.tech.zip_code, - tech_country=domain_info_db.tech.country, - tech_phone=domain_info_db.tech.phone, - 
tech_fax=domain_info_db.tech.fax, - tech_email=domain_info_db.tech.email, - tech_address=domain_info_db.tech.address, - related_tlds=[domain['name'] for domain in RelatedDomainSerializer(domain_info_db.related_tlds, many=True).data], - related_domains=[domain['name'] for domain in RelatedDomainSerializer(domain_info_db.related_domains, many=True).data], - historical_ips=[ip for ip in HistoricalIPSerializer(domain_info_db.historical_ips, many=True).data], - ) - if domain_info_db.dns_records: - a_records = [] - txt_records = [] - mx_records = [] - dns_records = [{'name': dns['name'], 'type': dns['type']} for dns in DomainDNSRecordSerializer(domain_info_db.dns_records, many=True).data] - for dns in dns_records: - if dns['type'] == 'a': - a_records.append(dns['name']) - elif dns['type'] == 'txt': - txt_records.append(dns['name']) - elif dns['type'] == 'mx': - mx_records.append(dns['name']) - domain_info.a_records = a_records - domain_info.txt_records = txt_records - domain_info.mx_records = mx_records - else: - logger.info(f'Domain info for "{ip_domain}" not found in DB, querying whois') - domain_info = DottedDict() - # find domain historical ip - try: - historical_ips = get_domain_historical_ip_address(ip_domain) - domain_info.historical_ips = historical_ips - except Exception as e: - logger.error(f'HistoricalIP for {ip_domain} not found!\nError: {str(e)}') - historical_ips = [] - # find associated domains using ip_domain - try: - related_domains = reverse_whois(ip_domain.split('.')[0]) - except Exception as e: - logger.error(f'Associated domain not found for {ip_domain}\nError: {str(e)}') - similar_domains = [] - # find related tlds using TLSx - try: - related_tlds = [] - output_path = '/tmp/ip_domain_tlsx.txt' - tlsx_command = f'tlsx -san -cn -silent -ro -host {ip_domain} -o {output_path}' - run_command( - tlsx_command, - shell=True, - ) - tlsx_output = [] - with open(output_path) as f: - tlsx_output = f.readlines() - - tldextract_target = tldextract.extract(ip_domain) - for doms in tlsx_output: - doms = doms.strip() - tldextract_res = tldextract.extract(doms) - if ip_domain != doms and tldextract_res.domain == tldextract_target.domain and tldextract_res.subdomain == '': - related_tlds.append(doms) - - related_tlds = list(set(related_tlds)) - domain_info.related_tlds = related_tlds - except Exception as e: - logger.error(f'Associated domain not found for {ip_domain}\nError: {str(e)}') - similar_domains = [] - - related_domains_list = [] - if Domain.objects.filter(name=ip_domain).exists(): - domain = Domain.objects.get(name=ip_domain) - db_domain_info = domain.domain_info if domain.domain_info else DomainInfo() - db_domain_info.save() - for _domain in related_domains: - domain_related = RelatedDomain.objects.get_or_create( - name=_domain['name'], - )[0] - db_domain_info.related_domains.add(domain_related) - related_domains_list.append(_domain['name']) - - for _domain in related_tlds: - domain_related = RelatedDomain.objects.get_or_create( - name=_domain, - )[0] - db_domain_info.related_tlds.add(domain_related) - - for _ip in historical_ips: - historical_ip = HistoricalIP.objects.get_or_create( - ip=_ip['ip'], - owner=_ip['owner'], - location=_ip['location'], - last_seen=_ip['last_seen'], - )[0] - db_domain_info.historical_ips.add(historical_ip) - domain.domain_info = db_domain_info - domain.save() - - command = f'netlas host {ip_domain} -f json' - # check if netlas key is provided - netlas_key = get_netlas_key() - command += f' -a {netlas_key}' if netlas_key else '' - - result = 
subprocess.check_output(command.split()).decode('utf-8') - if 'Failed to parse response data' in result: - # do fallback - return { - 'status': False, - 'ip_domain': ip_domain, - 'result': "Netlas limit exceeded.", - 'message': 'Netlas limit exceeded.' - } - try: - result = json.loads(result) - logger.info(result) - whois = result.get('whois') if result.get('whois') else {} - - domain_info.created = whois.get('created_date') - domain_info.expires = whois.get('expiration_date') - domain_info.updated = whois.get('updated_date') - domain_info.whois_server = whois.get('whois_server') - - - if 'registrant' in whois: - registrant = whois.get('registrant') - domain_info.registrant_name = registrant.get('name') - domain_info.registrant_country = registrant.get('country') - domain_info.registrant_id = registrant.get('id') - domain_info.registrant_state = registrant.get('province') - domain_info.registrant_city = registrant.get('city') - domain_info.registrant_phone = registrant.get('phone') - domain_info.registrant_address = registrant.get('street') - domain_info.registrant_organization = registrant.get('organization') - domain_info.registrant_fax = registrant.get('fax') - domain_info.registrant_zip_code = registrant.get('postal_code') - email_search = EMAIL_REGEX.search(str(registrant.get('email'))) - field_content = email_search.group(0) if email_search else None - domain_info.registrant_email = field_content - - if 'administrative' in whois: - administrative = whois.get('administrative') - domain_info.admin_name = administrative.get('name') - domain_info.admin_country = administrative.get('country') - domain_info.admin_id = administrative.get('id') - domain_info.admin_state = administrative.get('province') - domain_info.admin_city = administrative.get('city') - domain_info.admin_phone = administrative.get('phone') - domain_info.admin_address = administrative.get('street') - domain_info.admin_organization = administrative.get('organization') - domain_info.admin_fax = administrative.get('fax') - domain_info.admin_zip_code = administrative.get('postal_code') - mail_search = EMAIL_REGEX.search(str(administrative.get('email'))) - field_content = email_search.group(0) if email_search else None - domain_info.admin_email = field_content - - if 'technical' in whois: - technical = whois.get('technical') - domain_info.tech_name = technical.get('name') - domain_info.tech_country = technical.get('country') - domain_info.tech_state = technical.get('province') - domain_info.tech_id = technical.get('id') - domain_info.tech_city = technical.get('city') - domain_info.tech_phone = technical.get('phone') - domain_info.tech_address = technical.get('street') - domain_info.tech_organization = technical.get('organization') - domain_info.tech_fax = technical.get('fax') - domain_info.tech_zip_code = technical.get('postal_code') - mail_search = EMAIL_REGEX.search(str(technical.get('email'))) - field_content = email_search.group(0) if email_search else None - domain_info.tech_email = field_content - - if 'dns' in result: - dns = result.get('dns') - domain_info.mx_records = dns.get('mx') - domain_info.txt_records = dns.get('txt') - domain_info.a_records = dns.get('a') - - domain_info.ns_records = whois.get('name_servers') - domain_info.dnssec = True if whois.get('dnssec') else False - domain_info.status = whois.get('status') - - if 'registrar' in whois: - registrar = whois.get('registrar') - domain_info.registrar_name = registrar.get('name') - domain_info.registrar_email = registrar.get('email') - 
domain_info.registrar_phone = registrar.get('phone') - domain_info.registrar_url = registrar.get('url') - - # find associated domains if registrant email is found - related_domains = reverse_whois(domain_info.get('registrant_email')) if domain_info.get('registrant_email') else [] - for _domain in related_domains: - related_domains_list.append(_domain['name']) - - # remove duplicate domains from related domains list - related_domains_list = list(set(related_domains_list)) - domain_info.related_domains = related_domains_list - - # save to db if domain exists - if Domain.objects.filter(name=ip_domain).exists(): - domain = Domain.objects.get(name=ip_domain) - db_domain_info = domain.domain_info if domain.domain_info else DomainInfo() - db_domain_info.save() - for _domain in related_domains: - domain_rel = RelatedDomain.objects.get_or_create( - name=_domain['name'], - )[0] - db_domain_info.related_domains.add(domain_rel) - - db_domain_info.dnssec = domain_info.get('dnssec') - #dates - db_domain_info.created = domain_info.get('created') - db_domain_info.updated = domain_info.get('updated') - db_domain_info.expires = domain_info.get('expires') - #registrar - db_domain_info.registrar = Registrar.objects.get_or_create( - name=domain_info.get('registrar_name'), - email=domain_info.get('registrar_email'), - phone=domain_info.get('registrar_phone'), - url=domain_info.get('registrar_url'), - )[0] - db_domain_info.registrant = DomainRegistration.objects.get_or_create( - name=domain_info.get('registrant_name'), - organization=domain_info.get('registrant_organization'), - address=domain_info.get('registrant_address'), - city=domain_info.get('registrant_city'), - state=domain_info.get('registrant_state'), - zip_code=domain_info.get('registrant_zip_code'), - country=domain_info.get('registrant_country'), - email=domain_info.get('registrant_email'), - phone=domain_info.get('registrant_phone'), - fax=domain_info.get('registrant_fax'), - id_str=domain_info.get('registrant_id'), - )[0] - db_domain_info.admin = DomainRegistration.objects.get_or_create( - name=domain_info.get('admin_name'), - organization=domain_info.get('admin_organization'), - address=domain_info.get('admin_address'), - city=domain_info.get('admin_city'), - state=domain_info.get('admin_state'), - zip_code=domain_info.get('admin_zip_code'), - country=domain_info.get('admin_country'), - email=domain_info.get('admin_email'), - phone=domain_info.get('admin_phone'), - fax=domain_info.get('admin_fax'), - id_str=domain_info.get('admin_id'), - )[0] - db_domain_info.tech = DomainRegistration.objects.get_or_create( - name=domain_info.get('tech_name'), - organization=domain_info.get('tech_organization'), - address=domain_info.get('tech_address'), - city=domain_info.get('tech_city'), - state=domain_info.get('tech_state'), - zip_code=domain_info.get('tech_zip_code'), - country=domain_info.get('tech_country'), - email=domain_info.get('tech_email'), - phone=domain_info.get('tech_phone'), - fax=domain_info.get('tech_fax'), - id_str=domain_info.get('tech_id'), - )[0] - for status in domain_info.get('status') or []: - _status = WhoisStatus.objects.get_or_create( - name=status - )[0] - _status.save() - db_domain_info.status.add(_status) - - for ns in domain_info.get('ns_records') or []: - _ns = NameServer.objects.get_or_create( - name=ns - )[0] - _ns.save() - db_domain_info.name_servers.add(_ns) - - for a in domain_info.get('a_records') or []: - _a = DNSRecord.objects.get_or_create( - name=a, - type='a' - )[0] - _a.save() - db_domain_info.dns_records.add(_a) - 
for mx in domain_info.get('mx_records') or []: - _mx = DNSRecord.objects.get_or_create( - name=mx, - type='mx' - )[0] - _mx.save() - db_domain_info.dns_records.add(_mx) - for txt in domain_info.get('txt_records') or []: - _txt = DNSRecord.objects.get_or_create( - name=txt, - type='txt' - )[0] - _txt.save() - db_domain_info.dns_records.add(_txt) - - db_domain_info.geolocation_iso = domain_info.get('registrant_country') - db_domain_info.whois_server = domain_info.get('whois_server') - db_domain_info.save() - domain.domain_info = db_domain_info - domain.save() - - except Exception as e: - return { - 'status': False, - 'ip_domain': ip_domain, - 'result': "unable to fetch records from WHOIS database.", - 'message': str(e) - } - - return { - 'status': True, - 'ip_domain': ip_domain, - 'dnssec': domain_info.get('dnssec'), - 'created': domain_info.get('created'), - 'updated': domain_info.get('updated'), - 'expires': domain_info.get('expires'), - 'geolocation_iso': domain_info.get('registrant_country'), - 'domain_statuses': domain_info.get('status'), - 'whois_server': domain_info.get('whois_server'), - 'dns': { - 'a': domain_info.get('a_records'), - 'mx': domain_info.get('mx_records'), - 'txt': domain_info.get('txt_records'), - }, - 'registrar': { - 'name': domain_info.get('registrar_name'), - 'phone': domain_info.get('registrar_phone'), - 'email': domain_info.get('registrar_email'), - 'url': domain_info.get('registrar_url'), - }, - 'registrant': { - 'name': domain_info.get('registrant_name'), - 'id': domain_info.get('registrant_id'), - 'organization': domain_info.get('registrant_organization'), - 'address': domain_info.get('registrant_address'), - 'city': domain_info.get('registrant_city'), - 'state': domain_info.get('registrant_state'), - 'zipcode': domain_info.get('registrant_zip_code'), - 'country': domain_info.get('registrant_country'), - 'phone': domain_info.get('registrant_phone'), - 'fax': domain_info.get('registrant_fax'), - 'email': domain_info.get('registrant_email'), - }, - 'admin': { - 'name': domain_info.get('admin_name'), - 'id': domain_info.get('admin_id'), - 'organization': domain_info.get('admin_organization'), - 'address':domain_info.get('admin_address'), - 'city': domain_info.get('admin_city'), - 'state': domain_info.get('admin_state'), - 'zipcode': domain_info.get('admin_zip_code'), - 'country': domain_info.get('admin_country'), - 'phone': domain_info.get('admin_phone'), - 'fax': domain_info.get('admin_fax'), - 'email': domain_info.get('admin_email'), - }, - 'technical_contact': { - 'name': domain_info.get('tech_name'), - 'id': domain_info.get('tech_id'), - 'organization': domain_info.get('tech_organization'), - 'address': domain_info.get('tech_address'), - 'city': domain_info.get('tech_city'), - 'state': domain_info.get('tech_state'), - 'zipcode': domain_info.get('tech_zip_code'), - 'country': domain_info.get('tech_country'), - 'phone': domain_info.get('tech_phone'), - 'fax': domain_info.get('tech_fax'), - 'email': domain_info.get('tech_email'), - }, - 'nameservers': domain_info.get('ns_records'), - # 'similar_domains': domain_info.get('similar_domains'), - 'related_domains': domain_info.get('related_domains'), - 'related_tlds': domain_info.get('related_tlds'), - 'historical_ips': domain_info.get('historical_ips'), - } + """Query WHOIS information for an IP or a domain name. + + Args: + ip_domain (str): IP address or domain name. + save_domain (bool): Whether to save domain or not, default False + Returns: + dict: WHOIS information. 
+ """ + if not force_reload_whois and Domain.objects.filter(name=ip_domain).exists() and Domain.objects.get(name=ip_domain).domain_info: + domain = Domain.objects.get(name=ip_domain) + if not domain.insert_date: + domain.insert_date = timezone.now() + domain.save() + domain_info_db = domain.domain_info + domain_info = DottedDict( + dnssec=domain_info_db.dnssec, + created=domain_info_db.created, + updated=domain_info_db.updated, + expires=domain_info_db.expires, + geolocation_iso=domain_info_db.geolocation_iso, + status=[status['name'] for status in DomainWhoisStatusSerializer(domain_info_db.status, many=True).data], + whois_server=domain_info_db.whois_server, + ns_records=[ns['name'] for ns in NameServersSerializer(domain_info_db.name_servers, many=True).data], + registrar_name=domain_info_db.registrar.name, + registrar_phone=domain_info_db.registrar.phone, + registrar_email=domain_info_db.registrar.email, + registrar_url=domain_info_db.registrar.url, + registrant_name=domain_info_db.registrant.name, + registrant_id=domain_info_db.registrant.id_str, + registrant_organization=domain_info_db.registrant.organization, + registrant_city=domain_info_db.registrant.city, + registrant_state=domain_info_db.registrant.state, + registrant_zip_code=domain_info_db.registrant.zip_code, + registrant_country=domain_info_db.registrant.country, + registrant_phone=domain_info_db.registrant.phone, + registrant_fax=domain_info_db.registrant.fax, + registrant_email=domain_info_db.registrant.email, + registrant_address=domain_info_db.registrant.address, + admin_name=domain_info_db.admin.name, + admin_id=domain_info_db.admin.id_str, + admin_organization=domain_info_db.admin.organization, + admin_city=domain_info_db.admin.city, + admin_state=domain_info_db.admin.state, + admin_zip_code=domain_info_db.admin.zip_code, + admin_country=domain_info_db.admin.country, + admin_phone=domain_info_db.admin.phone, + admin_fax=domain_info_db.admin.fax, + admin_email=domain_info_db.admin.email, + admin_address=domain_info_db.admin.address, + tech_name=domain_info_db.tech.name, + tech_id=domain_info_db.tech.id_str, + tech_organization=domain_info_db.tech.organization, + tech_city=domain_info_db.tech.city, + tech_state=domain_info_db.tech.state, + tech_zip_code=domain_info_db.tech.zip_code, + tech_country=domain_info_db.tech.country, + tech_phone=domain_info_db.tech.phone, + tech_fax=domain_info_db.tech.fax, + tech_email=domain_info_db.tech.email, + tech_address=domain_info_db.tech.address, + related_tlds=[domain['name'] for domain in RelatedDomainSerializer(domain_info_db.related_tlds, many=True).data], + related_domains=[domain['name'] for domain in RelatedDomainSerializer(domain_info_db.related_domains, many=True).data], + historical_ips=[ip for ip in HistoricalIPSerializer(domain_info_db.historical_ips, many=True).data], + ) + if domain_info_db.dns_records: + a_records = [] + txt_records = [] + mx_records = [] + dns_records = [{'name': dns['name'], 'type': dns['type']} for dns in DomainDNSRecordSerializer(domain_info_db.dns_records, many=True).data] + for dns in dns_records: + if dns['type'] == 'a': + a_records.append(dns['name']) + elif dns['type'] == 'txt': + txt_records.append(dns['name']) + elif dns['type'] == 'mx': + mx_records.append(dns['name']) + domain_info.a_records = a_records + domain_info.txt_records = txt_records + domain_info.mx_records = mx_records + else: + logger.info(f'Domain info for "{ip_domain}" not found in DB, querying whois') + domain_info = DottedDict() + # find domain historical ip + try: + 
historical_ips = get_domain_historical_ip_address(ip_domain) + domain_info.historical_ips = historical_ips + except Exception as e: + logger.error(f'HistoricalIP for {ip_domain} not found!\nError: {str(e)}') + historical_ips = [] + # find associated domains using ip_domain + try: + related_domains = reverse_whois(ip_domain.split('.')[0]) + except Exception as e: + logger.error(f'Associated domain not found for {ip_domain}\nError: {str(e)}') + similar_domains = [] + # find related tlds using TLSx + try: + related_tlds = [] + output_path = '/tmp/ip_domain_tlsx.txt' + tlsx_command = f'tlsx -san -cn -silent -ro -host {ip_domain} -o {output_path}' + run_command( + tlsx_command, + shell=True, + ) + tlsx_output = [] + with open(output_path) as f: + tlsx_output = f.readlines() + + tldextract_target = tldextract.extract(ip_domain) + for doms in tlsx_output: + doms = doms.strip() + tldextract_res = tldextract.extract(doms) + if ip_domain != doms and tldextract_res.domain == tldextract_target.domain and tldextract_res.subdomain == '': + related_tlds.append(doms) + + related_tlds = list(set(related_tlds)) + domain_info.related_tlds = related_tlds + except Exception as e: + logger.error(f'Associated domain not found for {ip_domain}\nError: {str(e)}') + similar_domains = [] + + related_domains_list = [] + if Domain.objects.filter(name=ip_domain).exists(): + domain = Domain.objects.get(name=ip_domain) + db_domain_info = domain.domain_info if domain.domain_info else DomainInfo() + db_domain_info.save() + for _domain in related_domains: + domain_related = RelatedDomain.objects.get_or_create( + name=_domain['name'], + )[0] + db_domain_info.related_domains.add(domain_related) + related_domains_list.append(_domain['name']) + + for _domain in related_tlds: + domain_related = RelatedDomain.objects.get_or_create( + name=_domain, + )[0] + db_domain_info.related_tlds.add(domain_related) + + for _ip in historical_ips: + historical_ip = HistoricalIP.objects.get_or_create( + ip=_ip['ip'], + owner=_ip['owner'], + location=_ip['location'], + last_seen=_ip['last_seen'], + )[0] + db_domain_info.historical_ips.add(historical_ip) + domain.domain_info = db_domain_info + domain.save() + + command = f'netlas host {ip_domain} -f json' + # check if netlas key is provided + netlas_key = get_netlas_key() + command += f' -a {netlas_key}' if netlas_key else '' + + result = subprocess.check_output(command.split()).decode('utf-8') + if 'Failed to parse response data' in result: + # do fallback + return { + 'status': False, + 'ip_domain': ip_domain, + 'result': "Netlas limit exceeded.", + 'message': 'Netlas limit exceeded.' 
+ } + try: + result = json.loads(result) + logger.info(result) + whois = result.get('whois') if result.get('whois') else {} + + domain_info.created = whois.get('created_date') + domain_info.expires = whois.get('expiration_date') + domain_info.updated = whois.get('updated_date') + domain_info.whois_server = whois.get('whois_server') + + + if 'registrant' in whois: + registrant = whois.get('registrant') + domain_info.registrant_name = registrant.get('name') + domain_info.registrant_country = registrant.get('country') + domain_info.registrant_id = registrant.get('id') + domain_info.registrant_state = registrant.get('province') + domain_info.registrant_city = registrant.get('city') + domain_info.registrant_phone = registrant.get('phone') + domain_info.registrant_address = registrant.get('street') + domain_info.registrant_organization = registrant.get('organization') + domain_info.registrant_fax = registrant.get('fax') + domain_info.registrant_zip_code = registrant.get('postal_code') + email_search = EMAIL_REGEX.search(str(registrant.get('email'))) + field_content = email_search.group(0) if email_search else None + domain_info.registrant_email = field_content + + if 'administrative' in whois: + administrative = whois.get('administrative') + domain_info.admin_name = administrative.get('name') + domain_info.admin_country = administrative.get('country') + domain_info.admin_id = administrative.get('id') + domain_info.admin_state = administrative.get('province') + domain_info.admin_city = administrative.get('city') + domain_info.admin_phone = administrative.get('phone') + domain_info.admin_address = administrative.get('street') + domain_info.admin_organization = administrative.get('organization') + domain_info.admin_fax = administrative.get('fax') + domain_info.admin_zip_code = administrative.get('postal_code') + mail_search = EMAIL_REGEX.search(str(administrative.get('email'))) + field_content = email_search.group(0) if email_search else None + domain_info.admin_email = field_content + + if 'technical' in whois: + technical = whois.get('technical') + domain_info.tech_name = technical.get('name') + domain_info.tech_country = technical.get('country') + domain_info.tech_state = technical.get('province') + domain_info.tech_id = technical.get('id') + domain_info.tech_city = technical.get('city') + domain_info.tech_phone = technical.get('phone') + domain_info.tech_address = technical.get('street') + domain_info.tech_organization = technical.get('organization') + domain_info.tech_fax = technical.get('fax') + domain_info.tech_zip_code = technical.get('postal_code') + mail_search = EMAIL_REGEX.search(str(technical.get('email'))) + field_content = email_search.group(0) if email_search else None + domain_info.tech_email = field_content + + if 'dns' in result: + dns = result.get('dns') + domain_info.mx_records = dns.get('mx') + domain_info.txt_records = dns.get('txt') + domain_info.a_records = dns.get('a') + + domain_info.ns_records = whois.get('name_servers') + domain_info.dnssec = True if whois.get('dnssec') else False + domain_info.status = whois.get('status') + + if 'registrar' in whois: + registrar = whois.get('registrar') + domain_info.registrar_name = registrar.get('name') + domain_info.registrar_email = registrar.get('email') + domain_info.registrar_phone = registrar.get('phone') + domain_info.registrar_url = registrar.get('url') + + # find associated domains if registrant email is found + related_domains = reverse_whois(domain_info.get('registrant_email')) if domain_info.get('registrant_email') else 
[] + for _domain in related_domains: + related_domains_list.append(_domain['name']) + + # remove duplicate domains from related domains list + related_domains_list = list(set(related_domains_list)) + domain_info.related_domains = related_domains_list + + # save to db if domain exists + if Domain.objects.filter(name=ip_domain).exists(): + domain = Domain.objects.get(name=ip_domain) + db_domain_info = domain.domain_info if domain.domain_info else DomainInfo() + db_domain_info.save() + for _domain in related_domains: + domain_rel = RelatedDomain.objects.get_or_create( + name=_domain['name'], + )[0] + db_domain_info.related_domains.add(domain_rel) + + db_domain_info.dnssec = domain_info.get('dnssec') + #dates + db_domain_info.created = domain_info.get('created') + db_domain_info.updated = domain_info.get('updated') + db_domain_info.expires = domain_info.get('expires') + #registrar + db_domain_info.registrar = Registrar.objects.get_or_create( + name=domain_info.get('registrar_name'), + email=domain_info.get('registrar_email'), + phone=domain_info.get('registrar_phone'), + url=domain_info.get('registrar_url'), + )[0] + db_domain_info.registrant = DomainRegistration.objects.get_or_create( + name=domain_info.get('registrant_name'), + organization=domain_info.get('registrant_organization'), + address=domain_info.get('registrant_address'), + city=domain_info.get('registrant_city'), + state=domain_info.get('registrant_state'), + zip_code=domain_info.get('registrant_zip_code'), + country=domain_info.get('registrant_country'), + email=domain_info.get('registrant_email'), + phone=domain_info.get('registrant_phone'), + fax=domain_info.get('registrant_fax'), + id_str=domain_info.get('registrant_id'), + )[0] + db_domain_info.admin = DomainRegistration.objects.get_or_create( + name=domain_info.get('admin_name'), + organization=domain_info.get('admin_organization'), + address=domain_info.get('admin_address'), + city=domain_info.get('admin_city'), + state=domain_info.get('admin_state'), + zip_code=domain_info.get('admin_zip_code'), + country=domain_info.get('admin_country'), + email=domain_info.get('admin_email'), + phone=domain_info.get('admin_phone'), + fax=domain_info.get('admin_fax'), + id_str=domain_info.get('admin_id'), + )[0] + db_domain_info.tech = DomainRegistration.objects.get_or_create( + name=domain_info.get('tech_name'), + organization=domain_info.get('tech_organization'), + address=domain_info.get('tech_address'), + city=domain_info.get('tech_city'), + state=domain_info.get('tech_state'), + zip_code=domain_info.get('tech_zip_code'), + country=domain_info.get('tech_country'), + email=domain_info.get('tech_email'), + phone=domain_info.get('tech_phone'), + fax=domain_info.get('tech_fax'), + id_str=domain_info.get('tech_id'), + )[0] + for status in domain_info.get('status') or []: + _status = WhoisStatus.objects.get_or_create( + name=status + )[0] + _status.save() + db_domain_info.status.add(_status) + + for ns in domain_info.get('ns_records') or []: + _ns = NameServer.objects.get_or_create( + name=ns + )[0] + _ns.save() + db_domain_info.name_servers.add(_ns) + + for a in domain_info.get('a_records') or []: + _a = DNSRecord.objects.get_or_create( + name=a, + type='a' + )[0] + _a.save() + db_domain_info.dns_records.add(_a) + for mx in domain_info.get('mx_records') or []: + _mx = DNSRecord.objects.get_or_create( + name=mx, + type='mx' + )[0] + _mx.save() + db_domain_info.dns_records.add(_mx) + for txt in domain_info.get('txt_records') or []: + _txt = DNSRecord.objects.get_or_create( + name=txt, + 
type='txt' + )[0] + _txt.save() + db_domain_info.dns_records.add(_txt) + + db_domain_info.geolocation_iso = domain_info.get('registrant_country') + db_domain_info.whois_server = domain_info.get('whois_server') + db_domain_info.save() + domain.domain_info = db_domain_info + domain.save() + + except Exception as e: + return { + 'status': False, + 'ip_domain': ip_domain, + 'result': "unable to fetch records from WHOIS database.", + 'message': str(e) + } + + return { + 'status': True, + 'ip_domain': ip_domain, + 'dnssec': domain_info.get('dnssec'), + 'created': domain_info.get('created'), + 'updated': domain_info.get('updated'), + 'expires': domain_info.get('expires'), + 'geolocation_iso': domain_info.get('registrant_country'), + 'domain_statuses': domain_info.get('status'), + 'whois_server': domain_info.get('whois_server'), + 'dns': { + 'a': domain_info.get('a_records'), + 'mx': domain_info.get('mx_records'), + 'txt': domain_info.get('txt_records'), + }, + 'registrar': { + 'name': domain_info.get('registrar_name'), + 'phone': domain_info.get('registrar_phone'), + 'email': domain_info.get('registrar_email'), + 'url': domain_info.get('registrar_url'), + }, + 'registrant': { + 'name': domain_info.get('registrant_name'), + 'id': domain_info.get('registrant_id'), + 'organization': domain_info.get('registrant_organization'), + 'address': domain_info.get('registrant_address'), + 'city': domain_info.get('registrant_city'), + 'state': domain_info.get('registrant_state'), + 'zipcode': domain_info.get('registrant_zip_code'), + 'country': domain_info.get('registrant_country'), + 'phone': domain_info.get('registrant_phone'), + 'fax': domain_info.get('registrant_fax'), + 'email': domain_info.get('registrant_email'), + }, + 'admin': { + 'name': domain_info.get('admin_name'), + 'id': domain_info.get('admin_id'), + 'organization': domain_info.get('admin_organization'), + 'address':domain_info.get('admin_address'), + 'city': domain_info.get('admin_city'), + 'state': domain_info.get('admin_state'), + 'zipcode': domain_info.get('admin_zip_code'), + 'country': domain_info.get('admin_country'), + 'phone': domain_info.get('admin_phone'), + 'fax': domain_info.get('admin_fax'), + 'email': domain_info.get('admin_email'), + }, + 'technical_contact': { + 'name': domain_info.get('tech_name'), + 'id': domain_info.get('tech_id'), + 'organization': domain_info.get('tech_organization'), + 'address': domain_info.get('tech_address'), + 'city': domain_info.get('tech_city'), + 'state': domain_info.get('tech_state'), + 'zipcode': domain_info.get('tech_zip_code'), + 'country': domain_info.get('tech_country'), + 'phone': domain_info.get('tech_phone'), + 'fax': domain_info.get('tech_fax'), + 'email': domain_info.get('tech_email'), + }, + 'nameservers': domain_info.get('ns_records'), + # 'similar_domains': domain_info.get('similar_domains'), + 'related_domains': domain_info.get('related_domains'), + 'related_tlds': domain_info.get('related_tlds'), + 'historical_ips': domain_info.get('historical_ips'), + } @app.task(name='remove_duplicate_endpoints', bind=False, queue='remove_duplicate_endpoints_queue') def remove_duplicate_endpoints( - scan_history_id, - domain_id, - subdomain_id=None, - filter_ids=[], - # TODO Check if the status code could be set as parameters of the scan engine instead of hardcoded values - filter_status=[200, 301, 302, 303, 307, 404, 410], # Extended status codes - duplicate_removal_fields=ENDPOINT_SCAN_DEFAULT_DUPLICATE_FIELDS - ): - """Remove duplicate endpoints. 
- - Check for implicit redirections by comparing endpoints: - - [x] `content_length` similarities indicating redirections - - [x] `page_title` (check for same page title) - - [ ] Sign-in / login page (check for endpoints with the same words) - - Args: - scan_history_id: ScanHistory id. - domain_id (int): Domain id. - subdomain_id (int, optional): Subdomain id. - filter_ids (list): List of endpoint ids to filter on. - filter_status (list): List of HTTP status codes to filter on. - duplicate_removal_fields (list): List of Endpoint model fields to check for duplicates - """ - logger.info(f'Removing duplicate endpoints based on {duplicate_removal_fields}') - - # Filter endpoints based on scan history and domain - endpoints = ( - EndPoint.objects - .filter(scan_history__id=scan_history_id) - .filter(target_domain__id=domain_id) - ) - if filter_status: - endpoints = endpoints.filter(http_status__in=filter_status) - - if subdomain_id: - endpoints = endpoints.filter(subdomain__id=subdomain_id) - - if filter_ids: - endpoints = endpoints.filter(id__in=filter_ids) - - # Group by all duplicate removal fields combined - fields_combined = duplicate_removal_fields[:] - fields_combined.append('id') # Add ID to ensure unique identification - - cl_query = ( - endpoints - .values(*duplicate_removal_fields) - .annotate(mc=Count('id')) - .order_by('-mc') - ) - - for field_values in cl_query: - if field_values['mc'] > DELETE_DUPLICATES_THRESHOLD: - filter_criteria = {field: field_values[field] for field in duplicate_removal_fields} - eps_to_delete = ( - endpoints - .filter(**filter_criteria) - .order_by('discovered_date') - .all()[1:] - ) - msg = f'Deleting {len(eps_to_delete)} endpoints [reason: same {filter_criteria}]' - for ep in eps_to_delete: - url = urlparse(ep.http_url) - if url.path in ['', '/', '/login']: # Ensure not to delete the original page that other pages redirect to - continue - msg += f'\n\t {ep.http_url} [{ep.http_status}] {filter_criteria}' - ep.delete() - logger.warning(msg) + scan_history_id, + domain_id, + subdomain_id=None, + filter_ids=[], + # TODO Check if the status code could be set as parameters of the scan engine instead of hardcoded values + filter_status=[200, 301, 302, 303, 307, 404, 410], # Extended status codes + duplicate_removal_fields=ENDPOINT_SCAN_DEFAULT_DUPLICATE_FIELDS + ): + """Remove duplicate endpoints. + + Check for implicit redirections by comparing endpoints: + - [x] `content_length` similarities indicating redirections + - [x] `page_title` (check for same page title) + - [ ] Sign-in / login page (check for endpoints with the same words) + + Args: + scan_history_id: ScanHistory id. + domain_id (int): Domain id. + subdomain_id (int, optional): Subdomain id. + filter_ids (list): List of endpoint ids to filter on. + filter_status (list): List of HTTP status codes to filter on. 
+ duplicate_removal_fields (list): List of Endpoint model fields to check for duplicates + """ + logger.info(f'Removing duplicate endpoints based on {duplicate_removal_fields}') + + # Filter endpoints based on scan history and domain + endpoints = ( + EndPoint.objects + .filter(scan_history__id=scan_history_id) + .filter(target_domain__id=domain_id) + ) + if filter_status: + endpoints = endpoints.filter(http_status__in=filter_status) + + if subdomain_id: + endpoints = endpoints.filter(subdomain__id=subdomain_id) + + if filter_ids: + endpoints = endpoints.filter(id__in=filter_ids) + + # Group by all duplicate removal fields combined + fields_combined = duplicate_removal_fields[:] + fields_combined.append('id') # Add ID to ensure unique identification + + cl_query = ( + endpoints + .values(*duplicate_removal_fields) + .annotate(mc=Count('id')) + .order_by('-mc') + ) + + for field_values in cl_query: + if field_values['mc'] > DELETE_DUPLICATES_THRESHOLD: + filter_criteria = {field: field_values[field] for field in duplicate_removal_fields} + eps_to_delete = ( + endpoints + .filter(**filter_criteria) + .order_by('discovered_date') + .all()[1:] + ) + msg = f'Deleting {len(eps_to_delete)} endpoints [reason: same {filter_criteria}]' + for ep in eps_to_delete: + url = urlparse(ep.http_url) + if url.path in ['', '/', '/login']: # Ensure not to delete the original page that other pages redirect to + continue + msg += f'\n\t {ep.http_url} [{ep.http_status}] {filter_criteria}' + ep.delete() + logger.warning(msg) @app.task(name='run_command', bind=False, queue='run_command_queue') def run_command( - cmd, - cwd=None, - shell=False, - history_file=None, - scan_id=None, - activity_id=None, - remove_ansi_sequence=False - ): - """Run a given command using subprocess module. - - Args: - cmd (str): Command to run. - cwd (str): Current working directory. - echo (bool): Log command. - shell (bool): Run within separate shell if True. - history_file (str): Write command + output to history file. - remove_ansi_sequence (bool): Used to remove ANSI escape sequences from output such as color coding - Returns: - tuple: Tuple with return_code, output. - """ - logger.info(cmd) - logger.warning(activity_id) - - # Create a command record in the database - command_obj = Command.objects.create( - command=cmd, - time=timezone.now(), - scan_history_id=scan_id, - activity_id=activity_id) - - # Run the command using subprocess - popen = subprocess.Popen( - cmd if shell else cmd.split(), - shell=shell, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - cwd=cwd, - universal_newlines=True) - output = '' - for stdout_line in iter(popen.stdout.readline, ""): - item = stdout_line.strip() - output += '\n' + item - logger.debug(item) - popen.stdout.close() - popen.wait() - return_code = popen.returncode - command_obj.output = output - command_obj.return_code = return_code - command_obj.save() - if history_file: - mode = 'a' - if not os.path.exists(history_file): - mode = 'w' - with open(history_file, mode) as f: - f.write(f'\n{cmd}\n{return_code}\n{output}\n------------------\n') - if remove_ansi_sequence: - output = remove_ansi_escape_sequences(output) - return return_code, output + cmd, + cwd=None, + shell=False, + history_file=None, + scan_id=None, + activity_id=None, + remove_ansi_sequence=False + ): + """Run a given command using subprocess module. + + Args: + cmd (str): Command to run. + cwd (str): Current working directory. + echo (bool): Log command. + shell (bool): Run within separate shell if True. 
+ history_file (str): Write command + output to history file. + remove_ansi_sequence (bool): Used to remove ANSI escape sequences from output such as color coding + Returns: + tuple: Tuple with return_code, output. + """ + logger.info(cmd) + logger.warning(activity_id) + + # Create a command record in the database + command_obj = Command.objects.create( + command=cmd, + time=timezone.now(), + scan_history_id=scan_id, + activity_id=activity_id) + + # Run the command using subprocess + popen = subprocess.Popen( + cmd if shell else cmd.split(), + shell=shell, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + cwd=cwd, + universal_newlines=True) + output = '' + for stdout_line in iter(popen.stdout.readline, ""): + item = stdout_line.strip() + output += '\n' + item + logger.debug(item) + popen.stdout.close() + popen.wait() + return_code = popen.returncode + command_obj.output = output + command_obj.return_code = return_code + command_obj.save() + if history_file: + mode = 'a' + if not os.path.exists(history_file): + mode = 'w' + with open(history_file, mode) as f: + f.write(f'\n{cmd}\n{return_code}\n{output}\n------------------\n') + if remove_ansi_sequence: + output = remove_ansi_escape_sequences(output) + return return_code, output #-------------# @@ -4222,117 +4223,117 @@ def run_command( #-------------# def stream_command(cmd, cwd=None, shell=False, history_file=None, encoding='utf-8', scan_id=None, activity_id=None, trunc_char=None): - # Log cmd - logger.info(cmd) - # logger.warning(activity_id) - - # Create a command record in the database - command_obj = Command.objects.create( - command=cmd, - time=timezone.now(), - scan_history_id=scan_id, - activity_id=activity_id) - - # Sanitize the cmd - command = cmd if shell else cmd.split() - - # Run the command using subprocess - process = subprocess.Popen( - command, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - universal_newlines=True, - shell=shell) - - # Log the output in real-time to the database - output = "" - - # Process the output - for line in iter(lambda: process.stdout.readline(), b''): - if not line: - break - line = line.strip() - ansi_escape = re.compile(r'\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])') - line = ansi_escape.sub('', line) - line = line.replace('\\x0d\\x0a', '\n') - if trunc_char and line.endswith(trunc_char): - line = line[:-1] - item = line - - # Try to parse the line as JSON - try: - item = json.loads(line) - except json.JSONDecodeError: - pass - - # Yield the line - #logger.debug(item) - yield item - - # Add the log line to the output - output += line + "\n" - - # Update the command record in the database - command_obj.output = output - command_obj.save() - - # Retrieve the return code and output - process.wait() - return_code = process.returncode - - # Update the return code and final output in the database - command_obj.return_code = return_code - command_obj.save() - - # Append the command, return code and output to the history file - if history_file is not None: - with open(history_file, "a") as f: - f.write(f"{cmd}\n{return_code}\n{output}\n") + # Log cmd + logger.info(cmd) + # logger.warning(activity_id) + + # Create a command record in the database + command_obj = Command.objects.create( + command=cmd, + time=timezone.now(), + scan_history_id=scan_id, + activity_id=activity_id) + + # Sanitize the cmd + command = cmd if shell else cmd.split() + + # Run the command using subprocess + process = subprocess.Popen( + command, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + 
universal_newlines=True, + shell=shell) + + # Log the output in real-time to the database + output = "" + + # Process the output + for line in iter(lambda: process.stdout.readline(), b''): + if not line: + break + line = line.strip() + ansi_escape = re.compile(r'\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])') + line = ansi_escape.sub('', line) + line = line.replace('\\x0d\\x0a', '\n') + if trunc_char and line.endswith(trunc_char): + line = line[:-1] + item = line + + # Try to parse the line as JSON + try: + item = json.loads(line) + except json.JSONDecodeError: + pass + + # Yield the line + #logger.debug(item) + yield item + + # Add the log line to the output + output += line + "\n" + + # Update the command record in the database + command_obj.output = output + command_obj.save() + + # Retrieve the return code and output + process.wait() + return_code = process.returncode + + # Update the return code and final output in the database + command_obj.return_code = return_code + command_obj.save() + + # Append the command, return code and output to the history file + if history_file is not None: + with open(history_file, "a") as f: + f.write(f"{cmd}\n{return_code}\n{output}\n") def process_httpx_response(line): - """TODO: implement this""" + """TODO: implement this""" def extract_httpx_url(line, follow_redirect): - """Extract final URL from httpx results. - - Args: - line (dict): URL data output by httpx. - - Returns: - tuple: (final_url, redirect_bool) tuple. - """ - status_code = line.get('status_code', 0) - final_url = line.get('final_url') - location = line.get('location') - chain_status_codes = line.get('chain_status_codes', []) - http_url = line.get('url') - - # Final URL is already looking nice, if it exists and follow redirect is enabled, return it - if final_url and follow_redirect: - return final_url, False - - # Handle redirects manually if follow redirect is enabled - if follow_redirect: - REDIRECT_STATUS_CODES = [301, 302] - is_redirect = ( - status_code in REDIRECT_STATUS_CODES - or - any(x in REDIRECT_STATUS_CODES for x in chain_status_codes) - ) - if is_redirect and location: - if location.startswith(('http', 'https')): - http_url = location - else: - http_url = f'{http_url}/{location.lstrip("/")}' - else: - is_redirect = False - - # Sanitize URL - http_url = sanitize_url(http_url) - - return http_url, is_redirect + """Extract final URL from httpx results. + + Args: + line (dict): URL data output by httpx. + + Returns: + tuple: (final_url, redirect_bool) tuple. 
+ """ + status_code = line.get('status_code', 0) + final_url = line.get('final_url') + location = line.get('location') + chain_status_codes = line.get('chain_status_codes', []) + http_url = line.get('url') + + # Final URL is already looking nice, if it exists and follow redirect is enabled, return it + if final_url and follow_redirect: + return final_url, False + + # Handle redirects manually if follow redirect is enabled + if follow_redirect: + REDIRECT_STATUS_CODES = [301, 302] + is_redirect = ( + status_code in REDIRECT_STATUS_CODES + or + any(x in REDIRECT_STATUS_CODES for x in chain_status_codes) + ) + if is_redirect and location: + if location.startswith(('http', 'https')): + http_url = location + else: + http_url = f'{http_url}/{location.lstrip("/")}' + else: + is_redirect = False + + # Sanitize URL + http_url = sanitize_url(http_url) + + return http_url, is_redirect #-------------# @@ -4340,158 +4341,158 @@ def extract_httpx_url(line, follow_redirect): #-------------# def get_and_save_dork_results(lookup_target, results_dir, type, lookup_keywords=None, lookup_extensions=None, delay=3, page_count=2, scan_history=None): - """ - Uses gofuzz to dork and store information - - Args: - lookup_target (str): target to look into such as stackoverflow or even the target itself - results_dir (str): Results directory - type (str): Dork Type Title - lookup_keywords (str): comma separated keywords or paths to look for - lookup_extensions (str): comma separated extensions to look for - delay (int): delay between each requests - page_count (int): pages in google to extract information - scan_history (startScan.ScanHistory): Scan History Object - """ - results = [] - gofuzz_command = f'{GOFUZZ_EXEC_PATH} -t {lookup_target} -d {delay} -p {page_count}' - - if lookup_extensions: - gofuzz_command += f' -e {lookup_extensions}' - elif lookup_keywords: - gofuzz_command += f' -w {lookup_keywords}' - - output_file = str(Path(results_dir) / 'gofuzz.txt') - gofuzz_command += f' -o {output_file}' - history_file = str(Path(results_dir) / 'commands.txt') - - try: - run_command( - gofuzz_command, - shell=False, - history_file=history_file, - scan_id=scan_history.id, - ) - - if not os.path.isfile(output_file): - return - - with open(output_file) as f: - for line in f.readlines(): - url = line.strip() - if url: - results.append(url) - dork, created = Dork.objects.get_or_create( - type=type, - url=url - ) - if scan_history: - scan_history.dorks.add(dork) - - # remove output file - os.remove(output_file) - - except Exception as e: - logger.exception(e) - - return results + """ + Uses gofuzz to dork and store information + + Args: + lookup_target (str): target to look into such as stackoverflow or even the target itself + results_dir (str): Results directory + type (str): Dork Type Title + lookup_keywords (str): comma separated keywords or paths to look for + lookup_extensions (str): comma separated extensions to look for + delay (int): delay between each requests + page_count (int): pages in google to extract information + scan_history (startScan.ScanHistory): Scan History Object + """ + results = [] + gofuzz_command = f'{GOFUZZ_EXEC_PATH} -t {lookup_target} -d {delay} -p {page_count}' + + if lookup_extensions: + gofuzz_command += f' -e {lookup_extensions}' + elif lookup_keywords: + gofuzz_command += f' -w {lookup_keywords}' + + output_file = str(Path(results_dir) / 'gofuzz.txt') + gofuzz_command += f' -o {output_file}' + history_file = str(Path(results_dir) / 'commands.txt') + + try: + run_command( + gofuzz_command, 
+ shell=False, + history_file=history_file, + scan_id=scan_history.id, + ) + + if not os.path.isfile(output_file): + return + + with open(output_file) as f: + for line in f.readlines(): + url = line.strip() + if url: + results.append(url) + dork, created = Dork.objects.get_or_create( + type=type, + url=url + ) + if scan_history: + scan_history.dorks.add(dork) + + # remove output file + os.remove(output_file) + + except Exception as e: + logger.exception(e) + + return results def get_and_save_emails(scan_history, activity_id, results_dir): - """Get and save emails from Google, Bing and Baidu. - - Args: - scan_history (startScan.ScanHistory): Scan history object. - activity_id: ScanActivity Object - results_dir (str): Results directory. - - Returns: - list: List of emails found. - """ - emails = [] - - # Proxy settings - # get_random_proxy() - - # Gather emails from Google, Bing and Baidu - output_file = str(Path(results_dir) / 'emails_tmp.txt') - history_file = str(Path(results_dir) / 'commands.txt') - command = f'infoga --domain {scan_history.domain.name} --source all --report {output_file}' - try: - run_command( - command, - shell=False, - history_file=history_file, - scan_id=scan_history.id, - activity_id=activity_id) - - if not os.path.isfile(output_file): - logger.info('No Email results') - return [] - - with open(output_file) as f: - for line in f.readlines(): - if 'Email' in line: - split_email = line.split(' ')[2] - emails.append(split_email) - - output_path = str(Path(results_dir) / 'emails.txt') - with open(output_path, 'w') as output_file: - for email_address in emails: - save_email(email_address, scan_history) - output_file.write(f'{email_address}\n') - - except Exception as e: - logger.exception(e) - return emails + """Get and save emails from Google, Bing and Baidu. + + Args: + scan_history (startScan.ScanHistory): Scan history object. + activity_id: ScanActivity Object + results_dir (str): Results directory. + + Returns: + list: List of emails found. + """ + emails = [] + + # Proxy settings + # get_random_proxy() + + # Gather emails from Google, Bing and Baidu + output_file = str(Path(results_dir) / 'emails_tmp.txt') + history_file = str(Path(results_dir) / 'commands.txt') + command = f'infoga --domain {scan_history.domain.name} --source all --report {output_file}' + try: + run_command( + command, + shell=False, + history_file=history_file, + scan_id=scan_history.id, + activity_id=activity_id) + + if not os.path.isfile(output_file): + logger.info('No Email results') + return [] + + with open(output_file) as f: + for line in f.readlines(): + if 'Email' in line: + split_email = line.split(' ')[2] + emails.append(split_email) + + output_path = str(Path(results_dir) / 'emails.txt') + with open(output_path, 'w') as output_file: + for email_address in emails: + save_email(email_address, scan_history) + output_file.write(f'{email_address}\n') + + except Exception as e: + logger.exception(e) + return emails def save_metadata_info(meta_dict): - """Extract metadata from Google Search. - - Args: - meta_dict (dict): Info dict. - - Returns: - list: List of startScan.MetaFinderDocument objects. 
- """ - logger.warning(f'Getting metadata for {meta_dict.osint_target}') - - scan_history = ScanHistory.objects.get(id=meta_dict.scan_id) - - # Proxy settings - get_random_proxy() - - # Get metadata - result = extract_metadata_from_google_search(meta_dict.osint_target, meta_dict.documents_limit) - if not result: - logger.error(f'No metadata result from Google Search for {meta_dict.osint_target}.') - return [] - - # Add metadata info to DB - results = [] - for metadata_name, data in result.get_metadata().items(): - subdomain = Subdomain.objects.get( - scan_history=meta_dict.scan_id, - name=meta_dict.osint_target) - metadata = DottedDict({k: v for k, v in data.items()}) - meta_finder_document = MetaFinderDocument( - subdomain=subdomain, - target_domain=meta_dict.domain, - scan_history=scan_history, - url=metadata.url, - doc_name=metadata_name, - http_status=metadata.status_code, - producer=metadata.metadata.get('Producer'), - creator=metadata.metadata.get('Creator'), - creation_date=metadata.metadata.get('CreationDate'), - modified_date=metadata.metadata.get('ModDate'), - author=metadata.metadata.get('Author'), - title=metadata.metadata.get('Title'), - os=metadata.metadata.get('OSInfo')) - meta_finder_document.save() - results.append(data) - return results + """Extract metadata from Google Search. + + Args: + meta_dict (dict): Info dict. + + Returns: + list: List of startScan.MetaFinderDocument objects. + """ + logger.warning(f'Getting metadata for {meta_dict.osint_target}') + + scan_history = ScanHistory.objects.get(id=meta_dict.scan_id) + + # Proxy settings + get_random_proxy() + + # Get metadata + result = extract_metadata_from_google_search(meta_dict.osint_target, meta_dict.documents_limit) + if not result: + logger.error(f'No metadata result from Google Search for {meta_dict.osint_target}.') + return [] + + # Add metadata info to DB + results = [] + for metadata_name, data in result.get_metadata().items(): + subdomain = Subdomain.objects.get( + scan_history=meta_dict.scan_id, + name=meta_dict.osint_target) + metadata = DottedDict({k: v for k, v in data.items()}) + meta_finder_document = MetaFinderDocument( + subdomain=subdomain, + target_domain=meta_dict.domain, + scan_history=scan_history, + url=metadata.url, + doc_name=metadata_name, + http_status=metadata.status_code, + producer=metadata.metadata.get('Producer'), + creator=metadata.metadata.get('Creator'), + creation_date=metadata.metadata.get('CreationDate'), + modified_date=metadata.metadata.get('ModDate'), + author=metadata.metadata.get('Author'), + title=metadata.metadata.get('Title'), + os=metadata.metadata.get('OSInfo')) + meta_finder_document.save() + results.append(data) + return results #-----------------# @@ -4499,13 +4500,13 @@ def save_metadata_info(meta_dict): #-----------------# def create_scan_activity(scan_history_id, message, status): - scan_activity = ScanActivity() - scan_activity.scan_of = ScanHistory.objects.get(pk=scan_history_id) - scan_activity.title = message - scan_activity.time = timezone.now() - scan_activity.status = status - scan_activity.save() - return scan_activity.id + scan_activity = ScanActivity() + scan_activity.scan_of = ScanHistory.objects.get(pk=scan_history_id) + scan_activity.title = message + scan_activity.time = timezone.now() + scan_activity.status = status + scan_activity.save() + return scan_activity.id #--------------------# @@ -4514,422 +4515,527 @@ def create_scan_activity(scan_history_id, message, status): def save_vulnerability(**vuln_data): - references = 
vuln_data.pop('references', []) - cve_ids = vuln_data.pop('cve_ids', []) - cwe_ids = vuln_data.pop('cwe_ids', []) - tags = vuln_data.pop('tags', []) - subscan = vuln_data.pop('subscan', None) - - # remove nulls - vuln_data = replace_nulls(vuln_data) - - # Create vulnerability - vuln, created = Vulnerability.objects.get_or_create(**vuln_data) - if created: - vuln.discovered_date = timezone.now() - vuln.open_status = True - vuln.save() - - # Save vuln tags - for tag_name in tags or []: - tag, created = VulnerabilityTags.objects.get_or_create(name=tag_name) - if tag: - vuln.tags.add(tag) - vuln.save() - - # Save CVEs - for cve_id in cve_ids or []: - cve, created = CveId.objects.get_or_create(name=cve_id) - if cve: - vuln.cve_ids.add(cve) - vuln.save() - - # Save CWEs - for cve_id in cwe_ids or []: - cwe, created = CweId.objects.get_or_create(name=cve_id) - if cwe: - vuln.cwe_ids.add(cwe) - vuln.save() - - # Save vuln reference - for url in references or []: - ref, created = VulnerabilityReference.objects.get_or_create(url=url) - if created: - vuln.references.add(ref) - vuln.save() - - # Save subscan id in vuln object - if subscan: - vuln.vuln_subscan_ids.add(subscan) - vuln.save() - - return vuln, created + references = vuln_data.pop('references', []) + cve_ids = vuln_data.pop('cve_ids', []) + cwe_ids = vuln_data.pop('cwe_ids', []) + tags = vuln_data.pop('tags', []) + subscan = vuln_data.pop('subscan', None) + + # remove nulls + vuln_data = replace_nulls(vuln_data) + + # Create vulnerability + vuln, created = Vulnerability.objects.get_or_create(**vuln_data) + if created: + vuln.discovered_date = timezone.now() + vuln.open_status = True + vuln.save() + + # Save vuln tags + for tag_name in tags or []: + tag, created = VulnerabilityTags.objects.get_or_create(name=tag_name) + if tag: + vuln.tags.add(tag) + vuln.save() + + # Save CVEs + for cve_id in cve_ids or []: + cve, created = CveId.objects.get_or_create(name=cve_id) + if cve: + vuln.cve_ids.add(cve) + vuln.save() + + # Save CWEs + for cve_id in cwe_ids or []: + cwe, created = CweId.objects.get_or_create(name=cve_id) + if cwe: + vuln.cwe_ids.add(cwe) + vuln.save() + + # Save vuln reference + for url in references or []: + ref, created = VulnerabilityReference.objects.get_or_create(url=url) + if created: + vuln.references.add(ref) + vuln.save() + + # Save subscan id in vuln object + if subscan: + vuln.vuln_subscan_ids.add(subscan) + vuln.save() + + return vuln, created def save_endpoint( - http_url, - ctx={}, - crawl=False, - is_default=False, - **endpoint_data): - """Get or create EndPoint object. If crawl is True, also crawl the endpoint - HTTP URL with httpx. - - Args: - http_url (str): Input HTTP URL. - is_default (bool): If the url is a default url for SubDomains. - scan_history (startScan.models.ScanHistory): ScanHistory object. - domain (startScan.models.Domain): Domain object. - subdomain (starScan.models.Subdomain): Subdomain object. - results_dir (str, optional): Results directory. - crawl (bool, optional): Run httpx on endpoint if True. Default: False. - force (bool, optional): Force crawl even if ENABLE_HTTP_CRAWL mode is on. - subscan (startScan.models.SubScan, optional): SubScan object. - - Returns: - tuple: (startScan.models.EndPoint, created) where `created` is a boolean - indicating if the object is new or already existed. 
- """ - # remove nulls - endpoint_data = replace_nulls(endpoint_data) - - scheme = urlparse(http_url).scheme - endpoint = None - created = False - if ctx.get('domain_id'): - domain = Domain.objects.get(id=ctx.get('domain_id')) - if domain.name not in http_url: - logger.error(f"{http_url} is not a URL of domain {domain.name}. Skipping.") - return None, False - if crawl: - ctx['track'] = False - results = http_crawl( - urls=[http_url], - ctx=ctx) - if results: - endpoint_data = results[0] - endpoint_id = endpoint_data['endpoint_id'] - created = endpoint_data['endpoint_created'] - endpoint = EndPoint.objects.get(pk=endpoint_id) - elif not scheme: - return None, False - else: # add dumb endpoint without probing it - scan = ScanHistory.objects.filter(pk=ctx.get('scan_history_id')).first() - domain = Domain.objects.filter(pk=ctx.get('domain_id')).first() - if not validators.url(http_url): - return None, False - http_url = sanitize_url(http_url) - - # Try to get the first matching record (prevent duplicate error) - endpoints = EndPoint.objects.filter( - scan_history=scan, - target_domain=domain, - http_url=http_url, - **endpoint_data - ) - - if endpoints.exists(): - endpoint = endpoints.first() - created = False - else: - # No existing record, create a new one - endpoint = EndPoint.objects.create( - scan_history=scan, - target_domain=domain, - http_url=http_url, - **endpoint_data - ) - created = True - - if created: - endpoint.is_default = is_default - endpoint.discovered_date = timezone.now() - endpoint.save() - subscan_id = ctx.get('subscan_id') - if subscan_id: - endpoint.endpoint_subscan_ids.add(subscan_id) - endpoint.save() - - return endpoint, created + http_url, + ctx={}, + crawl=False, + is_default=False, + **endpoint_data): + """Get or create EndPoint object. If crawl is True, also crawl the endpoint + HTTP URL with httpx. + + Args: + http_url (str): Input HTTP URL. + is_default (bool): If the url is a default url for SubDomains. + scan_history (startScan.models.ScanHistory): ScanHistory object. + domain (startScan.models.Domain): Domain object. + subdomain (starScan.models.Subdomain): Subdomain object. + results_dir (str, optional): Results directory. + crawl (bool, optional): Run httpx on endpoint if True. Default: False. + force (bool, optional): Force crawl even if ENABLE_HTTP_CRAWL mode is on. + subscan (startScan.models.SubScan, optional): SubScan object. + + Returns: + tuple: (startScan.models.EndPoint, created) where `created` is a boolean + indicating if the object is new or already existed. + """ + # remove nulls + endpoint_data = replace_nulls(endpoint_data) + + scheme = urlparse(http_url).scheme + endpoint = None + created = False + if ctx.get('domain_id'): + domain = Domain.objects.get(id=ctx.get('domain_id')) + if domain.name not in http_url: + logger.error(f"{http_url} is not a URL of domain {domain.name}. 
Skipping.") + return None, False + if crawl: + ctx['track'] = False + results = http_crawl( + urls=[http_url], + ctx=ctx) + if results: + endpoint_data = results[0] + endpoint_id = endpoint_data['endpoint_id'] + created = endpoint_data['endpoint_created'] + endpoint = EndPoint.objects.get(pk=endpoint_id) + elif not scheme: + return None, False + else: # add dumb endpoint without probing it + scan = ScanHistory.objects.filter(pk=ctx.get('scan_history_id')).first() + domain = Domain.objects.filter(pk=ctx.get('domain_id')).first() + if not validators.url(http_url): + return None, False + http_url = sanitize_url(http_url) + + # Try to get the first matching record (prevent duplicate error) + endpoints = EndPoint.objects.filter( + scan_history=scan, + target_domain=domain, + http_url=http_url, + **endpoint_data + ) + + if endpoints.exists(): + endpoint = endpoints.first() + created = False + else: + # No existing record, create a new one + endpoint = EndPoint.objects.create( + scan_history=scan, + target_domain=domain, + http_url=http_url, + **endpoint_data + ) + created = True + + if created: + endpoint.is_default = is_default + endpoint.discovered_date = timezone.now() + endpoint.save() + subscan_id = ctx.get('subscan_id') + if subscan_id: + endpoint.endpoint_subscan_ids.add(subscan_id) + endpoint.save() + + return endpoint, created def save_subdomain(subdomain_name, ctx={}): - """Get or create Subdomain object. - - Args: - subdomain_name (str): Subdomain name. - scan_history (startScan.models.ScanHistory): ScanHistory object. - - Returns: - tuple: (startScan.models.Subdomain, created) where `created` is a - boolean indicating if the object has been created in DB. - """ - scan_id = ctx.get('scan_history_id') - subscan_id = ctx.get('subscan_id') - out_of_scope_subdomains = ctx.get('out_of_scope_subdomains', []) - subdomain_name = subdomain_name.lower() - valid_domain = ( - validators.domain(subdomain_name) or - validators.ipv4(subdomain_name) or - validators.ipv6(subdomain_name) - ) - if not valid_domain: - logger.error(f'{subdomain_name} is not a valid domain. Skipping.') - return None, False - - if subdomain_name in out_of_scope_subdomains: - logger.error(f'{subdomain_name} is out-of-scope. Skipping.') - return None, False - - if ctx.get('domain_id'): - domain = Domain.objects.get(id=ctx.get('domain_id')) - if domain.name not in subdomain_name: - logger.error(f"{subdomain_name} is not a subdomain of domain {domain.name}. Skipping.") - return None, False - - scan = ScanHistory.objects.filter(pk=scan_id).first() - domain = scan.domain if scan else None - subdomain, created = Subdomain.objects.get_or_create( - scan_history=scan, - target_domain=domain, - name=subdomain_name) - if created: - logger.info(f'Found new subdomain {subdomain_name}') - subdomain.discovered_date = timezone.now() - if subscan_id: - subdomain.subdomain_subscan_ids.add(subscan_id) - subdomain.save() - return subdomain, created + """Get or create Subdomain object. + + Args: + subdomain_name (str): Subdomain name. + scan_history (startScan.models.ScanHistory): ScanHistory object. + + Returns: + tuple: (startScan.models.Subdomain, created) where `created` is a + boolean indicating if the object has been created in DB. 
+ """ + scan_id = ctx.get('scan_history_id') + subscan_id = ctx.get('subscan_id') + out_of_scope_subdomains = ctx.get('out_of_scope_subdomains', []) + subdomain_name = subdomain_name.lower() + valid_domain = ( + validators.domain(subdomain_name) or + validators.ipv4(subdomain_name) or + validators.ipv6(subdomain_name) + ) + if not valid_domain: + logger.error(f'{subdomain_name} is not a valid domain. Skipping.') + return None, False + + if subdomain_name in out_of_scope_subdomains: + logger.error(f'{subdomain_name} is out-of-scope. Skipping.') + return None, False + + if ctx.get('domain_id'): + domain = Domain.objects.get(id=ctx.get('domain_id')) + if domain.name not in subdomain_name: + logger.error(f"{subdomain_name} is not a subdomain of domain {domain.name}. Skipping.") + return None, False + + scan = ScanHistory.objects.filter(pk=scan_id).first() + domain = scan.domain if scan else None + subdomain, created = Subdomain.objects.get_or_create( + scan_history=scan, + target_domain=domain, + name=subdomain_name) + if created: + logger.info(f'Found new subdomain {subdomain_name}') + subdomain.discovered_date = timezone.now() + if subscan_id: + subdomain.subdomain_subscan_ids.add(subscan_id) + subdomain.save() + return subdomain, created def save_subdomain_metadata(subdomain, endpoint, extra_datas={}): - if endpoint and endpoint.is_alive: - logger.info(f'Saving HTTP metadatas from {endpoint.http_url}') - subdomain.http_url = endpoint.http_url - subdomain.http_status = endpoint.http_status - subdomain.response_time = endpoint.response_time - subdomain.page_title = endpoint.page_title - subdomain.content_type = endpoint.content_type - subdomain.content_length = endpoint.content_length - subdomain.webserver = endpoint.webserver - cname = extra_datas.get('cname') - if cname and is_iterable(cname): - subdomain.cname = ','.join(cname) - cdn = extra_datas.get('cdn') - if cdn and is_iterable(cdn): - subdomain.is_cdn = ','.join(cdn) - subdomain.cdn_name = extra_datas.get('cdn_name') - for tech in endpoint.techs.all(): - subdomain.technologies.add(tech) - subdomain.save() + if endpoint and endpoint.is_alive: + logger.info(f'Saving HTTP metadatas from {endpoint.http_url}') + subdomain.http_url = endpoint.http_url + subdomain.http_status = endpoint.http_status + subdomain.response_time = endpoint.response_time + subdomain.page_title = endpoint.page_title + subdomain.content_type = endpoint.content_type + subdomain.content_length = endpoint.content_length + subdomain.webserver = endpoint.webserver + cname = extra_datas.get('cname') + if cname and is_iterable(cname): + subdomain.cname = ','.join(cname) + cdn = extra_datas.get('cdn') + if cdn and is_iterable(cdn): + subdomain.is_cdn = ','.join(cdn) + subdomain.cdn_name = extra_datas.get('cdn_name') + for tech in endpoint.techs.all(): + subdomain.technologies.add(tech) + subdomain.save() def save_email(email_address, scan_history=None): - if not validators.email(email_address): - logger.info(f'Email {email_address} is invalid. Skipping.') - return None, False - email, created = Email.objects.get_or_create(address=email_address) - if created: - logger.info(f'Found new email address {email_address}') + if not validators.email(email_address): + logger.info(f'Email {email_address} is invalid. 
Skipping.') + return None, False + email, created = Email.objects.get_or_create(address=email_address) + if created: + logger.info(f'Found new email address {email_address}') - # Add email to ScanHistory - if scan_history: - scan_history.emails.add(email) - scan_history.save() + # Add email to ScanHistory + if scan_history: + scan_history.emails.add(email) + scan_history.save() - return email, created + return email, created def save_employee(name, designation, scan_history=None): - employee, created = Employee.objects.get_or_create( - name=name, - designation=designation) - if created: - logger.warning(f'Found new employee {name}') + employee, created = Employee.objects.get_or_create( + name=name, + designation=designation) + if created: + logger.warning(f'Found new employee {name}') - # Add employee to ScanHistory - if scan_history: - scan_history.employees.add(employee) - scan_history.save() + # Add employee to ScanHistory + if scan_history: + scan_history.employees.add(employee) + scan_history.save() - return employee, created + return employee, created def save_ip_address(ip_address, subdomain=None, subscan=None, **kwargs): - if not (validators.ipv4(ip_address) or validators.ipv6(ip_address)): - logger.info(f'IP {ip_address} is not a valid IP. Skipping.') - return None, False - ip, created = IpAddress.objects.get_or_create(address=ip_address) - if created: - logger.warning(f'Found new IP {ip_address}') + if not (validators.ipv4(ip_address) or validators.ipv6(ip_address)): + logger.info(f'IP {ip_address} is not a valid IP. Skipping.') + return None, False + ip, created = IpAddress.objects.get_or_create(address=ip_address) + if created: + logger.warning(f'Found new IP {ip_address}') - # Set extra attributes - for key, value in kwargs.items(): - setattr(ip, key, value) - ip.save() + # Set extra attributes + for key, value in kwargs.items(): + setattr(ip, key, value) + ip.save() - # Add IP to subdomain - if subdomain: - subdomain.ip_addresses.add(ip) - subdomain.save() + # Add IP to subdomain + if subdomain: + subdomain.ip_addresses.add(ip) + subdomain.save() - # Add subscan to IP - if subscan: - ip.ip_subscan_ids.add(subscan) + # Add subscan to IP + if subscan: + ip.ip_subscan_ids.add(subscan) - # Geo-localize IP asynchronously - if created: - geo_localize.delay(ip_address, ip.id) + # Geo-localize IP asynchronously + if created: + geo_localize.delay(ip_address, ip.id) - return ip, created + return ip, created def save_imported_subdomains(subdomains, ctx={}): - """Take a list of subdomains imported and write them to from_imported.txt. - - Args: - subdomains (list): List of subdomain names. - scan_history (startScan.models.ScanHistory): ScanHistory instance. - domain (startScan.models.Domain): Domain instance. - results_dir (str): Results directory. 
- """ - domain_id = ctx['domain_id'] - domain = Domain.objects.get(pk=domain_id) - results_dir = ctx.get('results_dir', RENGINE_RESULTS) - - # Validate each subdomain and de-duplicate entries - subdomains = list(set([ - subdomain for subdomain in subdomains - if domain.name == get_domain_from_subdomain(subdomain) - ])) - if not subdomains: - return - - logger.warning(f'Found {len(subdomains)} imported subdomains.') - with open(f'{results_dir}/from_imported.txt', 'w+') as output_file: - url_filter = ctx.get('url_filter') - enable_http_crawl = ctx.get('yaml_configuration').get(ENABLE_HTTP_CRAWL, DEFAULT_ENABLE_HTTP_CRAWL) - for subdomain in subdomains: - # Save valid imported subdomains - subdomain_name = subdomain.strip() - subdomain, _ = save_subdomain(subdomain_name, ctx=ctx) - if not isinstance(subdomain, Subdomain): - logger.error(f"Invalid subdomain encountered: {subdomain}") - continue - subdomain.is_imported_subdomain = True - subdomain.save() - output_file.write(f'{subdomain}\n') - - # Create base endpoint (for scan) - http_url = f'{subdomain.name}{url_filter}' if url_filter else subdomain.name - endpoint, _ = save_endpoint( - http_url, - ctx=ctx, - crawl=enable_http_crawl, - is_default=True, - subdomain=subdomain - ) - save_subdomain_metadata(subdomain, endpoint) + """Take a list of subdomains imported and write them to from_imported.txt. + + Args: + subdomains (list): List of subdomain names. + scan_history (startScan.models.ScanHistory): ScanHistory instance. + domain (startScan.models.Domain): Domain instance. + results_dir (str): Results directory. + """ + domain_id = ctx['domain_id'] + domain = Domain.objects.get(pk=domain_id) + results_dir = ctx.get('results_dir', RENGINE_RESULTS) + + # Validate each subdomain and de-duplicate entries + subdomains = list(set([ + subdomain for subdomain in subdomains + if domain.name == get_domain_from_subdomain(subdomain) + ])) + if not subdomains: + return + + logger.warning(f'Found {len(subdomains)} imported subdomains.') + with open(f'{results_dir}/from_imported.txt', 'w+') as output_file: + url_filter = ctx.get('url_filter') + enable_http_crawl = ctx.get('yaml_configuration').get(ENABLE_HTTP_CRAWL, DEFAULT_ENABLE_HTTP_CRAWL) + for subdomain in subdomains: + # Save valid imported subdomains + subdomain_name = subdomain.strip() + subdomain, _ = save_subdomain(subdomain_name, ctx=ctx) + if not isinstance(subdomain, Subdomain): + logger.error(f"Invalid subdomain encountered: {subdomain}") + continue + subdomain.is_imported_subdomain = True + subdomain.save() + output_file.write(f'{subdomain}\n') + + # Create base endpoint (for scan) + http_url = f'{subdomain.name}{url_filter}' if url_filter else subdomain.name + endpoint, _ = save_endpoint( + http_url, + ctx=ctx, + crawl=enable_http_crawl, + is_default=True, + subdomain=subdomain + ) + save_subdomain_metadata(subdomain, endpoint) @app.task(name='query_reverse_whois', bind=False, queue='query_reverse_whois_queue') def query_reverse_whois(lookup_keyword): - """Queries Reverse WHOIS information for an organization or email address. + """Queries Reverse WHOIS information for an organization or email address. - Args: - lookup_keyword (str): Registrar Name or email - Returns: - dict: Reverse WHOIS information. - """ + Args: + lookup_keyword (str): Registrar Name or email + Returns: + dict: Reverse WHOIS information. 
+ """ - return get_associated_domains(lookup_keyword) + return get_associated_domains(lookup_keyword) @app.task(name='query_ip_history', bind=False, queue='query_ip_history_queue') def query_ip_history(domain): - """Queries the IP history for a domain + """Queries the IP history for a domain - Args: - domain (str): domain_name - Returns: - list: list of historical ip addresses - """ + Args: + domain (str): domain_name + Returns: + list: list of historical ip addresses + """ - return get_domain_historical_ip_address(domain) + return get_domain_historical_ip_address(domain) @app.task(name='gpt_vulnerability_description', bind=False, queue='gpt_queue') def gpt_vulnerability_description(vulnerability_id): - """Generate and store Vulnerability Description using GPT. - - Args: - vulnerability_id (Vulnerability Model ID): Vulnerability ID to fetch Description. - """ - logger.info('Getting GPT Vulnerability Description') - try: - lookup_vulnerability = Vulnerability.objects.get(id=vulnerability_id) - lookup_url = urlparse(lookup_vulnerability.http_url) - path = lookup_url.path - except Exception as e: - return { - 'status': False, - 'error': str(e) - } - - # check in db GPTVulnerabilityReport model if vulnerability description and path matches - stored = GPTVulnerabilityReport.objects.filter(url_path=path).filter(title=lookup_vulnerability.name).first() - if stored: - response = { - 'status': True, - 'description': stored.description, - 'impact': stored.impact, - 'remediation': stored.remediation, - 'references': [url.url for url in stored.references.all()] - } - else: - vulnerability_description = get_gpt_vuln_input_description( - lookup_vulnerability.name, - path - ) - # one can add more description here later - - gpt_generator = GPTVulnerabilityReportGenerator() - response = gpt_generator.get_vulnerability_description(vulnerability_description) - add_gpt_description_db( - lookup_vulnerability.name, - path, - response.get('description'), - response.get('impact'), - response.get('remediation'), - response.get('references', []) - ) - - # for all vulnerabilities with the same vulnerability name this description has to be stored. - # also the consition is that the url must contain a part of this. - - for vuln in Vulnerability.objects.filter(name=lookup_vulnerability.name, http_url__icontains=path): - vuln.description = response.get('description', vuln.description) - vuln.impact = response.get('impact') - vuln.remediation = response.get('remediation') - vuln.is_gpt_used = True - vuln.save() - - for url in response.get('references', []): - ref, created = VulnerabilityReference.objects.get_or_create(url=url) - vuln.references.add(ref) - vuln.save() - - return response + """Generate and store Vulnerability Description using GPT. + + Args: + vulnerability_id (Vulnerability Model ID): Vulnerability ID to fetch Description. 
+ """ + logger.info('Getting GPT Vulnerability Description') + try: + lookup_vulnerability = Vulnerability.objects.get(id=vulnerability_id) + lookup_url = urlparse(lookup_vulnerability.http_url) + path = lookup_url.path + except Exception as e: + return { + 'status': False, + 'error': str(e) + } + + # check in db GPTVulnerabilityReport model if vulnerability description and path matches + stored = GPTVulnerabilityReport.objects.filter(url_path=path).filter(title=lookup_vulnerability.name).first() + if stored: + response = { + 'status': True, + 'description': stored.description, + 'impact': stored.impact, + 'remediation': stored.remediation, + 'references': [url.url for url in stored.references.all()] + } + else: + vulnerability_description = get_gpt_vuln_input_description( + lookup_vulnerability.name, + path + ) + # one can add more description here later + + gpt_generator = GPTVulnerabilityReportGenerator() + response = gpt_generator.get_vulnerability_description(vulnerability_description) + add_gpt_description_db( + lookup_vulnerability.name, + path, + response.get('description'), + response.get('impact'), + response.get('remediation'), + response.get('references', []) + ) + + # for all vulnerabilities with the same vulnerability name this description has to be stored. + # also the consition is that the url must contain a part of this. + + for vuln in Vulnerability.objects.filter(name=lookup_vulnerability.name, http_url__icontains=path): + vuln.description = response.get('description', vuln.description) + vuln.impact = response.get('impact') + vuln.remediation = response.get('remediation') + vuln.is_gpt_used = True + vuln.save() + + for url in response.get('references', []): + ref, created = VulnerabilityReference.objects.get_or_create(url=url) + vuln.references.add(ref) + vuln.save() + + return response + + +@app.task(name='run_wafw00f', bind=False, queue='run_command_queue') +def run_wafw00f(url): + try: + logger.info(f"Starting WAF detection for URL: {url}") + wafw00f_command = f'wafw00f {url}' + return_code, output = run_command( + cmd=wafw00f_command, + shell=True, + remove_ansi_sequence=True + ) + + logger.info(f"Raw output from wafw00f: {output}") + + # Use regex to extract the WAF name + regex = r"behind (.+)" + match = re.search(regex, output) + + if match: + result = match.group(1) + logger.info(f"WAF detected: {result}") + return result + else: + logger.info("No WAF detected") + return "No WAF detected" + except Exception as e: + logger.error(f"Unexpected error: {e}") + return f"Unexpected error: {str(e)}" + +@app.task(name='run_cmseek', queue='run_command_queue') +def run_cmseek(url): + try: + # Prepare CMSeeK command + cms_detector_command = f'cmseek --random-agent --batch --follow-redirect -u {url}' + + # Run CMSeeK + _, output = run_command(cms_detector_command, remove_ansi_sequence=True) + + # Parse CMSeeK output + base_path = RENGINE_TOOL_PATH + "/.github/CMSeeK/Result" + domain_name = urlparse(url).netloc + json_path = os.path.join(base_path, domain_name, "cms.json") + + if os.path.isfile(json_path): + with open(json_path, 'r') as f: + cms_data = json.load(f) + + if cms_data.get('cms_name'): + # CMS detected + result = {'status': True} + result.update(cms_data) + + # Clean up CMSeeK results + try: + shutil.rmtree(os.path.dirname(json_path)) + except Exception as e: + logger.error(f"Error cleaning up CMSeeK results: {e}") + + return result + + # CMS not detected + return {'status': False, 'message': 'Could not detect CMS!'} + + except Exception as e: + 
logger.error(f"Error running CMSeeK: {e}") + return {'status': False, 'message': str(e)} + +@app.task(name='run_gf_list', queue='run_command_queue') +def run_gf_list(): + try: + # Prepare GF list command + gf_command = 'gf -list' + + # Run GF list command + return_code, output = run_command( + cmd=gf_command, + shell=True, + remove_ansi_sequence=True + ) + + # Log the raw output + logger.info(f"Raw output from GF list: {output}") + + # Check if the command was successful + if return_code == 0: + # Split the output into a list of patterns + patterns = [pattern.strip() for pattern in output.split('\n') if pattern.strip()] + return { + 'status': True, + 'output': patterns + } + else: + logger.error(f"GF list command failed with return code: {return_code}") + return { + 'status': False, + 'message': f"GF list command failed with return code: {return_code}" + } + + except Exception as e: + logger.error(f"Error running GF list: {e}") + return { + 'status': False, + 'message': str(e) + } #----------------------# # Remote debug # #----------------------# def debug(): - try: - # Activate remote debug for scan worker - if CELERY_REMOTE_DEBUG: - logger.info(f"\n⚡ Debugger started on port "+ str(CELERY_REMOTE_DEBUG_PORT) +", task is waiting IDE (VSCode ...) to be attached to continue ⚡\n") - os.environ['GEVENT_SUPPORT'] = 'True' - debugpy.listen(('0.0.0.0',CELERY_REMOTE_DEBUG_PORT)) - debugpy.wait_for_client() - except Exception as e: - logger.error(e) + try: + # Activate remote debug for scan worker + if CELERY_REMOTE_DEBUG: + logger.info(f"\n⚡ Debugger started on port "+ str(CELERY_REMOTE_DEBUG_PORT) +", task is waiting IDE (VSCode ...) to be attached to continue ⚡\n") + os.environ['GEVENT_SUPPORT'] = 'True' + debugpy.listen(('0.0.0.0',CELERY_REMOTE_DEBUG_PORT)) + debugpy.wait_for_client() + except Exception as e: + logger.error(e) diff --git a/web/scanEngine/static/scanEngine/js/custom_tools.js b/web/scanEngine/static/scanEngine/js/custom_tools.js index 68af0ab5..537ebfc3 100644 --- a/web/scanEngine/static/scanEngine/js/custom_tools.js +++ b/web/scanEngine/static/scanEngine/js/custom_tools.js @@ -146,3 +146,19 @@ $("#theharvester_config_text_area").dblclick(function() { $("#theharvester-config-form").append(''); } }); + +$.getJSON(`/api/getFileContents?gau_config&format=json`, function(data) { + $("#gau_config_text_area").attr("rows", 14); + $("textarea#gau_config_text_area").html(htmlEncode(data['content'])); +}).fail(function(){ + $("#gau_config_text_area").removeAttr("readonly"); + $("textarea#gau_config_text_area").html(`# Your GAU configuration here.`); + $("#gau-config-form").append(''); +}); + +$("#gau_config_text_area").dblclick(function() { + if (!document.getElementById('gau-config-submit')) { + $("#gau_config_text_area").removeAttr("readonly"); + $("#gau-config-form").append(''); + } +}); diff --git a/web/scanEngine/templates/scanEngine/settings/tool.html b/web/scanEngine/templates/scanEngine/settings/tool.html index 57dfc2fe..53f56267 100644 --- a/web/scanEngine/templates/scanEngine/settings/tool.html +++ b/web/scanEngine/templates/scanEngine/settings/tool.html @@ -163,6 +163,22 @@

 Current theHarvester Configuration
 
+ GAU
+
+ This section lets you modify the GAU config file. You can find more information about the GAU config here.
+ Please note that only TOML config is supported.
+
+ {% csrf_token %}
+
+ Current GAU Configuration
+
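
The accompanying views.py change (below) writes this text area to ~/.config/.gau.toml, the TOML file that gau reads its settings from. A minimal sketch of such a config is shown here for orientation; the keys and values are illustrative assumptions and should be checked against gau's own example configuration rather than taken as the exact supported set:

    # Illustrative ~/.config/.gau.toml (keys are assumptions; verify against gau's documentation)
    threads = 2
    subdomains = false
    providers = ["wayback", "commoncrawl", "otx", "urlscan"]
    blacklist = ["ttf", "woff", "svg", "png", "jpg", "gif"]

If the file cannot be read, the settings-page JavaScript above falls back to pre-filling the text area with a "# Your GAU configuration here." placeholder.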
diff --git a/web/scanEngine/views.py b/web/scanEngine/views.py index 7ae6ab5b..5b0a4f25 100644 --- a/web/scanEngine/views.py +++ b/web/scanEngine/views.py @@ -13,7 +13,7 @@ from rolepermissions.decorators import has_permission_decorator from reNgine.common_func import * -from reNgine.tasks import (run_command, send_discord_message, send_slack_message,send_lark_message, send_telegram_message) +from reNgine.tasks import (run_command, send_discord_message, send_slack_message,send_lark_message, send_telegram_message, run_gf_list) from scanEngine.forms import * from scanEngine.forms import ConfigurationForm from scanEngine.models import * @@ -202,34 +202,30 @@ def tool_specific_settings(request, slug): context = {} # check for incoming form requests if request.method == "POST": - - print(request.FILES) if 'gfFileUpload' in request.FILES: gf_file = request.FILES['gfFileUpload'] - file_extension = gf_file.name.split('.')[len(gf_file.name.split('.'))-1] + file_extension = gf_file.name.split('.')[-1] if file_extension != 'json': messages.add_message(request, messages.ERROR, 'Invalid GF Pattern, upload only *.json extension') else: # remove special chars from filename, that could possibly do directory traversal or XSS filename = re.sub(r'[\\/*?:"<>|]',"", gf_file.name) - file_path = Path.home() / '.gf/' + filename - file = open(file_path, "w") - file.write(gf_file.read().decode("utf-8")) - file.close() + file_path = Path.home() / '.gf/' / filename + with open(file_path, "w") as file: + file.write(gf_file.read().decode("utf-8")) messages.add_message(request, messages.INFO, f'Pattern {gf_file.name[:4]} successfully uploaded') return http.HttpResponseRedirect(reverse('tool_settings', kwargs={'slug': slug})) elif 'nucleiFileUpload' in request.FILES: nuclei_file = request.FILES['nucleiFileUpload'] - file_extension = nuclei_file.name.split('.')[len(nuclei_file.name.split('.'))-1] + file_extension = nuclei_file.name.split('.')[-1] if file_extension != 'yaml': messages.add_message(request, messages.ERROR, 'Invalid Nuclei Pattern, upload only *.yaml extension') else: filename = re.sub(r'[\\/*?:"<>|]',"", nuclei_file.name) - file_path = Path.home() / 'nuclei-templates/' + filename - file = open(file_path, "w") - file.write(nuclei_file.read().decode("utf-8")) - file.close() + file_path = Path.home() / 'nuclei-templates/' / filename + with open(file_path, "w") as file: + file.write(nuclei_file.read().decode("utf-8")) messages.add_message(request, messages.INFO, f'Nuclei Pattern {nuclei_file.name[:-5]} successfully uploaded') return http.HttpResponseRedirect(reverse('tool_settings', kwargs={'slug': slug})) @@ -258,18 +254,34 @@ def tool_specific_settings(request, slug): return http.HttpResponseRedirect(reverse('tool_settings', kwargs={'slug': slug})) elif 'theharvester_config_text_area' in request.POST: - with open(Path(RENGINE_TOOL_GITHUB_PATH) / 'theHarvester' / 'api-keys.yaml', "w") as fhandle: + with open(Path.home() / '.config' / 'theHarvester' / 'api-keys.yaml', "w") as fhandle: fhandle.write(request.POST.get('theharvester_config_text_area')) messages.add_message(request, messages.INFO, 'theHarvester config updated!') return http.HttpResponseRedirect(reverse('tool_settings', kwargs={'slug': slug})) + elif 'gau_config_text_area' in request.POST: + with open(Path.home() / '.config' / '.gau.toml', "w") as fhandle: + fhandle.write(request.POST.get('gau_config_text_area')) + messages.add_message(request, messages.INFO, 'GAU config updated!') + return http.HttpResponseRedirect(reverse('tool_settings', 
kwargs={'slug': slug})) + context['settings_nav_active'] = 'active' context['tool_settings_li'] = 'active' context['settings_ul_show'] = 'show' - gf_list = (subprocess.check_output(['gf', '-list'])).decode("utf-8") - nuclei_custom_pattern = [f for f in glob.glob(Path.home() / "nuclei-templates" / "*.yaml")] + try: + gf_task = run_gf_list.delay() + gf_result = gf_task.get(timeout=30) # 30 seconds timeout + if gf_result['status']: + context['gf_patterns'] = sorted(gf_result['output']) + else: + context['gf_patterns'] = [] + messages.add_message(request, messages.ERROR, f"Error fetching GF patterns: {gf_result['message']}") + except Exception as e: + context['gf_patterns'] = [] + messages.add_message(request, messages.ERROR, f"Error fetching GF patterns: {str(e)}") + nuclei_custom_pattern = [f for f in glob.glob(str(Path.home() / "nuclei-templates" / "*.yaml"))] context['nuclei_templates'] = nuclei_custom_pattern - context['gf_patterns'] = sorted(gf_list.split('\n')) + context['gf_patterns'] = context['gf_patterns'] return render(request, 'scanEngine/settings/tool.html', context) diff --git a/web/static/custom/custom.js b/web/static/custom/custom.js index d26eb5d2..9f1ffadd 100644 --- a/web/static/custom/custom.js +++ b/web/static/custom/custom.js @@ -1789,13 +1789,17 @@ function display_whois_on_modal(response, show_add_target_btn=false) { content += `
`; - content += `
${response.nameservers.length} NameServers identified
`; - - for (var ns in response.nameservers) { - var ns_object = response.nameservers[ns]; - content += `${ns_object}`; + if (response.nameservers && response.nameservers.length > 0) { + content += `
${response.nameservers.length} NameServers identified
`; + + for (var ns in response.nameservers) { + var ns_object = response.nameservers[ns]; + content += `${ns_object}`; + } + } else { + content += `
No NameServer identified
`; } - + content += `
`; if (response.related_tlds.length > 0) { From 1ae599d049475f70a6e2c71fe81b21a0da8331ec Mon Sep 17 00:00:00 2001 From: psyray Date: Thu, 29 Aug 2024 18:03:01 +0200 Subject: [PATCH 243/262] bug(oneforall): fix wrong s3 bucket reported --- web/reNgine/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/web/reNgine/tasks.py b/web/reNgine/tasks.py index afe7609b..7c9f452d 100644 --- a/web/reNgine/tasks.py +++ b/web/reNgine/tasks.py @@ -426,7 +426,7 @@ def subdomain_discovery( elif tool == 'oneforall': cmd = f'oneforall --target {host} run' - cmd_extract = f'cut -d\',\' -f6 ' + str(Path(RENGINE_TOOL_GITHUB_PATH) / 'OneForAll' / 'results' / f'{host}.csv') + ' > ' + str(Path(self.results_dir) / 'subdomains_oneforall.txt') + cmd_extract = f'cut -d\',\' -f6 ' + str(Path(RENGINE_TOOL_GITHUB_PATH) / 'OneForAll' / 'results' / f'{host}.csv') + ' | tail -n +2 > ' + str(Path(self.results_dir) / 'subdomains_oneforall.txt') cmd_rm = f'rm -rf ' + str(Path(RENGINE_TOOL_GITHUB_PATH) / 'OneForAll' / 'results'/ f'{host}.csv') cmd += f' && {cmd_extract} && {cmd_rm}' From 72fc7a64e3d71d2acdc0812eccaa1ef7e948d2af Mon Sep 17 00:00:00 2001 From: psyray Date: Thu, 29 Aug 2024 20:13:21 +0200 Subject: [PATCH 244/262] fix(ssl): add SAN extension to the cert --- docker/certs/entrypoint.sh | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/docker/certs/entrypoint.sh b/docker/certs/entrypoint.sh index fded161b..4d707f8e 100755 --- a/docker/certs/entrypoint.sh +++ b/docker/certs/entrypoint.sh @@ -19,10 +19,14 @@ cert() { -out ${FILENAME}.csr \ -subj "/C=${COUNTRY_CODE}/ST=${STATE}/L=${CITY}/O=${COMPANY}/CN=${COMMON_NAME}" + # Creating SAN extension which is needed by modern browsers + echo "subjectAltName=DNS:${COMMON_NAME}" > client-ext.cnf + # Create a new certificate using our own CA openssl x509 -req -sha256 -passin pass:${AUTHORITY_PASSWORD} -days 3650 \ -in ${FILENAME}.csr -CA ca.crt -CAkey ca.key \ - -out ${FILENAME}.crt + -out ${FILENAME}.crt \ + -extfile client-ext.cnf # Rename files and remove useless ones mv ${FILENAME}.crt ${FILENAME}.pem From 3241f76c39c58989357da528ddda7b5f951e4864 Mon Sep 17 00:00:00 2001 From: psyray Date: Thu, 29 Aug 2024 20:26:07 +0200 Subject: [PATCH 245/262] bug(ui): fix xss stored --- web/startScan/static/startScan/js/detail_scan.js | 15 +++++++++++++++ web/static/custom/custom.js | 11 +++++++++-- 2 files changed, 24 insertions(+), 2 deletions(-) diff --git a/web/startScan/static/startScan/js/detail_scan.js b/web/startScan/static/startScan/js/detail_scan.js index 0a8d5ba6..3b28fe6e 100644 --- a/web/startScan/static/startScan/js/detail_scan.js +++ b/web/startScan/static/startScan/js/detail_scan.js @@ -145,6 +145,12 @@ function get_endpoints(project, scan_history_id=null, domain_id=null, gf_tags=nu }, "targets": 2, }, + { + "render": function ( data, type, row ) { + return htmlEncode(data); + }, + "targets": 3, + }, { "render": function ( data, type, row ) { if (data){ @@ -345,6 +351,15 @@ function get_subdomain_changes(scan_history_id){ }, "targets": 0 }, + { + "render": function ( data, type, row ) { + if (data){ + return htmlEncode(data); + } + return ""; + }, + "targets": 1, + }, { "render": function ( data, type, row ) { // display badge based on http status diff --git a/web/static/custom/custom.js b/web/static/custom/custom.js index 9f1ffadd..884b29da 100644 --- a/web/static/custom/custom.js +++ b/web/static/custom/custom.js @@ -692,7 +692,14 @@ function get_interesting_endpoints(project, target_id, scan_history_id) { return "" + url + ""; 
}, "targets": 0 - }, { + }, + { + "render": function(data, type, row) { + return htmlEncode(data); + }, + "targets": 1 + }, + { "render": function(data, type, row) { // display badge based on http status // green for http status 2XX, orange for 3XX and warning for everything else @@ -1096,7 +1103,7 @@ function render_endpoint_in_xlmodal(endpoint_count, subdomain_name, result) { ${http_url_td} ${get_http_status_badge(endpoint['http_status'])} - ${return_str_if_not_null(endpoint['page_title'])} + ${return_str_if_not_null(htmlEncode(endpoint['page_title']))} ${parse_comma_values_into_span(endpoint['matched_gf_patterns'], "danger", outline=true)} ${return_str_if_not_null(endpoint['content_type'])} ${return_str_if_not_null(endpoint['content_length'])} From 75f042f6e2f7dfcb2a8b808a3fd315c1bfbea328 Mon Sep 17 00:00:00 2001 From: psyray Date: Sat, 31 Aug 2024 13:30:17 +0200 Subject: [PATCH 246/262] fix(install): revert changes of prebuilt chain --- install.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/install.sh b/install.sh index f873d178..3976879d 100755 --- a/install.sh +++ b/install.sh @@ -85,11 +85,11 @@ if [ $isNonInteractive = false ]; then INSTALL_TYPE="source" ;; 2|"") - INSTALL_TYPE="pre-built" + INSTALL_TYPE="prebuilt" ;; *) log "Invalid choice. Defaulting to pre-built images." $COLOR_YELLOW - INSTALL_TYPE="pre-built" + INSTALL_TYPE="prebuilt" ;; esac From f9aaef20e9f8297e715276682f969afb4221c57f Mon Sep 17 00:00:00 2001 From: psyray Date: Sat, 31 Aug 2024 14:08:47 +0200 Subject: [PATCH 247/262] fix(ui): fix 500 error on scan engine add --- web/scanEngine/views.py | 1 + 1 file changed, 1 insertion(+) diff --git a/web/scanEngine/views.py b/web/scanEngine/views.py index 5b0a4f25..397b59c6 100644 --- a/web/scanEngine/views.py +++ b/web/scanEngine/views.py @@ -14,6 +14,7 @@ from reNgine.common_func import * from reNgine.tasks import (run_command, send_discord_message, send_slack_message,send_lark_message, send_telegram_message, run_gf_list) +from reNgine.settings import RENGINE_HOME from scanEngine.forms import * from scanEngine.forms import ConfigurationForm from scanEngine.models import * From f8ecea7b89c6825d82865da5140f8cc4f9786c62 Mon Sep 17 00:00:00 2001 From: psyray Date: Mon, 2 Sep 2024 14:26:53 +0200 Subject: [PATCH 248/262] fix(readme): redirect install & update section to the wiki pages --- README.md | 78 ++----------------------------------------------------- 1 file changed, 2 insertions(+), 76 deletions(-) diff --git a/README.md b/README.md index f78b9758..d81e0660 100644 --- a/README.md +++ b/README.md @@ -77,85 +77,11 @@ You can find detailed documentation in the repository [Wiki](https://github.com/ ### Quick Installation -1. Clone this repo - - ```bash - git clone https://github.com/Security-Tools-Alliance/rengine-ng && cd rengine-ng - ``` - -1. Edit the `.env` file, **please make sure to change the password for postgresql `POSTGRES_PASSWORD`!** - - ```bash - nano .env - ``` - -1. **Optional, only for non-interactive install**: In the `.env` file, **please make sure to change the super admin values!** - - ```bash - DJANGO_SUPERUSER_USERNAME=yourUsername - DJANGO_SUPERUSER_EMAIL=YourMail@example.com - DJANGO_SUPERUSER_PASSWORD=yourStrongPassword - ``` - - If you need to carry out a non-interactive installation, you can setup the login, email and password of the web interface admin directly from the .env file (instead of manually setting them from prompts during the installation process). 
This option can be interesting for automated installation (via ansible, vagrant, etc.). - - `DJANGO_SUPERUSER_USERNAME`: web interface admin username (used to login to the web interface). - - `DJANGO_SUPERUSER_EMAIL`: web interface admin email. - - `DJANGO_SUPERUSER_PASSWORD`: web interface admin password (used to login to the web interface). - -1. In the dotenv file, you may also modify the Scaling Configurations - - ```bash - MAX_CONCURRENCY=30 - MIN_CONCURRENCY=10 - ``` - - MAX_CONCURRENCY: This parameter specifies the maximum number of reNgine-ng's concurrent Celery worker processes that can be spawned. In this case, it's set to 80, meaning that the application can utilize up to 80 concurrent worker processes to execute tasks concurrently. This is useful for handling a high volume of scans or when you want to scale up processing power during periods of high demand. If you have more CPU cores, you will need to increase this for maximized performance. - - MIN_CONCURRENCY: On the other hand, MIN_CONCURRENCY specifies the minimum number of concurrent worker processes that should be maintained, even during periods of lower demand. In this example, it's set to 10, which means that even when there are fewer tasks to process, at least 10 worker processes will be kept running. This helps ensure that the application can respond promptly to incoming tasks without the overhead of repeatedly starting and stopping worker processes. - - These settings allow for dynamic scaling of Celery workers, ensuring that the application efficiently manages its workload by adjusting the number of concurrent workers based on the workload's size and complexity. - - Here is the ideal value for `MIN_CONCURRENCY` and `MAX_CONCURRENCY` depending on the number of RAM your machine has: - - * 4GB: `MAX_CONCURRENCY=10` - * 8GB: `MAX_CONCURRENCY=30` - * 16GB: `MAX_CONCURRENCY=50` - - This is just an ideal value which developers have tested and tried out and works! But feel free to play around with the values. - Maximum number of scans is determined by various factors, your network bandwidth, RAM, number of CPUs available. etc - -1. Run the installation script, Please keep an eye for any prompt, you will also be asked for username and password for reNgine-ng. - - ```bash - sudo ./install.sh - ``` - - Or for a non-interactive installation, use `-n` argument (make sure you've modified the `.env` file before launching the installation). - - ```bash - sudo ./install.sh -n - ``` - - If `install.sh` does not have execution permissions, please grant it execution permissions: `chmod +x install.sh` - -Detailed installation instructions can be found at [https://github.com/Security-Tools-Alliance/rengine-ng/wiki/Installation#-quick-installation](https://github.com/Security-Tools-Alliance/rengine-ng/wiki/Installation#-quick-installation) +Detailed installation instructions can be found in the [install section of the wiki](https://github.com/Security-Tools-Alliance/rengine-ng/wiki/Installation#-quick-installation) ### Updating -1. Updating is as simple as running the following command: - - ```bash - cd rengine-ng && sudo ./update.sh - ``` - - If `update.sh` does not have execution permissions, please grant it execution permissions: `sudo chmod +x update.sh` - - **NOTE:** if you're updating from 1.3.6 and you're getting a 'password authentication failed' error, consider uninstalling 1.3.6 first, then install 2.x.x as you'd normally do. 
- -Detailed update instructions: +Detailed update instructions can be found in the [update section of the wiki](https://github.com/Security-Tools-Alliance/rengine-ng/wiki/Installation#-quick-installation) ### Changelog From 5ca5915bbb8135d4e6b9b60f9c76adf5b2562ebd Mon Sep 17 00:00:00 2001 From: Psyray Date: Mon, 2 Sep 2024 18:37:51 +0200 Subject: [PATCH 249/262] build(ci): build docker images for each tag, release, push (#151) * build(ci): build docker images for each tag, release, push * build(ci): set bot name and login vars * docker(build): add arm64 to the build options * build(ci): add manual push workflow and use repo vars * build(ci): update vscode tasks to push latest tag * fix from feedback --- .github/workflows/build.yml | 113 ++++++++++++++++++++++++++++++++---- .vscode/tasks.json | 84 +++++++++++++++++++++++---- 2 files changed, 176 insertions(+), 21 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 41fa9d98..991d9b43 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -1,23 +1,116 @@ name: Docker Image CI on: + pull_request: push: branches: - - master + - "master" + - "release/**" + tags: + - "v*.*.*" + release: + types: [published] + workflow_dispatch: + inputs: + push_image: + description: 'Push image to registry' + required: true + default: 'false' + type: choice + options: + - 'true' + - 'false' + +env: + REGISTRY: ghcr.io + OWNER: security-tools-alliance + PROJECT: rengine-ng jobs: - build: + build-and-push: runs-on: ubuntu-latest - + strategy: + matrix: + image: [celery, web, postgres, redis, ollama, certs, proxy] + platform: [linux/amd64, linux/arm64] steps: - - name: Checkout the repo + - name: Checkout code uses: actions/checkout@v4 - - name: Log in to GitHub's Container Registry - run: echo "${{ secrets.CONTAINER_REGISTRY_SECRET }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin + - name: Docker meta + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.REGISTRY }}/${{ env.OWNER }}/${{ env.PROJECT }} + tags: | + type=ref,event=branch + type=ref,event=pr + type=semver,pattern={{version}} + type=semver,pattern={{major}}.{{minor}} + type=semver,pattern={{major}} + type=sha + type=raw,value=latest,enable={{is_default_branch}} + + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Login to GHCR + if: github.event_name != 'pull_request' || github.event.inputs.push_image == 'true' + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ vars.GHCR_USERNAME }} + password: ${{ secrets.GHCR_PAT }} + + - name: Build and push + uses: docker/build-push-action@v6 + with: + context: ./docker/${{ matrix.image }} + file: ./docker/${{ matrix.image }}/Dockerfile + push: ${{ github.event_name != 'pull_request' || github.event.inputs.push_image == 'true' }} + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + platforms: ${{ matrix.platform }} + outputs: type=docker,dest=/tmp/image.tar + + - name: Push image if exists + if: github.event.inputs.push_image == 'true' + run: | + if [ -f /tmp/image.tar ]; then + docker load --input /tmp/image.tar + docker push ${{ steps.meta.outputs.tags }} + else + echo "No image found to push" + fi - - name: Build the Docker image - run: docker build . 
-t ghcr.io/Security-Tools-Alliance/rengine-ng:latest + update-release: + needs: build-and-push + if: github.event_name == 'release' && github.event.action == 'published' + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 - - name: Push the Docker image - run: docker push ghcr.io/Security-Tools-Alliance/rengine-ng:latest + - name: Update release description + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + release_id=$(curl -s -H "Authorization: token $GITHUB_TOKEN" \ + "https://api.github.com/repos/${{ github.repository }}/releases/latest" | \ + jq -r .id) + + images="celery web postgres redis ollama certs proxy" + image_list="" + for image in $images; do + image_list="${image_list}- ghcr.io/${{ env.OWNER }}/${{ env.PROJECT }}:rengine-${image}-${{ github.ref_name }}\n" + done + + body="Docker images for this release:\n${image_list}" + + curl -X PATCH -H "Authorization: token $GITHUB_TOKEN" \ + -H "Accept: application/vnd.github.v3+json" \ + "https://api.github.com/repos/${{ github.repository }}/releases/${release_id}" \ + -d "{\"body\": \"$body\"}" \ No newline at end of file diff --git a/.vscode/tasks.json b/.vscode/tasks.json index 262cf340..e778e7b3 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -4,7 +4,7 @@ { "label": "Build and Push Docker Image", "type": "shell", - "command": "cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-${image}-${version} -f ./${image}/Dockerfile ./${image} && docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-${image}-${version}", + "command": "cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-${image}-${version} -t ghcr.io/security-tools-alliance/rengine-ng:rengine-${image}-latest -f ./${image}/Dockerfile ./${image} && docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-${image}-${version} && if [ \"${input:isLatest}\" = \"true\" ]; then docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-${image}-latest; fi", "problemMatcher": [], "options": { "env": { @@ -16,7 +16,6 @@ { "label": "Build All Docker Images", "type": "shell", - "command": "echo Building all images with version ${input:globalVersion}", "dependsOn": [ "Build CELERY", "Build WEB", @@ -29,46 +28,103 @@ "dependsOrder": "sequence", "problemMatcher": [] }, + { + "label": "Build and Push All Docker Images", + "type": "shell", + "dependsOn": [ + "Build and Push CELERY", + "Build and Push WEB", + "Build and Push POSTGRES", + "Build and Push REDIS", + "Build and Push OLLAMA", + "Build and Push CERTS", + "Build and Push PROXY" + ], + "dependsOrder": "sequence", + "problemMatcher": [] + }, { "label": "Build CELERY", "type": "shell", - "command": "cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-celery-${input:globalVersion} -f ./celery/Dockerfile ./celery && docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-celery-${input:globalVersion}", + "command": "cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-celery-${input:globalVersion} -t ghcr.io/security-tools-alliance/rengine-ng:rengine-celery-latest -f ./celery/Dockerfile ./celery", "problemMatcher": [] }, { "label": "Build WEB", "type": "shell", - "command": "cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-web-${input:globalVersion} -f ./web/Dockerfile ./web && docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-web-${input:globalVersion}", + "command": 
"cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-web-${input:globalVersion} -t ghcr.io/security-tools-alliance/rengine-ng:rengine-web-latest -f ./web/Dockerfile ./web", "problemMatcher": [] }, { "label": "Build POSTGRES", "type": "shell", - "command": "cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-postgres-${input:globalVersion} -f ./postgres/Dockerfile ./postgres && docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-postgres-${input:globalVersion}", + "command": "cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-postgres-${input:globalVersion} -t ghcr.io/security-tools-alliance/rengine-ng:rengine-postgres-latest -f ./postgres/Dockerfile ./postgres", "problemMatcher": [] }, { "label": "Build REDIS", "type": "shell", - "command": "cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-redis-${input:globalVersion} -f ./redis/Dockerfile ./redis && docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-redis-${input:globalVersion}", + "command": "cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-redis-${input:globalVersion} -t ghcr.io/security-tools-alliance/rengine-ng:rengine-redis-latest -f ./redis/Dockerfile ./redis", "problemMatcher": [] }, { "label": "Build OLLAMA", "type": "shell", - "command": "cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-ollama-${input:globalVersion} -f ./ollama/Dockerfile ./ollama && docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-ollama-${input:globalVersion}", + "command": "cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-ollama-${input:globalVersion} -t ghcr.io/security-tools-alliance/rengine-ng:rengine-ollama-latest -f ./ollama/Dockerfile ./ollama", "problemMatcher": [] }, { "label": "Build CERTS", "type": "shell", - "command": "cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-certs-${input:globalVersion} -f ./certs/Dockerfile ./certs && docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-certs-${input:globalVersion}", + "command": "cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-certs-${input:globalVersion} -t ghcr.io/security-tools-alliance/rengine-ng:rengine-certs-latest -f ./certs/Dockerfile ./certs", "problemMatcher": [] }, { "label": "Build PROXY", "type": "shell", - "command": "cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-proxy-${input:globalVersion} -f ./proxy/Dockerfile ./proxy && docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-proxy-${input:globalVersion}", + "command": "cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-proxy-${input:globalVersion} -t ghcr.io/security-tools-alliance/rengine-ng:rengine-proxy-latest -f ./proxy/Dockerfile ./proxy", + "problemMatcher": [] + }, + { + "label": "Build and Push CELERY", + "type": "shell", + "command": "cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-celery-${input:globalVersion} -t ghcr.io/security-tools-alliance/rengine-ng:rengine-celery-latest -f ./celery/Dockerfile ./celery && docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-celery-${input:globalVersion} && if [ \"${input:isLatest}\" = \"true\" ]; then docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-celery-latest; fi", 
+ "problemMatcher": [] + }, + { + "label": "Build and Push WEB", + "type": "shell", + "command": "cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-web-${input:globalVersion} -t ghcr.io/security-tools-alliance/rengine-ng:rengine-web-latest -f ./web/Dockerfile ./web && docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-web-${input:globalVersion} && if [ \"${input:isLatest}\" = \"true\" ]; then docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-web-latest; fi", + "problemMatcher": [] + }, + { + "label": "Build and Push POSTGRES", + "type": "shell", + "command": "cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-postgres-${input:globalVersion} -t ghcr.io/security-tools-alliance/rengine-ng:rengine-postgres-latest -f ./postgres/Dockerfile ./postgres && docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-postgres-${input:globalVersion} && if [ \"${input:isLatest}\" = \"true\" ]; then docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-postgres-latest; fi", + "problemMatcher": [] + }, + { + "label": "Build and Push REDIS", + "type": "shell", + "command": "cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-redis-${input:globalVersion} -t ghcr.io/security-tools-alliance/rengine-ng:rengine-redis-latest -f ./redis/Dockerfile ./redis && docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-redis-${input:globalVersion} && if [ \"${input:isLatest}\" = \"true\" ]; then docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-redis-latest; fi", + "problemMatcher": [] + }, + { + "label": "Build and Push OLLAMA", + "type": "shell", + "command": "cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-ollama-${input:globalVersion} -t ghcr.io/security-tools-alliance/rengine-ng:rengine-ollama-latest -f ./ollama/Dockerfile ./ollama && docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-ollama-${input:globalVersion} && if [ \"${input:isLatest}\" = \"true\" ]; then docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-ollama-latest; fi", + "problemMatcher": [] + }, + { + "label": "Build and Push CERTS", + "type": "shell", + "command": "cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-certs-${input:globalVersion} -t ghcr.io/security-tools-alliance/rengine-ng:rengine-certs-latest -f ./certs/Dockerfile ./certs && docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-certs-${input:globalVersion} && if [ \"${input:isLatest}\" = \"true\" ]; then docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-certs-latest; fi", + "problemMatcher": [] + }, + { + "label": "Build and Push PROXY", + "type": "shell", + "command": "cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-proxy-${input:globalVersion} -t ghcr.io/security-tools-alliance/rengine-ng:rengine-proxy-latest -f ./proxy/Dockerfile ./proxy && docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-proxy-${input:globalVersion} && if [ \"${input:isLatest}\" = \"true\" ]; then docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-proxy-latest; fi", "problemMatcher": [] } ], @@ -88,6 +144,12 @@ "type": "pickString", "description": "Select the image to build", "options": ["celery", "web", "postgres", "redis", "ollama", "certs", "proxy"] - } - ] + }, + { + "id": "isLatest", + "type": "pickString", + "description": "Is this the latest version (this will 
also push the 'latest' tag)?", + "options": ["true", "false"], + "default": "false" + } ] } \ No newline at end of file From 7f5b8bc30cb37eade0e62adc56c05079fc313833 Mon Sep 17 00:00:00 2001 From: psyray Date: Mon, 2 Sep 2024 19:03:33 +0200 Subject: [PATCH 250/262] fix(install): improve root detection and set ownership on files --- .env-dist | 3 ++- install.sh | 30 +++++++++++++++++++++++++----- make.bat | 51 --------------------------------------------------- 3 files changed, 27 insertions(+), 57 deletions(-) delete mode 100644 make.bat diff --git a/.env-dist b/.env-dist index 75a7be4a..2a7bb618 100644 --- a/.env-dist +++ b/.env-dist @@ -15,7 +15,8 @@ STATE=Georgia CITY=Atlanta # -# Database configurations +# Database configurations +# /!\ POSTGRES_USER & PG_USER must be the same user or Celery will fail to start # POSTGRES_DB=rengine POSTGRES_USER=rengine diff --git a/install.sh b/install.sh index 3976879d..b46b01d5 100755 --- a/install.sh +++ b/install.sh @@ -12,12 +12,32 @@ for ip in $internal_ips; do done # Check for root privileges -if [ "$(whoami)" != "root" ] - then - log "" - log "Error installing reNgine-ng: please run this script as root!" $COLOR_RED +if [ $EUID -eq 0 ]; then + if [ "$SUDO_USER" = "root" ] || [ "$SUDO_USER" = "" ]; then + log "Error: Do not run this script as root. Use sudo with a non-root user." $COLOR_RED + log "Example: sudo ./install.sh" $COLOR_RED + exit 1 + fi +fi + +# Check if the script is run with sudo +if [ -z "$SUDO_USER" ]; then + log "Error: This script must be run with sudo." $COLOR_RED log "Example: sudo ./install.sh" $COLOR_RED - exit + exit 1 +fi + +# Check that the project directory is not owned by root +project_dir=$(pwd) +if [ "$(stat -c '%U' $project_dir)" = "root" ]; then + log "The project directory is owned by root. Changing ownership..." $COLOR_YELLOW + sudo chown -R $SUDO_USER:$SUDO_USER $project_dir + if [ $? -eq 0 ]; then + log "Project directory ownership successfully changed." $COLOR_GREEN + else + log "Failed to change project directory ownership." $COLOR_RED + exit 1 + fi fi usageFunction() diff --git a/make.bat b/make.bat deleted file mode 100644 index f796a32b..00000000 --- a/make.bat +++ /dev/null @@ -1,51 +0,0 @@ -@echo off - -:: Credits: https://github.com/ninjhacks - -set COMPOSE_FILE = -f docker/docker-compose.yml -set COMPOSE_DEV_FILE = -f docker/docker-compose.dev.yml -set COMPOSE_BUILD_FILE = -f docker/docker-compose.build.yml -set SERVICES = db web proxy redis celery celery-beat - -:: Generate certificates. -if "%1" == "certs" docker compose -f docker/docker-compose.setup.yml run --rm certs -:: Generate certificates. -if "%1" == "setup" docker compose -f docker/docker-compose.setup.yml run --rm certs -:: Build and start all services. -if "%1" == "up" docker compose %COMPOSE_FILE% up -d %SERVICES% -:: Build all services. -if "%1" == "build" docker compose %COMPOSE_FILE% %COMPOSE_BUILD_FILE% build %SERVICES% -:: Build and start all services. -if "%1" == "build_up" ( - docker compose %COMPOSE_FILE% %COMPOSE_BUILD_FILE% build %SERVICES% - docker compose %COMPOSE_FILE% up -d %SERVICES% -) -:: Pull and start all services. -if "%1" == "pull_up" ( - docker compose %COMPOSE_FILE% pull %SERVICES% - docker compose %COMPOSE_FILE% up -d %SERVICES% -) -:: Generate Username (use only after make up). 
-if "%1" == "username" docker compose %COMPOSE_FILE% exec web python3 manage.py createsuperuser -:: Change password for user -if "%1" == "changepassword" docker compose %COMPOSE_FILE% exec web python3 manage.py changepassword -:: Apply migrations -if "%1" == "migrate" docker compose %COMPOSE_FILE% exec web python3 manage.py migrate -:: Pull Docker images. -if "%1" == "pull" docker login docker.pkg.github.com & docker compose %COMPOSE_FILE% pull -:: Down all services. -if "%1" == "down" docker compose %COMPOSE_FILE% down -:: Stop all services. -if "%1" == "stop" docker compose %COMPOSE_FILE% stop %SERVICES% -:: Restart all services. -if "%1" == "restart" docker compose %COMPOSE_FILE% restart %SERVICES% -:: Remove all services containers. -if "%1" == "rm" docker compose %COMPOSE_FILE% rm -f %SERVICES% -:: Tail all logs with -n 1000. -if "%1" == "logs" docker compose %COMPOSE_FILE% logs --follow --tail=1000 %SERVICES% -:: Show all Docker images. -if "%1" == "images" docker compose %COMPOSE_FILE% images %SERVICES% -:: Remove containers and delete volume data. -if "%1" == "prune" docker compose %COMPOSE_FILE% stop %SERVICES% & docker compose %COMPOSE_FILE% rm -f %SERVICES% & docker volume prune -f -:: Show this help. -if "%1" == "help" @echo Make application Docker images and manage containers using Docker Compose files only for Windows. From 3a48a44660f7369eabe1897cc4c843157e8299ed Mon Sep 17 00:00:00 2001 From: psyray Date: Mon, 2 Sep 2024 22:38:43 +0200 Subject: [PATCH 251/262] style(install): Apply suggestions --- install.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/install.sh b/install.sh index b46b01d5..38de5a72 100755 --- a/install.sh +++ b/install.sh @@ -14,8 +14,8 @@ done # Check for root privileges if [ $EUID -eq 0 ]; then if [ "$SUDO_USER" = "root" ] || [ "$SUDO_USER" = "" ]; then - log "Error: Do not run this script as root. Use sudo with a non-root user." $COLOR_RED - log "Example: sudo ./install.sh" $COLOR_RED + log "Error: Do not run this script as root user. Use 'sudo' with a non-root user." $COLOR_RED + log "Example: 'sudo ./install.sh'" $COLOR_RED exit 1 fi fi @@ -23,7 +23,7 @@ fi # Check if the script is run with sudo if [ -z "$SUDO_USER" ]; then log "Error: This script must be run with sudo." 
$COLOR_RED - log "Example: sudo ./install.sh" $COLOR_RED + log "Example: 'sudo ./install.sh'" $COLOR_RED exit 1 fi From 7ff6d012fbcc93ca47b4dcc4b0c138cb78191ab5 Mon Sep 17 00:00:00 2001 From: Psyray Date: Wed, 4 Sep 2024 20:06:51 +0200 Subject: [PATCH 252/262] build(images): restrict image creation, add correct tags and clean non tagged images (#193) * build(ci): restrict build to file in docker folder & improve tags * build(ci): add a remove image without tag workflow * build(ci): set automatic execution for image removal * style(language): remove french comments --- .github/workflows/build.yml | 43 ++++++---------- .github/workflows/delete-untagged-images.yml | 52 ++++++++++++++++++++ 2 files changed, 68 insertions(+), 27 deletions(-) create mode 100644 .github/workflows/delete-untagged-images.yml diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 991d9b43..7d60f66f 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -2,10 +2,14 @@ name: Docker Image CI on: pull_request: + paths: + - 'docker/**' push: branches: - "master" - "release/**" + paths: + - 'docker/**' tags: - "v*.*.*" release: @@ -37,19 +41,14 @@ jobs: - name: Checkout code uses: actions/checkout@v4 - - name: Docker meta - id: meta - uses: docker/metadata-action@v5 - with: - images: ${{ env.REGISTRY }}/${{ env.OWNER }}/${{ env.PROJECT }} - tags: | - type=ref,event=branch - type=ref,event=pr - type=semver,pattern={{version}} - type=semver,pattern={{major}}.{{minor}} - type=semver,pattern={{major}} - type=sha - type=raw,value=latest,enable={{is_default_branch}} + - name: Get version + id: get_version + run: | + if [[ $GITHUB_REF == refs/tags/* ]]; then + echo "VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT + else + echo "VERSION=latest" >> $GITHUB_OUTPUT + fi - name: Set up QEMU uses: docker/setup-qemu-action@v3 @@ -71,20 +70,10 @@ jobs: context: ./docker/${{ matrix.image }} file: ./docker/${{ matrix.image }}/Dockerfile push: ${{ github.event_name != 'pull_request' || github.event.inputs.push_image == 'true' }} - tags: ${{ steps.meta.outputs.tags }} - labels: ${{ steps.meta.outputs.labels }} + tags: | + ${{ env.REGISTRY }}/${{ env.OWNER }}/${{ env.PROJECT }}:rengine-${{ matrix.image }}-${{ steps.get_version.outputs.VERSION }} + ${{ env.REGISTRY }}/${{ env.OWNER }}/${{ env.PROJECT }}:rengine-${{ matrix.image }}-latest platforms: ${{ matrix.platform }} - outputs: type=docker,dest=/tmp/image.tar - - - name: Push image if exists - if: github.event.inputs.push_image == 'true' - run: | - if [ -f /tmp/image.tar ]; then - docker load --input /tmp/image.tar - docker push ${{ steps.meta.outputs.tags }} - else - echo "No image found to push" - fi update-release: needs: build-and-push @@ -113,4 +102,4 @@ jobs: curl -X PATCH -H "Authorization: token $GITHUB_TOKEN" \ -H "Accept: application/vnd.github.v3+json" \ "https://api.github.com/repos/${{ github.repository }}/releases/${release_id}" \ - -d "{\"body\": \"$body\"}" \ No newline at end of file + -d "{\"body\": \"$body\"}" diff --git a/.github/workflows/delete-untagged-images.yml b/.github/workflows/delete-untagged-images.yml new file mode 100644 index 00000000..daef0219 --- /dev/null +++ b/.github/workflows/delete-untagged-images.yml @@ -0,0 +1,52 @@ +name: Delete Untagged GHCR Images + +on: + workflow_dispatch: + inputs: + dry_run: + description: 'Dry run (does not delete images)' + required: true + default: 'true' + type: choice + options: + - 'true' + - 'false' + schedule: + - cron: '0 0 1,15 * *' + +env: + REGISTRY: ghcr.io + 
OWNER: security-tools-alliance + PROJECT: rengine-ng + +jobs: + delete-untagged-ghcr: + runs-on: ubuntu-latest + steps: + - name: Login to GitHub Container Registry + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ vars.GHCR_USERNAME }} + password: ${{ secrets.GHCR_PAT }} + + - name: Delete untagged images + uses: Chizkiyahu/delete-untagged-ghcr-action@v4 + with: + token: ${{ secrets.GHCR_PAT }} + repository_owner: ${{ env.OWNER }} + repository: ${{ env.PROJECT }} + untagged_only: true + owner_type: org + except_untagged_multiplatform: true + + - name: Summary + if: always() + env: + DRY_RUN: ${{ github.event.inputs.dry_run || 'false' }} + run: | + echo "## Summary of untagged image deletion" >> $GITHUB_STEP_SUMMARY + echo "- Dry run: $DRY_RUN" >> $GITHUB_STEP_SUMMARY + echo "- Owner: $OWNER" >> $GITHUB_STEP_SUMMARY + echo "- Project: $PROJECT" >> $GITHUB_STEP_SUMMARY + echo "Check the logs above for more details on deleted images or images that would have been deleted in dry run mode." >> $GITHUB_STEP_SUMMARY From 4d3540075e59f755a10a75ce47a8459c1ea17872 Mon Sep 17 00:00:00 2001 From: Psyray Date: Wed, 4 Sep 2024 21:27:22 +0200 Subject: [PATCH 253/262] fix(scan): fix clocked and scheduled scan not working (#182) * fix(scan): fix clocked and scheduled scan not working * fix(scan): store start datetime of schedule scan in UTC * fix(celery): add more DEBUG error for celery beat (datetime of tasks ...) * style(time): add UTC to the schedule time --- web/reNgine/common_func.py | 28 +++ web/reNgine/settings.py | 16 +- web/reNgine/tasks.py | 226 ++++++++++-------- .../startScan/schedule_scan_list.html | 2 +- .../templates/startScan/schedule_scan_ui.html | 14 ++ web/startScan/views.py | 97 ++++---- 6 files changed, 222 insertions(+), 161 deletions(-) diff --git a/web/reNgine/common_func.py b/web/reNgine/common_func.py index 1f8b7969..7a59e13c 100644 --- a/web/reNgine/common_func.py +++ b/web/reNgine/common_func.py @@ -1139,3 +1139,31 @@ def extract_columns(row, columns): list: Extracted values from the specified columns. 
""" return [row[i] for i in columns] + +def create_scan_object(host_id, engine_id, initiated_by_id=None): + ''' + create task with pending status so that celery task will execute when + threads are free + Args: + host_id: int: id of Domain model + engine_id: int: id of EngineType model + initiated_by_id: int : id of User model (Optional) + ''' + # get current time + current_scan_time = timezone.now() + # fetch engine and domain object + engine = EngineType.objects.get(pk=engine_id) + domain = Domain.objects.get(pk=host_id) + scan = ScanHistory() + scan.scan_status = INITIATED_TASK + scan.domain = domain + scan.scan_type = engine + scan.start_scan_date = current_scan_time + if initiated_by_id: + user = User.objects.get(pk=initiated_by_id) + scan.initiated_by = user + scan.save() + # save last scan date for domain model + domain.start_scan_date = current_scan_time + domain.save() + return scan.id diff --git a/web/reNgine/settings.py b/web/reNgine/settings.py index ea156a29..e8cf077c 100644 --- a/web/reNgine/settings.py +++ b/web/reNgine/settings.py @@ -171,10 +171,6 @@ USE_L10N = True USE_TZ = True -# Temporary fix for celery beat crash -# See https://github.com/yogeshojha/rengine/issues/971 -DJANGO_CELERY_BEAT_TZ_AWARE = False - MEDIA_URL = '/media/' FILE_UPLOAD_MAX_MEMORY_SIZE = 100000000 FILE_UPLOAD_PERMISSIONS = 0o644 @@ -264,6 +260,13 @@ 'filename': 'celery.log', 'maxBytes': 1024 * 1024 * 100, # 100 mb }, + 'celery_beat': { + 'class': 'logging.handlers.RotatingFileHandler', + 'formatter': 'simple', + 'filename': 'celery_beat.log', + 'maxBytes': 1024 * 1024 * 100, # 100 mb + 'backupCount': 5, + }, }, 'formatters': { 'default': { @@ -328,6 +331,11 @@ 'handlers': ['null'], 'propagate': False, }, + 'django_celery_beat': { + 'handlers': ['celery_beat', 'console'], + 'level': 'DEBUG', + 'propagate': True, + }, }, 'root': { 'handlers': ['console'], diff --git a/web/reNgine/tasks.py b/web/reNgine/tasks.py index 7c9f452d..85280f80 100644 --- a/web/reNgine/tasks.py +++ b/web/reNgine/tasks.py @@ -63,6 +63,7 @@ def initiate_scan( results_dir=RENGINE_RESULTS, imported_subdomains=[], out_of_scope_subdomains=[], + initiated_by_id=None, url_filter=''): """Initiate a new scan. @@ -74,134 +75,149 @@ def initiate_scan( results_dir (str): Results directory. imported_subdomains (list): Imported subdomains. out_of_scope_subdomains (list): Out-of-scope subdomains. - url_filter (str): URL path. Default: '' + url_filter (str): URL path. Default: ''. + initiated_by (int): User ID initiating the scan. 
""" if CELERY_REMOTE_DEBUG: debug() - # Get scan history - scan = ScanHistory.objects.get(pk=scan_history_id) + logger.info('Initiating scan on celery') + scan = None + try: + # Get scan engine + engine_id = engine_id or scan.scan_type.id # scan history engine_id + engine = EngineType.objects.get(pk=engine_id) - # Get scan engine - engine_id = engine_id or scan.scan_type.id # scan history engine_id - engine = EngineType.objects.get(pk=engine_id) + # Get YAML config + config = yaml.safe_load(engine.yaml_configuration) + enable_http_crawl = config.get(ENABLE_HTTP_CRAWL, DEFAULT_ENABLE_HTTP_CRAWL) + gf_patterns = config.get(GF_PATTERNS, []) - # Get YAML config - config = yaml.safe_load(engine.yaml_configuration) - enable_http_crawl = config.get(ENABLE_HTTP_CRAWL, DEFAULT_ENABLE_HTTP_CRAWL) - gf_patterns = config.get(GF_PATTERNS, []) + # Get domain and set last_scan_date + domain = Domain.objects.get(pk=domain_id) + domain.last_scan_date = timezone.now() + domain.save() - # Get domain and set last_scan_date - domain = Domain.objects.get(pk=domain_id) - domain.last_scan_date = timezone.now() - domain.save() + # Get path filter + url_filter = url_filter.rstrip('/') - # Get path filter - url_filter = url_filter.rstrip('/') + # for live scan scan history id is passed as scan_history_id + # and no need to create scan_history object - # Get or create ScanHistory() object - if scan_type == LIVE_SCAN: # immediate + if scan_type == SCHEDULED_SCAN: # scheduled + # we need to create scan_history object for each scheduled scan + scan_history_id = create_scan_object( + host_id=domain_id, + engine_id=engine_id, + initiated_by_id=initiated_by_id, + ) scan = ScanHistory.objects.get(pk=scan_history_id) scan.scan_status = RUNNING_TASK - elif scan_type == SCHEDULED_SCAN: # scheduled - scan = ScanHistory() - scan.scan_status = INITIATED_TASK - scan.scan_type = engine - scan.celery_ids = [initiate_scan.request.id] - scan.domain = domain - scan.start_scan_date = timezone.now() - scan.tasks = engine.tasks - uuid_scan = uuid.uuid1() - scan.results_dir = f'{results_dir}/{domain.name}/scans/{uuid_scan}' - add_gf_patterns = gf_patterns and 'fetch_url' in engine.tasks - if add_gf_patterns and is_iterable(gf_patterns): - scan.used_gf_patterns = ','.join(gf_patterns) - scan.save() - try: + scan.scan_type = engine + scan.celery_ids = [initiate_scan.request.id] + scan.domain = domain + scan.start_scan_date = timezone.now() + scan.tasks = engine.tasks + uuid_scan = uuid.uuid1() + scan.results_dir = f'{results_dir}/{domain.name}/scans/{uuid_scan}' + add_gf_patterns = gf_patterns and 'fetch_url' in engine.tasks + if add_gf_patterns and is_iterable(gf_patterns): + scan.used_gf_patterns = ','.join(gf_patterns) + scan.save() + + # Create scan results dir os.makedirs(scan.results_dir, exist_ok=True) - except: - import traceback - - traceback.print_exc() - raise - # Build task context - ctx = { - 'scan_history_id': scan_history_id, - 'engine_id': engine_id, - 'domain_id': domain.id, - 'results_dir': scan.results_dir, - 'url_filter': url_filter, - 'yaml_configuration': config, - 'out_of_scope_subdomains': out_of_scope_subdomains - } - ctx_str = json.dumps(ctx, indent=2) - - # Send start notif - logger.warning(f'Starting scan {scan_history_id} with context:\n{ctx_str}') - send_scan_notif.delay( - scan_history_id, - subscan_id=None, - engine_id=engine_id, - status=CELERY_TASK_STATUS_MAP[scan.scan_status]) + # Build task context + ctx = { + 'scan_history_id': scan_history_id, + 'engine_id': engine_id, + 'domain_id': domain.id, + 
'results_dir': scan.results_dir, + 'url_filter': url_filter, + 'yaml_configuration': config, + 'out_of_scope_subdomains': out_of_scope_subdomains + } + ctx_str = json.dumps(ctx, indent=2) + + # Send start notif + logger.warning(f'Starting scan {scan_history_id} with context:\n{ctx_str}') + send_scan_notif.delay( + scan_history_id, + subscan_id=None, + engine_id=engine_id, + status=CELERY_TASK_STATUS_MAP[scan.scan_status]) + + # Save imported subdomains in DB + save_imported_subdomains(imported_subdomains, ctx=ctx) + + # Create initial subdomain in DB: make a copy of domain as a subdomain so + # that other tasks using subdomains can use it. + subdomain_name = domain.name + subdomain, _ = save_subdomain(subdomain_name, ctx=ctx) - # Save imported subdomains in DB - save_imported_subdomains(imported_subdomains, ctx=ctx) - # Create initial subdomain in DB: make a copy of domain as a subdomain so - # that other tasks using subdomains can use it. - subdomain_name = domain.name - subdomain, _ = save_subdomain(subdomain_name, ctx=ctx) + # If enable_http_crawl is set, create an initial root HTTP endpoint so that + # HTTP crawling can start somewhere + http_url = f'{domain.name}{url_filter}' if url_filter else domain.name + endpoint, _ = save_endpoint( + http_url, + ctx=ctx, + crawl=enable_http_crawl, + is_default=True, + subdomain=subdomain + ) - # If enable_http_crawl is set, create an initial root HTTP endpoint so that - # HTTP crawling can start somewhere - http_url = f'{domain.name}{url_filter}' if url_filter else domain.name - endpoint, _ = save_endpoint( - http_url, - ctx=ctx, - crawl=enable_http_crawl, - is_default=True, - subdomain=subdomain - ) - save_subdomain_metadata(subdomain, endpoint) - - # Build Celery tasks, crafted according to the dependency graph below: - # subdomain_discovery --> port_scan --> fetch_url --> dir_file_fuzz - # osint vulnerability_scan - # osint dalfox xss scan - # screenshot - # waf_detection - workflow = chain( - group( - subdomain_discovery.si(ctx=ctx, description='Subdomain discovery'), - osint.si(ctx=ctx, description='OS Intelligence') - ), - port_scan.si(ctx=ctx, description='Port scan'), - fetch_url.si(ctx=ctx, description='Fetch URL'), - group( - dir_file_fuzz.si(ctx=ctx, description='Directories & files fuzz'), - vulnerability_scan.si(ctx=ctx, description='Vulnerability scan'), - screenshot.si(ctx=ctx, description='Screenshot'), - waf_detection.si(ctx=ctx, description='WAF detection') + save_subdomain_metadata(subdomain, endpoint) + + + # Build Celery tasks, crafted according to the dependency graph below: + # subdomain_discovery --> port_scan --> fetch_url --> dir_file_fuzz + # osint vulnerability_scan + # osint dalfox xss scan + # screenshot + # waf_detection + workflow = chain( + group( + subdomain_discovery.si(ctx=ctx, description='Subdomain discovery'), + osint.si(ctx=ctx, description='OS Intelligence') + ), + port_scan.si(ctx=ctx, description='Port scan'), + fetch_url.si(ctx=ctx, description='Fetch URL'), + group( + dir_file_fuzz.si(ctx=ctx, description='Directories & files fuzz'), + vulnerability_scan.si(ctx=ctx, description='Vulnerability scan'), + screenshot.si(ctx=ctx, description='Screenshot'), + waf_detection.si(ctx=ctx, description='WAF detection') + ) ) - ) - # Build callback - callback = report.si(ctx=ctx).set(link_error=[report.si(ctx=ctx)]) + # Build callback + callback = report.si(ctx=ctx).set(link_error=[report.si(ctx=ctx)]) - # Run Celery chord - logger.info(f'Running Celery workflow with {len(workflow.tasks) + 1} tasks') - task = 
chain(workflow, callback).on_error(callback).delay() - scan.celery_ids.append(task.id) - scan.save() + # Run Celery chord + logger.info(f'Running Celery workflow with {len(workflow.tasks) + 1} tasks') + task = chain(workflow, callback).on_error(callback).delay() + scan.celery_ids.append(task.id) + scan.save() - return { - 'success': True, - 'task_id': task.id - } + return { + 'success': True, + 'task_id': task.id + } + except Exception as e: + logger.exception(e) + if scan: + scan.scan_status = FAILED_TASK + scan.error_message = str(e) + scan.save() + return { + 'success': False, + 'error': str(e) + } @app.task(name='initiate_subscan', bind=False, queue='subscan_queue') def initiate_subscan( diff --git a/web/startScan/templates/startScan/schedule_scan_list.html b/web/startScan/templates/startScan/schedule_scan_list.html index 012b5427..50ae6d1e 100644 --- a/web/startScan/templates/startScan/schedule_scan_list.html +++ b/web/startScan/templates/startScan/schedule_scan_list.html @@ -48,7 +48,7 @@ Will run exactly at {{ task.clocked.clocked_time}} UTC {% endif %} - {{ task.last_run_at|none_or_never }} + {{ task.last_run_at|none_or_never }} UTC {{ task.total_run_count }} {% if task.one_off %} diff --git a/web/startScan/templates/startScan/schedule_scan_ui.html b/web/startScan/templates/startScan/schedule_scan_ui.html index 3ffbc1f6..4405ca73 100644 --- a/web/startScan/templates/startScan/schedule_scan_ui.html +++ b/web/startScan/templates/startScan/schedule_scan_ui.html @@ -205,5 +205,19 @@

Out of Scope Subdomains(Optional)

}); }); + + {% endblock page_level_script %} diff --git a/web/startScan/views.py b/web/startScan/views.py index 5f4c4bf5..79b9ea49 100644 --- a/web/startScan/views.py +++ b/web/startScan/views.py @@ -2,7 +2,7 @@ from celery import group from weasyprint import HTML -from datetime import datetime +from datetime import datetime, timedelta from django.contrib import messages from django.db.models import Count from django.http import HttpResponse, HttpResponseRedirect, JsonResponse @@ -272,9 +272,9 @@ def start_scan_ui(request, slug, domain_id): # Create ScanHistory object scan_history_id = create_scan_object( - domain_id, - engine_id, - request.user + host_id=domain_id, + engine_id=engine_id, + initiated_by_id=request.user.id ) scan = ScanHistory.objects.get(pk=scan_history_id) @@ -287,7 +287,8 @@ def start_scan_ui(request, slug, domain_id): 'results_dir': RENGINE_RESULTS, 'imported_subdomains': subdomains_in, 'out_of_scope_subdomains': subdomains_out, - 'url_filter': filterPath + 'url_filter': filterPath, + 'initiated_by_id': request.user.id } initiate_scan.apply_async(kwargs=kwargs) scan.save() @@ -329,9 +330,9 @@ def start_multiple_scan(request, slug): for domain_id in list_of_domains.split(","): # Start the celery task scan_history_id = create_scan_object( - domain_id, - engine_id, - request.user + host_id=domain_id, + engine_id=engine_id, + initiated_by_id=request.user.id ) # domain = get_object_or_404(Domain, id=domain_id) @@ -341,6 +342,7 @@ def start_multiple_scan(request, slug): 'engine_id': engine_id, 'scan_type': LIVE_SCAN, 'results_dir': RENGINE_RESULTS, + 'initiated_by_id': request.user.id # TODO: Add this to multiple scan view # 'imported_subdomains': subdomains_in, # 'out_of_scope_subdomains': subdomains_out @@ -536,29 +538,42 @@ def schedule_scan(request, host_id, slug): 'scan_history_id': 1, 'scan_type': SCHEDULED_SCAN, 'imported_subdomains': subdomains_in, - 'out_of_scope_subdomains': subdomains_out + 'out_of_scope_subdomains': subdomains_out, + 'initiated_by_id': request.user.id } - PeriodicTask.objects.create(interval=schedule, - name=task_name, - task='reNgine.tasks.initiate_scan', - kwargs=json.dumps(kwargs)) + PeriodicTask.objects.create( + interval=schedule, + name=task_name, + task='initiate_scan', + kwargs=json.dumps(kwargs) + ) elif scheduled_mode == 'clocked': schedule_time = request.POST['scheduled_time'] + timezone_offset = int(request.POST.get('timezone_offset', 0)) + # Convert received hour in UTC + local_time = datetime.strptime(schedule_time, '%Y-%m-%d %H:%M') + # Adjust hour to UTC + utc_time = local_time + timedelta(minutes=timezone_offset) + # Make hour "aware" in UTC + utc_time = timezone.make_aware(utc_time, timezone.utc) clock, _ = ClockedSchedule.objects.get_or_create( - clocked_time=schedule_time) + clocked_time=utc_time) kwargs = { 'scan_history_id': 0, 'domain_id': host_id, 'engine_id': engine.id, 'scan_type': SCHEDULED_SCAN, 'imported_subdomains': subdomains_in, - 'out_of_scope_subdomains': subdomains_out + 'out_of_scope_subdomains': subdomains_out, + 'initiated_by_id': request.user.id } - PeriodicTask.objects.create(clocked=clock, - one_off=True, - name=task_name, - task='reNgine.tasks.initiate_scan', - kwargs=json.dumps(kwargs)) + PeriodicTask.objects.create( + clocked=clock, + one_off=True, + name=task_name, + task='initiate_scan', + kwargs=json.dumps(kwargs) + ) messages.add_message( request, messages.INFO, @@ -630,29 +645,6 @@ def change_vuln_status(request, id): return HttpResponse('') -def create_scan_object(host_id, engine_id, initiated_by): 
- ''' - create task with pending status so that celery task will execute when - threads are free - ''' - # get current time - current_scan_time = timezone.now() - # fetch engine and domain object - engine = EngineType.objects.get(pk=engine_id) - domain = Domain.objects.get(pk=host_id) - scan = ScanHistory() - scan.scan_status = INITIATED_TASK - scan.domain = domain - scan.scan_type = engine - scan.start_scan_date = current_scan_time - scan.initiated_by = initiated_by - scan.save() - # save last scan date for domain model - domain.start_scan_date = current_scan_time - domain.save() - return scan.id - - @has_permission_decorator(PERM_MODIFY_SYSTEM_CONFIGURATIONS, redirect_url=FOUR_OH_FOUR_URL) def delete_all_scan_results(request): if request.method == 'POST': @@ -695,9 +687,9 @@ def start_organization_scan(request, id, slug): # Start Celery task for each organization's domains for domain in organization.get_domains(): scan_history_id = create_scan_object( - domain.id, - engine_id, - request.user + host_id=domain.id, + engine_id=engine_id, + initiated_by_id=request.user.id ) scan = ScanHistory.objects.get(pk=scan_history_id) @@ -707,6 +699,7 @@ def start_organization_scan(request, id, slug): 'engine_id': engine_id, 'scan_type': LIVE_SCAN, 'results_dir': RENGINE_RESULTS, + 'initiated_by_id': request.user.id, # TODO: Add this to multiple scan view # 'imported_subdomains': subdomains_in, # 'out_of_scope_subdomains': subdomains_out @@ -774,12 +767,13 @@ def schedule_organization_scan(request, slug, id): 'engine_id': engine.id, 'scan_history_id': 0, 'scan_type': SCHEDULED_SCAN, - 'imported_subdomains': None + 'imported_subdomains': None, + 'initiated_by_id': request.user.id }) PeriodicTask.objects.create( interval=schedule, name=task_name, - task='reNgine.tasks.initiate_scan', + task='initiate_scan', kwargs=_kwargs ) @@ -794,12 +788,13 @@ def schedule_organization_scan(request, slug, id): 'engine_id': engine.id, 'scan_history_id': 0, 'scan_type': LIVE_SCAN, - 'imported_subdomains': None + 'imported_subdomains': None, + 'initiated_by_id': request.user.id }) PeriodicTask.objects.create(clocked=clock, one_off=True, name=task_name, - task='reNgine.tasks.initiate_scan', + task='initiate_scan', kwargs=_kwargs ) @@ -810,7 +805,7 @@ def schedule_organization_scan(request, slug, id): messages.INFO, f'Scan started for {ndomains} domains in organization {organization.name}' ) - return HttpResponseRedirect(reverse('scheduled_scan_view', kwargs={'slug': slug, 'id': id})) + return HttpResponseRedirect(reverse('scheduled_scan_view', kwargs={'slug': slug})) # GET request engine = EngineType.objects From 11c43bb90fdec7d39dceb0f512b2931def34054c Mon Sep 17 00:00:00 2001 From: Psyray Date: Wed, 4 Sep 2024 22:00:27 +0200 Subject: [PATCH 254/262] fix(graph): de-duplicate dorks and vulnerabilities (#188) * fix(graph): deduplicate dorks and vulnerabilities * fix(typo): remove french language * fix(typo): remove french comments * fix(graph): remove carriage return --- web/api/serializers.py | 174 +++++++++++++++++++++++-------------- web/api/views.py | 53 ++++++++--- web/static/custom/mitch.js | 17 +++- 3 files changed, 168 insertions(+), 76 deletions(-) diff --git a/web/api/serializers.py b/web/api/serializers.py index 1fd0b7e9..d6bd14c4 100644 --- a/web/api/serializers.py +++ b/web/api/serializers.py @@ -1,6 +1,6 @@ +from collections import defaultdict from dashboard.models import * -from django.contrib.humanize.templatetags.humanize import (naturalday, - naturaltime) +from django.contrib.humanize.templatetags.humanize 
import (naturalday, naturaltime) from django.db.models import F, JSONField, Value from recon_note.models import * from reNgine.common_func import * @@ -573,11 +573,14 @@ def get_children(self, history): many=True, context={'scan_history': history}) + processed_subdomains = self.process_subdomains(subdomain_serializer.data) + email = Email.objects.filter(emails__in=scan_history) email_serializer = VisualiseEmailSerializer(email, many=True) dork = Dork.objects.filter(dorks__in=scan_history) dork_serializer = VisualiseDorkSerializer(dork, many=True) + processed_dorks = self.process_dorks(dork_serializer.data) employee = Employee.objects.filter(employees__in=scan_history) employee_serializer = VisualiseEmployeeSerializer(employee, many=True) @@ -587,69 +590,68 @@ def get_children(self, history): return_data = [] - if subdomain_serializer.data: + if processed_subdomains: return_data.append({ 'description': 'Subdomains', - 'children': subdomain_serializer.data}) - - if email_serializer.data or employee_serializer.data or dork_serializer.data or metainfo: - osint_data = [] - if email_serializer.data: - osint_data.append({ - 'description': 'Emails', - 'children': email_serializer.data}) - if employee_serializer.data: - osint_data.append({ - 'description': 'Employees', - 'children': employee_serializer.data}) - if dork_serializer.data: - osint_data.append({ - 'description': 'Dorks', - 'children': dork_serializer.data}) - - if metainfo: - metainfo_data = [] - usernames = ( - metainfo - .annotate(description=F('author')) - .values('description') - .distinct() - .annotate(children=Value([], output_field=JSONField())) - .filter(author__isnull=False) - ) - - if usernames: - metainfo_data.append({ - 'description': 'Usernames', - 'children': usernames}) - - software = ( - metainfo - .annotate(description=F('producer')) - .values('description') - .distinct() - .annotate(children=Value([], output_field=JSONField())) - .filter(producer__isnull=False) - ) - - if software: - metainfo_data.append({ - 'description': 'Software', - 'children': software}) - - os = ( - metainfo - .annotate(description=F('os')) - .values('description') - .distinct() - .annotate(children=Value([], output_field=JSONField())) - .filter(os__isnull=False) - ) - - if os: - metainfo_data.append({ - 'description': 'OS', - 'children': os}) + 'children': processed_subdomains}) + + osint_data = [] + if email_serializer.data: + osint_data.append({ + 'description': 'Emails', + 'children': email_serializer.data}) + if employee_serializer.data: + osint_data.append({ + 'description': 'Employees', + 'children': employee_serializer.data}) + if processed_dorks: + osint_data.append({ + 'description': 'Dorks', + 'children': processed_dorks}) + + if metainfo: + metainfo_data = [] + usernames = ( + metainfo + .annotate(description=F('author')) + .values('description') + .distinct() + .annotate(children=Value([], output_field=JSONField())) + .filter(author__isnull=False) + ) + + if usernames: + metainfo_data.append({ + 'description': 'Usernames', + 'children': usernames}) + + software = ( + metainfo + .annotate(description=F('producer')) + .values('description') + .distinct() + .annotate(children=Value([], output_field=JSONField())) + .filter(producer__isnull=False) + ) + + if software: + metainfo_data.append({ + 'description': 'Software', + 'children': software}) + + os = ( + metainfo + .annotate(description=F('os')) + .values('description') + .distinct() + .annotate(children=Value([], output_field=JSONField())) + .filter(os__isnull=False) + ) + + if os: + 
metainfo_data.append({ + 'description': 'OS', + 'children': os}) if metainfo: osint_data.append({ @@ -660,8 +662,54 @@ def get_children(self, history): 'description':'OSINT', 'children': osint_data}) + if osint_data: + return_data.append({ + 'description':'OSINT', + 'children': osint_data}) + return return_data + def process_subdomains(self, subdomains): + for subdomain in subdomains: + if 'children' in subdomain: + vuln_dict = defaultdict(list) + for child in subdomain['children']: + if child.get('description') == 'Vulnerabilities': + for vuln_severity in child['children']: + severity = vuln_severity['description'] + for vuln in vuln_severity['children']: + vuln_key = (vuln['description'], severity) + if vuln_key not in vuln_dict: + vuln_dict[vuln_key] = vuln + + # Reconstruct vulnerabilities structure without duplicates + new_vuln_structure = [] + for severity in ['Critical', 'High', 'Medium', 'Low', 'Informational', 'Unknown']: + severity_vulns = [v for k, v in vuln_dict.items() if k[1] == severity] + if severity_vulns: + new_vuln_structure.append({ + 'description': severity, + 'children': severity_vulns + }) + + # Replace old structure with new + subdomain['children'] = [child for child in subdomain['children'] if child.get('description') != 'Vulnerabilities'] + if new_vuln_structure: + subdomain['children'].append({ + 'description': 'Vulnerabilities', + 'children': new_vuln_structure + }) + + return subdomains + + def process_dorks(self, dorks): + unique_dorks = {} + for dork in dorks: + dork_key = (dork['description'], dork.get('dork_type', '')) + if dork_key not in unique_dorks: + unique_dorks[dork_key] = dork + + return list(unique_dorks.values()) class SubdomainChangesSerializer(serializers.ModelSerializer): diff --git a/web/api/views.py b/web/api/views.py index 9bf81801..30ddf734 100644 --- a/web/api/views.py +++ b/web/api/views.py @@ -5,6 +5,7 @@ import socket import subprocess from ipaddress import IPv4Network +from collections import defaultdict import requests import validators @@ -1407,16 +1408,48 @@ def get(self, request, format=None): class VisualiseData(APIView): - def get(self, request, format=None): - req = self.request - scan_id = req.query_params.get('scan_id') - if scan_id: - mitch_data = ScanHistory.objects.filter(id=scan_id) - serializer = VisualiseDataSerializer(mitch_data, many=True) - return Response(serializer.data) - else: - return Response() - + def get(self, request, format=None): + req = self.request + scan_id = req.query_params.get('scan_id') + if scan_id: + mitch_data = ScanHistory.objects.filter(id=scan_id) + serializer = VisualiseDataSerializer(mitch_data, many=True) + + # Data processing to remove duplicates + processed_data = self.process_visualisation_data(serializer.data) + + return Response(processed_data) + else: + return Response() + + def process_visualisation_data(self, data): + if not data: + return [] + + processed_data = data[0] # Assuming there's only one element in data + subdomains = processed_data.get('subdomains', []) + + # Use a dictionary to group vulnerabilities by subdomain + vuln_by_subdomain = defaultdict(list) + + for subdomain in subdomains: + subdomain_name = subdomain['name'] + vulnerabilities = subdomain.get('vulnerabilities', []) + + # Group unique vulnerabilities + unique_vulns = {} + for vuln in vulnerabilities: + vuln_key = (vuln['name'], vuln['severity']) + if vuln_key not in unique_vulns: + unique_vulns[vuln_key] = vuln + + vuln_by_subdomain[subdomain_name].extend(unique_vulns.values()) + + # Update subdomains with 
unique vulnerabilities + for subdomain in subdomains: + subdomain['vulnerabilities'] = vuln_by_subdomain[subdomain['name']] + + return processed_data class ListTechnology(APIView): def get(self, request, format=None): diff --git a/web/static/custom/mitch.js b/web/static/custom/mitch.js index 5962e26c..2fa7e1c5 100644 --- a/web/static/custom/mitch.js +++ b/web/static/custom/mitch.js @@ -39,7 +39,15 @@ function visualise_scan_results(scan_id) $.getJSON(`/api/queryAllScanResultVisualise/?scan_id=${scan_id}&format=json`, function(data) { $('#visualisation-loader').empty(); $('#visualisation-filter').show(); - var treeData = data[0]; + + // Check if data is an array and get the first element + var treeData = Array.isArray(data) ? data[0] : data; + + // Check if treeData exists and has children + if (!treeData || !treeData.children || treeData.children.length === 0) { + $('#visualisation-loader').html('

No data to visualize.

'); + return; + } // Calculate total nodes, max label length var totalNodes = 0; @@ -55,7 +63,10 @@ function visualise_scan_results(scan_id) var duration = 750; var root; - var subdomain_count = data[0]['children'][0]['children'].length; + // Find the 'Subdomains' node in the children + var subdomainsNode = treeData.children.find(child => child.description === 'Subdomains'); + var subdomain_count = subdomainsNode ? subdomainsNode.children.length : 0; + // size of the diagram var viewerWidth = screen_width - 100; var viewerHeight = screen_height + 500; @@ -518,6 +529,6 @@ function visualise_scan_results(scan_id) }).fail(function(){ $('#visualisation-loader').empty(); - $("#visualisation-loader").append(`
Sorry, could not visualize.
`); + $("#visualisation-loader").append(`
Sorry, it's impossible to visualize.
`); });; } From 17b0f34cbc659a25e0967635e5f60be390824603 Mon Sep 17 00:00:00 2001 From: Anonymoussaurus <50231698+AnonymousWP@users.noreply.github.com> Date: Thu, 5 Sep 2024 00:27:07 +0200 Subject: [PATCH 255/262] build(ci): improve CodeQL configuration --- .github/workflows/codeql-analysis.yml | 35 ++++++++++++++++++--------- 1 file changed, 23 insertions(+), 12 deletions(-) diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index bd8a831d..3dda7d6e 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -1,35 +1,46 @@ -name: "Code Quality" +name: "CodeQL Advanced" on: push: - branches: [ master ] + branches: [ "**" ] + paths-ignore: + - '**/*.md' + - '**/*.txt' pull_request: - branches: [ master ] - schedule: - - cron: '0 18 * * 5' + branches: [ "**" ] + paths-ignore: + - '**/*.md' + - '**/*.txt' jobs: analyze: - name: Analyze - runs-on: ubuntu-latest + name: Analyze (${{ matrix.language }}) + runs-on: ${{ 'ubuntu-latest' }} + permissions: + security-events: write + packages: read strategy: fail-fast: false matrix: - language: [ 'javascript', 'python' ] + include: + - language: javascript + build-mode: none + - language: python + build-mode: none steps: - name: Checkout repository uses: actions/checkout@v4 - # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL uses: github/codeql-action/init@v3 with: languages: ${{ matrix.language }} - - - name: Autobuild - uses: github/codeql-action/autobuild@v3 + build-mode: ${{ matrix.build-mode }} + queries: security-and-quality - name: Perform CodeQL Analysis uses: github/codeql-action/analyze@v3 + with: + category: "/language:${{matrix.language}}" From 9862f763568405c1920a09a9cc9ae0583ee15f56 Mon Sep 17 00:00:00 2001 From: psyray Date: Fri, 6 Sep 2024 17:35:23 +0200 Subject: [PATCH 256/262] dev(django): install django extensions to have more commands --- docker/celery/pyproject.toml | 1 + docker/web/pyproject.toml | 1 + web/reNgine/settings.py | 1 + 3 files changed, 3 insertions(+) diff --git a/docker/celery/pyproject.toml b/docker/celery/pyproject.toml index e9ced1d1..1c3b7806 100644 --- a/docker/celery/pyproject.toml +++ b/docker/celery/pyproject.toml @@ -17,6 +17,7 @@ django = "3.2.25" django-ace = "1.32.4" django-celery-beat = "2.6.0" django-debug-toolbar = "4.3.0" +django-extensions = "3.2.3" django-environ = "0.11.2" django-login-required-middleware = "0.9.0" django-role-permissions = "3.2.0" diff --git a/docker/web/pyproject.toml b/docker/web/pyproject.toml index e9ced1d1..4b78c33c 100644 --- a/docker/web/pyproject.toml +++ b/docker/web/pyproject.toml @@ -18,6 +18,7 @@ django-ace = "1.32.4" django-celery-beat = "2.6.0" django-debug-toolbar = "4.3.0" django-environ = "0.11.2" +django-extensions = "3.2.3" django-login-required-middleware = "0.9.0" django-role-permissions = "3.2.0" django-mathfilters = "1.0.0" diff --git a/web/reNgine/settings.py b/web/reNgine/settings.py index e8cf077c..c92de917 100644 --- a/web/reNgine/settings.py +++ b/web/reNgine/settings.py @@ -93,6 +93,7 @@ 'recon_note.apps.ReconNoteConfig', 'django_ace', 'django_celery_beat', + 'django_extensions', 'mathfilters', 'drf_yasg', 'rolepermissions' From 8183c047ffa473f839536050d57188e1ee2dae80 Mon Sep 17 00:00:00 2001 From: psyray Date: Thu, 12 Sep 2024 00:59:25 +0200 Subject: [PATCH 257/262] fix(docker): replace staticfiles volume to prevent empty directory --- docker/docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/docker-compose.yml 
b/docker/docker-compose.yml index 68d23c49..fb139f64 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -147,7 +147,7 @@ services: target: /etc/nginx/certs/rengine_rsa.key volumes: - ./proxy/config/rengine.conf:/etc/nginx/conf.d/rengine.conf:ro - - ../web/staticfiles:/home/rengine/rengine/staticfiles/ + - ../web:/home/rengine/rengine:rw,z - scan_results:/home/rengine/scan_results networks: - rengine_network From 8fc82114cbb4a19b1bfd714c4db1e9fb98fb6422 Mon Sep 17 00:00:00 2001 From: psyray Date: Thu, 12 Sep 2024 02:40:03 +0200 Subject: [PATCH 258/262] chore(ui): disable update button in tool arsenal - Disabled the "Check Update" button in the Tool Arsenal section and replaced it with a placeholder indicating that the update feature is coming soon. --- web/scanEngine/templates/scanEngine/settings/tool_arsenal.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/web/scanEngine/templates/scanEngine/settings/tool_arsenal.html b/web/scanEngine/templates/scanEngine/settings/tool_arsenal.html index 84b16462..bab5ad51 100644 --- a/web/scanEngine/templates/scanEngine/settings/tool_arsenal.html +++ b/web/scanEngine/templates/scanEngine/settings/tool_arsenal.html @@ -92,7 +92,7 @@
Current Installed Version
{{tool.description}}

- +
From 09a6ee94d8ec93901936b126592ac245dfd6476e Mon Sep 17 00:00:00 2001 From: Psyray Date: Fri, 13 Sep 2024 15:33:24 +0200 Subject: [PATCH 259/262] ci(unit-tests): provide unit tests for UI (#189) * feat(tests): add unit tests for api views * feat(unit-tests): add 12 unit tests for API app * feat(unit-tests): minor changes on test scan & nmap * feat(unit-tests): add 32 unit tests for api app * feat(unit-tests): add 24 unit tests for api app * feat(unit-test): correct some bugs and refactor * feat(unit-tests): replace hardcoded values * feat(unit-tests): change viewset aliases basename * feat(unit-test): correctly cast all int parameter value * feat(unit-tests): bug fixes and add of 6 unit tests * feat(unit-tests): add docstring * feat(unit-tests): refactor API unit tests * refactor(api): improve import structure and add tests for activity logs - Refactored import statements in views.py for better readability and maintainability. - Added handling for single integer subdomain_ids in InitiateSubTask. - Enhanced querysets in ListActivityLogsViewSet and ListScanLogsViewSet to include ordering by id. - Introduced new tests for ListActivityLogsViewSet in test_scan.py. - Updated TestDataGenerator in test_utils.py to include methods for creating scan_activity and command. - Added a docstring to safe_int_cast in common_func.py. * fix(tests): update subscan creation and assertion logic Modified the subscan creation process in test_scan.py to handle multiple subscans and updated related assertions. * feat: add timezone-aware datetime handling and new dashboard view test - Updated date handling in the index view to use timezone-aware datetimes. - Refactored test imports to use a common BaseTestCase from utils.test_base. - Added a new test suite for dashboard views, including tests for various dashboard functionalities. - Enhanced TestDataGenerator to include discovered_date for endpoints. * test: add __all__ declarations to test files for better module export control Added __all__ declarations to various test files to explicitly define the public interface of these modules. This change improves module export control and clarifies which classes and functions are intended for external use. * test: add missing __all declarations - Introduced __all__ declarations in test_scan.py and test_base.py for better module export control. - Removed an unused import in test_dashboard.py. * feat: add new test cases and utility functions for startScan views and models - Added new test cases for startScan views and models. Introduced MockTemplate utility for mocking Django templates in tests. - Updated TestDataGenerator to include create_scan_activity and create_command methods. - Replaced direct type casting with safe_int_cast in startScan views. - Removed redundant client login steps in TestDashboardViews. - Deleted obsolete test_scan.py file. * feat(scanEngine): enhance test utilities and add unit tests for scanEngine views - Imports and Models: Updated imports in test_utils.py to include additional models from scanEngine and other related modules. - Test Data Generation: Refactored and expanded the TestDataGenerator class to include methods for creating various test objects related to scanEngine. - Views Cleanup: Removed debug print statements from scanEngine/views.py and refactored string concatenations to use f-strings. 
- Unit Tests: Added a new test file test_scan_engine.py containing comprehensive unit tests for the scanEngine views, covering functionalities like adding, updating, and deleting engines, wordlists, and tools. * refactor: update test data generator and scan engine tests - Added methods create_interesting_lookup_model and create_search_history to TestDataGenerator. - Removed the method create_lookup from TestDataGenerator. - Updated test_scan_engine.py to use interesting_lookup_model instead of lookup. * refactor: remove redundant code in views and test_base modules - Removed redundant installation command in add_tool function within views.py. - Removed unused import statement in test_base.py. * feat: enhance target and organization management with validation and tests - Imports and Dependencies: Reorganized and added necessary imports for better modularity and functionality. - File Upload Validation: Added checks for empty file uploads and invalid IP addresses during target addition. - Error Handling: Improved error handling in delete_target and delete_organization views to handle non-existent entities. - Form Enhancements: Updated AddOrganizationForm to use ModelForm and improved domain selection logic. - Utility Functions: Moved get_ip_info and get_ips_from_cidr_range functions to common_func.py for better reusability. - Validators: Added a new validate_ip function in validators.py to validate IP addresses. - Unit Tests: Added comprehensive unit tests for target and organization views to ensure proper functionality and error handling. * feat: add logging for error conditions & fix CodeQL warnings - Added logging for various error conditions in add_target and delete_target views. - Added __all__ declaration in test_target_app.py for better module export control. - Minor cleanup in common_func.py without functional changes. * refactor(logging): use lazy formatting for logger messages - Updated logger messages in web/targetApp/views.py to use lazy formatting. * feat(tests): add validation and error handling for note operations - Added validation checks and error handling for missing or invalid IDs in note operations. - Improved error messages for better clarity in the list_note, flip_todo_status, flip_important_status, and delete_note functions. - Added required field checks in the AddReconNote API view. * fix(tests): adjust test assertions * refactor: improve error handling and code readability in API views Overview - Enhanced error handling and logging across multiple API views. - Refactored code for better readability and maintainability. - Added new test cases to cover edge scenarios and improve test coverage. - Updated Docker configuration for development environment. Details - OllamaManager: Simplified error handling and added comments for clarity. - GPTAttackSuggestion: Improved error handling and refactored code for better readability. - FetchMostCommonVulnerability: Refactored query logic and improved error handling. - AddTarget: Added validation for domain names and improved error messages. - DeleteVulnerability: Added validation for input data and improved error handling. - ListTechnology: Refactored query logic for better readability. - get_ips_from_cidr_range: Improved error handling and logging. - Test cases: Added new test cases for various scenarios, including failure cases and edge cases. - Docker: Updated docker-compose.dev.yml to enable remote debugging and added a new port. - Miscellaneous: Various minor improvements and bug fixes across different files. 
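
These suites follow the standard Django per-app test layout (see the file list below), so they should be runnable with the stock test runner from the web/ directory, typically inside the web container; the exact invocation may differ, but for example:

    python3 manage.py test api.tests                      # run the whole API test package
    python3 manage.py test api.tests.test_endpoint -v 2   # or a single suite, with verbose output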
--- web/api/tests/__init__.py | 9 + web/api/tests/test_endpoint.py | 108 + web/api/tests/test_ip.py | 197 + web/api/tests/test_organization.py | 81 + web/api/tests/test_osint.py | 156 + web/api/tests/test_project.py | 131 + web/api/tests/test_scan.py | 306 ++ web/api/tests/test_search.py | 73 + web/api/tests/test_subdomain.py | 222 ++ web/api/tests/test_target.py | 72 + web/api/tests/test_tools.py | 285 ++ web/api/tests/test_vulnerability.py | 231 ++ web/api/urls.py | 26 +- web/api/views.py | 799 ++-- web/dashboard/fixtures/dashboard.json | 37 + .../migrations/0006_project_insert_date.py | 6 +- web/dashboard/tests.py | 3 - web/dashboard/tests/__init__.py | 2 + web/dashboard/tests/test_dashboard.py | 88 + web/dashboard/views.py | 16 +- web/fixtures/auth.json | 2347 ++++++++++++ web/fixtures/django_celery_beat.json | 49 + web/manage.py | 19 +- web/reNgine/common_func.py | 77 +- web/reNgine/tasks.py | 4 + web/reNgine/validators.py | 5 + web/recon_note/fixtures/recon_note.json | 15 + web/recon_note/tests.py | 3 - web/recon_note/tests/__init__.py | 2 + web/recon_note/tests/test_recon_note.py | 81 + web/recon_note/views.py | 48 +- web/scanEngine/fixtures/scanEngine.json | 436 +++ .../scanEngine/settings/llm_toolkit.html | 9 +- web/scanEngine/tests.py | 3 - web/scanEngine/tests/__init__.py | 2 + web/scanEngine/tests/test_scan_engine.py | 227 ++ web/scanEngine/views.py | 30 +- web/startScan/fixtures/startScan.json | 3392 +++++++++++++++++ web/startScan/tests.py | 3 - web/startScan/tests/__init__.py | 2 + web/startScan/tests/test_start_scan.py | 220 ++ web/startScan/views.py | 8 +- web/targetApp/fixtures/targetApp.json | 520 +++ web/targetApp/forms.py | 46 +- web/targetApp/tests.py | 3 - web/targetApp/tests/__init__.py | 2 + web/targetApp/tests/test_target_app.py | 413 ++ web/targetApp/views.py | 212 +- web/tests/test_nmap.py | 6 +- web/tests/test_scan.py | 23 +- web/utils/test_base.py | 57 + web/utils/test_utils.py | 529 +++ 52 files changed, 11111 insertions(+), 530 deletions(-) create mode 100644 web/api/tests/__init__.py create mode 100644 web/api/tests/test_endpoint.py create mode 100644 web/api/tests/test_ip.py create mode 100644 web/api/tests/test_organization.py create mode 100644 web/api/tests/test_osint.py create mode 100644 web/api/tests/test_project.py create mode 100644 web/api/tests/test_scan.py create mode 100644 web/api/tests/test_search.py create mode 100644 web/api/tests/test_subdomain.py create mode 100644 web/api/tests/test_target.py create mode 100644 web/api/tests/test_tools.py create mode 100644 web/api/tests/test_vulnerability.py create mode 100644 web/dashboard/fixtures/dashboard.json delete mode 100644 web/dashboard/tests.py create mode 100644 web/dashboard/tests/__init__.py create mode 100644 web/dashboard/tests/test_dashboard.py create mode 100644 web/fixtures/auth.json create mode 100644 web/fixtures/django_celery_beat.json create mode 100644 web/recon_note/fixtures/recon_note.json delete mode 100644 web/recon_note/tests.py create mode 100644 web/recon_note/tests/__init__.py create mode 100644 web/recon_note/tests/test_recon_note.py create mode 100644 web/scanEngine/fixtures/scanEngine.json delete mode 100644 web/scanEngine/tests.py create mode 100644 web/scanEngine/tests/__init__.py create mode 100644 web/scanEngine/tests/test_scan_engine.py create mode 100644 web/startScan/fixtures/startScan.json delete mode 100644 web/startScan/tests.py create mode 100644 web/startScan/tests/__init__.py create mode 100644 web/startScan/tests/test_start_scan.py create mode 100644 
web/targetApp/fixtures/targetApp.json delete mode 100644 web/targetApp/tests.py create mode 100644 web/targetApp/tests/__init__.py create mode 100644 web/targetApp/tests/test_target_app.py create mode 100644 web/utils/test_base.py create mode 100644 web/utils/test_utils.py diff --git a/web/api/tests/__init__.py b/web/api/tests/__init__.py new file mode 100644 index 00000000..3cdbffbb --- /dev/null +++ b/web/api/tests/__init__.py @@ -0,0 +1,9 @@ +from utils.test_base import * +from .test_vulnerability import * +from .test_subdomain import * +from .test_scan import * +from .test_tools import * +from .test_endpoint import * +from .test_project import * +from .test_organization import * +from .test_search import * diff --git a/web/api/tests/test_endpoint.py b/web/api/tests/test_endpoint.py new file mode 100644 index 00000000..adbb0c8c --- /dev/null +++ b/web/api/tests/test_endpoint.py @@ -0,0 +1,108 @@ +""" +This file contains the test cases for the API views. +""" + +from django.urls import reverse +from rest_framework import status +from utils.test_base import BaseTestCase + +__all__ = [ + 'TestEndPointViewSet', + 'TestEndPointChangesViewSet', + 'TestInterestingEndpointViewSet' +] + +class TestEndPointViewSet(BaseTestCase): + """Test case for the EndPoint ViewSet API.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + self.data_generator.create_endpoint() + + def test_list_endpoints(self): + """Test listing endpoints.""" + api_url = reverse("api:endpoints-list") + response = self.client.get( + api_url, + { + "project": self.data_generator.project.slug, + "scan_id": self.data_generator.scan_history.id, + "subdomain_id": self.data_generator.subdomain.id, + "target_id": self.data_generator.domain.id, + }, + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data["results"]), 1) + self.assertEqual( + response.data["results"][0]["http_url"], + self.data_generator.endpoint.http_url, + ) + + def test_list_endpoints_by_subdomain(self): + """Test listing endpoints by subdomain.""" + api_url = reverse("api:endpoints-list") + response = self.client.get( + api_url, + { + "subdomain_id": self.data_generator.subdomain.id, + "scan_id": self.data_generator.scan_history.id, + "project": self.data_generator.project.slug, + "target_id": self.data_generator.domain.id, + }, + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data["results"]), 1) + self.assertEqual( + response.data["results"][0]["http_url"], + self.data_generator.endpoint.http_url, + ) + +class TestEndPointChangesViewSet(BaseTestCase): + """Test case for endpoint changes viewset.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + self.data_generator.create_endpoint() + self.data_generator.create_scan_history() + self.data_generator.create_endpoint(name="endpoint2") + + def test_endpoint_changes_viewset(self): + """Test the EndPoint Changes ViewSet.""" + url = reverse("api:endpoint-changes-list") + response = self.client.get( + url, {"scan_id": self.data_generator.scan_history.id, "changes": "added"} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data), 1) + self.assertEqual( + response.data["results"][0]["http_url"], + self.data_generator.endpoint.http_url, + ) + self.assertEqual(response.data["results"][0]["change"], "added") + +class 
TestInterestingEndpointViewSet(BaseTestCase): + """Test case for interesting endpoint viewset.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + self.data_generator.create_endpoint() + + def test_interesting_endpoint_viewset(self): + """Test retrieving interesting endpoints for a scan.""" + url = reverse("api:interesting-endpoints-list") + response = self.client.get( + url, {"scan_id": self.data_generator.scan_history.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data), 1) + self.assertEqual( + response.data["results"][0]["http_url"], + self.data_generator.endpoint.http_url, + ) + diff --git a/web/api/tests/test_ip.py b/web/api/tests/test_ip.py new file mode 100644 index 00000000..06a33d1f --- /dev/null +++ b/web/api/tests/test_ip.py @@ -0,0 +1,197 @@ +""" +This file contains the test cases for the API views. +""" + +from unittest.mock import patch +from django.urls import reverse +from rest_framework import status +from utils.test_base import BaseTestCase +import socket + +__all__ = [ + 'TestIpAddressViewSet', + 'TestIPToDomain', + 'TestDomainIPHistory', + 'TestListIPs', + 'TestListPorts', + 'TestWhois', + 'TestReverseWhois' +] + +class TestIpAddressViewSet(BaseTestCase): + """Test case for IP address viewset.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + + def test_ip_address_viewset(self): + """Test retrieving IP addresses for a scan.""" + url = reverse("api:ip-addresses-list") + response = self.client.get( + url, {"scan_id": self.data_generator.scan_history.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data), 1) + self.assertEqual( + response.data["results"][0]["ip_addresses"][0]["address"], + self.data_generator.ip_address.address, + ) + +class TestIPToDomain(BaseTestCase): + """Test case for IP to domain resolution.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + + @patch("api.views.socket.gethostbyaddr") + def test_ip_to_domain(self, mock_gethostbyaddr): + """Test resolving an IP address to a domain name.""" + mock_gethostbyaddr.return_value = ( + self.data_generator.domain.name, + [self.data_generator.domain.name], + [self.data_generator.subdomain.ip_addresses.first().address], + ) + url = reverse("api:ip_to_domain") + response = self.client.get( + url, + {"ip_address": self.data_generator.subdomain.ip_addresses.first().address}, + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertEqual( + response.data["ip_address"][0]["domain"], self.data_generator.domain.name + ) + + @patch("api.views.socket.gethostbyaddr") + def test_ip_to_domain_failure(self, mock_gethostbyaddr): + """Test IP to domain resolution when it fails.""" + mock_gethostbyaddr.side_effect = socket.herror + url = reverse("api:ip_to_domain") + response = self.client.get(url, {"ip_address": "192.0.2.1"}) + self.assertEqual(response.status_code, 200) + self.assertTrue(response.data["status"]) + self.assertEqual(response.data["ip_address"][0]["domain"], "192.0.2.1") + + @patch("api.views.socket.gethostbyaddr") + def test_ip_to_domain_multiple(self, mock_gethostbyaddr): + """Test IP to domain resolution with multiple domains.""" + mock_domains = ["example.com", "example.org"] + mock_gethostbyaddr.return_value = 
(mock_domains[0], mock_domains, ["192.0.2.1"]) + url = reverse("api:ip_to_domain") + response = self.client.get(url, {"ip_address": "192.0.2.1"}) + self.assertEqual(response.status_code, 200) + self.assertIn("domains", response.data["ip_address"][0]) + self.assertEqual(response.data["ip_address"][0]["domains"], mock_domains) + +class TestDomainIPHistory(BaseTestCase): + """Test case for domain IP history lookup.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + + @patch("api.views.query_ip_history.apply_async") + def test_domain_ip_history(self, mock_apply_async): + """Test domain IP history lookup.""" + mock_apply_async.return_value.wait.return_value = { + "status": True, + "data": "IP History data", + } + url = reverse("api:domain_ip_history") + response = self.client.get(url, {"domain": self.data_generator.domain.name}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertEqual(response.data["data"], "IP History data") + +class TestListIPs(BaseTestCase): + """Test case for listing IP addresses.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + + def test_list_ips(self): + """Test listing IP addresses for a target.""" + url = reverse("api:listIPs") + response = self.client.get(url, {"target_id": self.data_generator.domain.id}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("ips", response.data) + self.assertGreaterEqual(len(response.data["ips"]), 1) + self.assertEqual( + response.data["ips"][0]["address"], self.data_generator.ip_address.address + ) + +class TestListPorts(BaseTestCase): + """Test case for listing ports.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + def test_list_ports(self): + """Test listing ports for a target and scan.""" + url = reverse("api:listPorts") + response = self.client.get( + url, + { + "target_id": self.data_generator.domain.id, + "scan_id": self.data_generator.scan_history.id, + "ip_address": "1.1.1.1", + }, + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("ports", response.data) + self.assertGreaterEqual(len(response.data["ports"]), 1) + self.assertEqual(response.data["ports"][0]["number"], 80) + self.assertEqual(response.data["ports"][0]["service_name"], "http") + +class TestWhois(BaseTestCase): + """Test case for WHOIS lookup.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + + @patch("api.views.query_whois.apply_async") + def test_whois(self, mock_apply_async): + """Test WHOIS lookup for a domain.""" + mock_apply_async.return_value.wait.return_value = { + "status": True, + "data": "Whois data", + } + url = reverse("api:whois") + response = self.client.get(url, {"ip_domain": self.data_generator.domain.name}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertEqual(response.data["data"], "Whois data") + +class TestReverseWhois(BaseTestCase): + """Test case for Reverse WHOIS lookup.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + + @patch("api.views.query_reverse_whois.apply_async") + def test_reverse_whois(self, mock_apply_async): + """Test Reverse WHOIS lookup for a domain.""" + 
mock_apply_async.return_value.wait.return_value = { + "status": True, + "data": "Reverse Whois data", + } + url = reverse("api:reverse_whois") + response = self.client.get( + url, {"lookup_keyword": self.data_generator.domain.name} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertEqual(response.data["data"], "Reverse Whois data") diff --git a/web/api/tests/test_organization.py b/web/api/tests/test_organization.py new file mode 100644 index 00000000..2649607f --- /dev/null +++ b/web/api/tests/test_organization.py @@ -0,0 +1,81 @@ +""" +This file contains the test cases for the API views. +""" + +from django.urls import reverse +from rest_framework import status +from utils.test_base import BaseTestCase +from targetApp.models import Organization + +__all__ = [ + 'TestListOrganizations', + 'TestListTargetsInOrganization', + 'TestListTargetsWithoutOrganization' +] + +class TestListOrganizations(BaseTestCase): + """Test case for listing organizations.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + def test_list_empty_organizations(self): + """Test listing organizations when the database is empty.""" + Organization.objects.all().delete() + url = reverse("api:listOrganizations") + response = self.client.get(url) + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.json()['organizations']), 0) + + def test_list_organizations(self): + """Test listing all organizations.""" + url = reverse("api:listOrganizations") + response = self.client.get(url) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("organizations", response.data) + self.assertGreaterEqual(len(response.data["organizations"]), 1) + self.assertEqual( + response.data["organizations"][0]["name"], + self.data_generator.organization.name, + ) + +class TestListTargetsInOrganization(BaseTestCase): + """Test case for listing targets in an organization.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + def test_list_targets_in_organization(self): + """Test listing targets for a specific organization.""" + url = reverse("api:queryTargetsInOrganization") + response = self.client.get( + url, {"organization_id": self.data_generator.organization.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("organization", response.data) + self.assertIn("domains", response.data) + self.assertGreaterEqual(len(response.data["domains"]), 1) + self.assertEqual( + response.data["domains"][0]["name"], self.data_generator.domain.name + ) + +class TestListTargetsWithoutOrganization(BaseTestCase): + """Test case for listing targets without an organization.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + def test_list_targets_without_organization(self): + """Test listing targets that are not associated with any organization.""" + url = reverse("api:queryTargetsWithoutOrganization") + response = self.client.get(url) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("domains", response.data) + self.assertGreaterEqual(len(response.data["domains"]), 1) + self.assertEqual(response.data["domains"][0]["name"], "vulnweb.com") diff --git a/web/api/tests/test_osint.py b/web/api/tests/test_osint.py new file mode 100644 index 00000000..8bd6ec3e --- /dev/null +++ 
b/web/api/tests/test_osint.py @@ -0,0 +1,156 @@ +""" +This file contains the test cases for the API views. +""" + +from django.urls import reverse +from rest_framework import status +from utils.test_base import BaseTestCase + +__all__ = [ + 'TestListDorkTypes', + 'TestListEmails', + 'TestListDorks', + 'TestListEmployees', + 'TestListOsintUsers', + 'TestListMetadata' +] + +class TestListDorkTypes(BaseTestCase): + """Test case for listing dork types.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + def test_list_dork_types(self): + """Test listing dork types for a scan.""" + url = reverse("api:queryDorkTypes") + response = self.client.get( + url, {"scan_id": self.data_generator.scan_history.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("dorks", response.data) + self.assertGreaterEqual(len(response.data["dorks"]), 1) + self.assertEqual( + response.data["dorks"][0]["type"], self.data_generator.dork.type + ) + +class TestListEmails(BaseTestCase): + """Test case for listing emails.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + def test_list_emails(self): + """Test listing emails for a scan.""" + url = reverse("api:queryEmails") + response = self.client.get( + url, {"scan_id": self.data_generator.scan_history.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("emails", response.data) + self.assertGreaterEqual(len(response.data["emails"]), 1) + self.assertEqual( + response.data["emails"][0]["address"], self.data_generator.email.address + ) + +class TestListDorks(BaseTestCase): + """Test case for listing dorks.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + def test_list_dorks(self): + """Test listing dorks for a scan.""" + url = reverse("api:queryDorks") + response = self.client.get( + url, {"scan_id": self.data_generator.scan_history.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("dorks", response.data) + self.assertIn("Test Dork", response.data["dorks"]) + self.assertGreaterEqual(len(response.data["dorks"]["Test Dork"]), 1) + self.assertEqual( + response.data["dorks"]["Test Dork"][0]["type"], + self.data_generator.dork.type, + ) + +class TestListEmployees(BaseTestCase): + """Test case for listing employees.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + def test_list_employees(self): + """Test listing employees for a scan.""" + url = reverse("api:queryEmployees") + response = self.client.get( + url, {"scan_id": self.data_generator.scan_history.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("employees", response.data) + self.assertGreaterEqual(len(response.data["employees"]), 1) + self.assertEqual( + response.data["employees"][0]["name"], self.data_generator.employee.name + ) + +class TestListOsintUsers(BaseTestCase): + """Test case for listing OSINT users.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + self.data_generator.create_metafinder_document() + + def test_list_osint_users(self): + """Test listing OSINT users for a scan.""" + url = reverse("api:queryMetadata") + response = self.client.get( + url, {"scan_id": self.data_generator.scan_history.id} + ) + 
self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("metadata", response.data) + self.assertGreaterEqual(len(response.data["metadata"]), 1) + self.assertEqual( + response.data["metadata"][0]["author"], + self.data_generator.metafinder_document.author, + ) + +class TestListMetadata(BaseTestCase): + """Test case for listing metadata.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + self.data_generator.create_metafinder_document() + + def test_list_metadata(self): + """Test listing metadata for a scan.""" + url = reverse("api:queryMetadata") + response = self.client.get( + url, {"scan_id": self.data_generator.scan_history.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("metadata", response.data) + self.assertGreaterEqual(len(response.data["metadata"]), 1) + self.assertEqual( + response.data["metadata"][0]["doc_name"], + self.data_generator.metafinder_document.doc_name, + ) + self.assertEqual( + response.data["metadata"][0]["url"], + self.data_generator.metafinder_document.url, + ) + self.assertEqual( + response.data["metadata"][0]["title"], + self.data_generator.metafinder_document.title, + ) diff --git a/web/api/tests/test_project.py b/web/api/tests/test_project.py new file mode 100644 index 00000000..3522fcad --- /dev/null +++ b/web/api/tests/test_project.py @@ -0,0 +1,131 @@ +""" +This file contains the test cases for the API views. +""" + +from unittest.mock import patch +from django.utils import timezone +from django.urls import reverse +from rest_framework import status +from utils.test_base import BaseTestCase + +__all__ = [ + 'TestCreateProjectApi', + 'TestAddReconNote', + 'TestListTodoNotes', + 'TestGPTAttackSuggestion' +] + +class TestCreateProjectApi(BaseTestCase): + """Tests for the Create Project API.""" + + def test_create_project_success(self): + """Test successful project creation.""" + api_url = reverse("api:create_project") + response = self.client.get( + api_url, + { + "name": "New Project", + "insert_date": timezone.now(), + "slug": "new-project", + }, + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertEqual(response.data["project_name"], "New Project") + + def test_create_project_failure(self): + """Test project creation failure when no name is provided.""" + api_url = reverse("api:create_project") + response = self.client.get(api_url) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertFalse(response.data["status"]) + +class TestAddReconNote(BaseTestCase): + """Test case for the Add Recon Note API.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + + def test_add_recon_note(self): + """Test adding a recon note.""" + api_url = reverse("api:addReconNote") + data = { + "subdomain_id": self.data_generator.subdomain.id, + "scan_history_id": self.data_generator.scan_history.id, + "title": "Test Note", + "description": "This is a test note", + "project": self.data_generator.project.slug, + } + response = self.client.post(api_url, data) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + + def test_add_recon_note_missing_data(self): + """Test adding a recon note with missing data.""" + api_url = reverse("api:addReconNote") + data = {"title": "Test Note", "slug": "test-project"} + response = self.client.post(api_url, data) + 
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertFalse(response.data["status"]) + +class TestListTodoNotes(BaseTestCase): + """Test case for listing todo notes.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + self.data_generator.create_todo_note() + + def test_list_todo_notes(self): + """Test listing todo notes for a project.""" + url = reverse("api:listTodoNotes") + response = self.client.get(url, {"project": self.data_generator.project.slug}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data["notes"]), 1) + self.assertEqual( + response.data["notes"][0]["id"], self.data_generator.todo_note.id + ) + self.assertEqual( + response.data["notes"][0]["title"], self.data_generator.todo_note.title + ) + self.assertEqual( + response.data["notes"][0]["description"], + self.data_generator.todo_note.description, + ) + self.assertEqual( + response.data["notes"][0]["project"], + self.data_generator.todo_note.project.id, + ) + self.assertEqual( + response.data["notes"][0]["subdomain"], + self.data_generator.todo_note.subdomain.id, + ) + self.assertEqual( + response.data["notes"][0]["scan_history"], + self.data_generator.todo_note.scan_history.id, + ) + +class TestGPTAttackSuggestion(BaseTestCase): + """Tests for the GPT Attack Suggestion API.""" + + def setUp(self): + super().setUp() + self.data_generator.create_project_base() + + @patch("reNgine.gpt.GPTAttackSuggestionGenerator.get_attack_suggestion") + def test_get_attack_suggestion(self, mock_get_suggestion): + """Test getting an attack suggestion for a subdomain.""" + mock_get_suggestion.return_value = { + "status": True, + "description": "Test attack suggestion", + } + api_url = reverse("api:gpt_get_possible_attacks") + response = self.client.get( + api_url, {"subdomain_id": self.data_generator.subdomain.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertEqual(response.data["description"], "Test attack suggestion") diff --git a/web/api/tests/test_scan.py b/web/api/tests/test_scan.py new file mode 100644 index 00000000..a9e8994d --- /dev/null +++ b/web/api/tests/test_scan.py @@ -0,0 +1,306 @@ +""" +This file contains the test cases for the API views. 
+""" +import json +from unittest.mock import patch +from django.urls import reverse +from rest_framework import status +from utils.test_base import BaseTestCase + +__all__ = [ + 'TestScanStatus', + 'TestListScanHistory', + 'TestListActivityLogsViewSet', + 'TestListScanLogsViewSet', + 'TestStopScan', + 'TestInitiateSubTask', + 'TestListEngines', + 'TestVisualiseData', + 'TestListTechnology', + 'TestDirectoryViewSet', + 'TestListSubScans', + 'TestFetchSubscanResults', + 'TestListInterestingKeywords' +] + +class TestScanStatus(BaseTestCase): + """Test case for checking scan status.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + def test_scan_status(self): + """Test checking the status of a scan.""" + url = reverse("api:scan_status") + response = self.client.get(url, {"project": self.data_generator.project.slug}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("scans", response.data) + self.assertIn("tasks", response.data) + self.assertIsInstance(response.data["scans"], dict) + self.assertIsInstance(response.data["tasks"], dict) + if response.data["scans"]: + self.assertIn("id", response.data["scans"]["completed"][0]) + self.assertIn("scan_status", response.data["scans"]["completed"][0]) + +class TestListScanHistory(BaseTestCase): + """Test case for listing scan history.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + def test_list_scan_history(self): + """Test listing scan history for a project.""" + url = reverse("api:listScanHistory") + response = self.client.get(url, {"project": self.data_generator.project.slug}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data), 1) + self.assertEqual(response.data[0]["id"], self.data_generator.scan_history.id) + +class TestListActivityLogsViewSet(BaseTestCase): + """Tests for the ListActivityLogsViewSet.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + self.data_generator.create_scan_history() + self.data_generator.create_scan_activity() + self.data_generator.create_command() + + def test_get_queryset(self): + """Test retrieving activity logs.""" + url = reverse('api:activity-logs-list') + response = self.client.get(url, {'activity_id': self.data_generator.scan_activity.id}) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn('results', response.data) + self.assertGreaterEqual(len(response.data['results']), 1) + self.assertEqual(response.data['results'][0]['command'], self.data_generator.command.command) + + def test_get_queryset_no_logs(self): + """Test retrieving activity logs when there are none.""" + non_existent_activity_id = 9999 # An ID that doesn't exist + url = reverse('api:activity-logs-list') + response = self.client.get(url, {'activity_id': non_existent_activity_id}) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn('results', response.data) + self.assertEqual(len(response.data['results']), 0) + +class TestListScanLogsViewSet(BaseTestCase): + """Tests for the ListScanLogsViewSet class.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + def test_list_scan_logs(self): + """Test retrieving scan logs.""" + url = reverse("api:scan-logs-list") + response = self.client.get( + url, {"scan_id": 
self.data_generator.scan_history.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("results", response.data) + +class TestStopScan(BaseTestCase): + """Tests for the StopScan class.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + @patch("api.views.StopScan") + def test_stop_scan(self, mock_stop_scan): + """Test stopping a scan.""" + mock_stop_scan.return_value = True + url = reverse("api:stop_scan") + data = {"scan_id": self.data_generator.scan_history.id} + response = self.client.post(url, data) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + +class TestInitiateSubTask(BaseTestCase): + """Tests for the InitiateSubTask class.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + @patch("api.views.initiate_subscan") + def test_initiate_subtask(self, mock_initiate_subscan): + """Test initiating a subtask.""" + mock_initiate_subscan.return_value = True + url = reverse("api:initiate_subscan") + data = { + "subdomain_ids": [self.data_generator.subdomain.id,self.data_generator.subdomain.id], + "tasks": ['httpcrawl','osint'], + "engine_id": "1", + } + response = self.client.post(url, data=json.dumps(data), content_type='application/json') + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + +class TestListEngines(BaseTestCase): + """Test case for listing engines.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + def test_list_engines(self): + """Test listing all available engines.""" + url = reverse("api:listEngines") + response = self.client.get(url) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("engines", response.data) + self.assertGreaterEqual(len(response.data["engines"]), 1) + + + + +class TestVisualiseData(BaseTestCase): + """Test case for visualising scan data.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + def test_visualise_data(self): + """Test retrieving visualisation data for a scan.""" + url = reverse("api:queryAllScanResultVisualise") + response = self.client.get( + url, {"scan_id": self.data_generator.scan_history.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data), 1) + self.assertEqual(response.data["description"], self.data_generator.domain.name) + + +class TestListTechnology(BaseTestCase): + """Test case for listing technologies.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + def test_list_technology(self): + """Test listing technologies for a target.""" + url = reverse("api:listTechnologies") + response = self.client.get(url, {"target_id": self.data_generator.domain.id}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("technologies", response.data) + self.assertGreaterEqual(len(response.data["technologies"]), 1) + self.assertEqual( + response.data["technologies"][0]["name"], + self.data_generator.technology.name, + ) + +class TestDirectoryViewSet(BaseTestCase): + """Tests for the Directory ViewSet API.""" + + def setUp(self): + super().setUp() + self.data_generator.create_project_base() + self.data_generator.create_directory_scan() + 
self.data_generator.create_directory_file() + self.data_generator.directory_scan.directory_files.add( + self.data_generator.directory_file + ) + self.data_generator.subdomain.directories.add( + self.data_generator.directory_scan + ) + + def test_get_directory_files(self): + """Test retrieving directory files.""" + api_url = reverse("api:directories-list") + response = self.client.get( + api_url, {"scan_history": self.data_generator.scan_history.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn('results', response.data) + self.assertGreaterEqual(len(response.data["results"]), 1) + self.assertEqual( + response.data["results"][0]["name"], self.data_generator.directory_file.name + ) + + def test_get_directory_files_by_subdomain(self): + """Test retrieving directory files by subdomain.""" + api_url = reverse("api:directories-list") + response = self.client.get( + api_url, {"subdomain_id": self.data_generator.subdomain.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn('results', response.data) + self.assertGreaterEqual(len(response.data["results"]), 1) + self.assertEqual( + response.data["results"][0]["name"], self.data_generator.directory_file.name + ) + +class TestListSubScans(BaseTestCase): + """Test case for listing subscans.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + self.subscans = self.data_generator.create_subscan() + self.subscans[-1].celery_ids = ["test_celery_id"] + self.subscans[-1].save() + + def test_list_subscans(self): + """Test listing all subscans.""" + api_url = reverse("api:listSubScans") + response = self.client.post( + api_url, {"scan_history_id": self.data_generator.scan_history.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn('results', response.data) + self.assertTrue(response.data["status"]) + self.assertGreaterEqual(len(response.data["results"]), 1) + + # Test if the created subscan is in the results + found_subscan = next((s for s in response.data["results"] if s["celery_ids"][0] == "test_celery_id"), None) + self.assertIsNotNone(found_subscan, "The created subscan was not found in the results") + self.assertEqual(found_subscan["id"], self.subscans[-1].id) + +class TestFetchSubscanResults(BaseTestCase): + """Test case for fetching subscan results.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + self.data_generator.create_subscan() + + def test_fetch_subscan_results(self): + """Test fetching results of a subscan.""" + api_url = reverse("api:fetch_subscan_results") + response = self.client.get( + api_url, {"subscan_id": self.data_generator.subscans[-1].id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("subscan", response.data) + self.assertIn("result", response.data) + +class TestListInterestingKeywords(BaseTestCase): + """Tests for listing interesting keywords.""" + + @patch("api.views.get_lookup_keywords") + def test_list_interesting_keywords(self, mock_get_keywords): + """Test listing interesting keywords.""" + mock_get_keywords.return_value = ["keyword1", "keyword2"] + api_url = reverse("api:listInterestingKeywords") + response = self.client.get(api_url) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data, ["keyword1", "keyword2"]) diff --git a/web/api/tests/test_search.py b/web/api/tests/test_search.py new file mode 100644 index
00000000..6ed84117 --- /dev/null +++ b/web/api/tests/test_search.py @@ -0,0 +1,73 @@ +""" +This file contains the test cases for the API views. +""" + +from django.urls import reverse +from rest_framework import status +from utils.test_base import BaseTestCase + +__all__ = [ + 'TestSearchHistoryView', + 'TestUniversalSearch' +] + +class TestSearchHistoryView(BaseTestCase): + """Tests for the Search History API.""" + + def setUp(self): + super().setUp() + self.data_generator.create_search_history() + + def test_get_search_history(self): + """Test retrieving search history.""" + api_url = reverse("api:search_history") + response = self.client.get(api_url) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertGreaterEqual(len(response.data["results"]), 1) + self.assertEqual( + response.data["results"][0]["query"], + self.data_generator.search_history.query, + ) + +class TestUniversalSearch(BaseTestCase): + """Test case for the Universal Search API.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + self.data_generator.create_endpoint() + self.data_generator.create_vulnerability() + + def test_universal_search(self): + """Test the universal search functionality.""" + api_url = reverse("api:search") + response = self.client.get(api_url, {"query": "admin"}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertIn( + "admin.example.com", + [sub["name"] for sub in response.data["results"]["subdomains"]], + ) + self.assertIn( + "https://admin.example.com/endpoint", + [ep["http_url"] for ep in response.data["results"]["endpoints"]], + ) + + def test_universal_search_no_query(self): + """Test the universal search with no query parameter.""" + api_url = reverse("api:search") + response = self.client.get(api_url) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertFalse(response.data["status"]) + self.assertEqual(response.data["message"], "No query parameter provided!") + + def test_universal_search_with_special_characters(self): + """Test the universal search functionality with special characters.""" + api_url = reverse("api:search") + special_query = "admin'; DROP TABLE users;--" + response = self.client.get(api_url, {"query": special_query}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertFalse(response.data["status"]) + self.assertNotIn("users", response.data["results"]) diff --git a/web/api/tests/test_subdomain.py b/web/api/tests/test_subdomain.py new file mode 100644 index 00000000..0ceebb9c --- /dev/null +++ b/web/api/tests/test_subdomain.py @@ -0,0 +1,222 @@ +""" +This file contains the test cases for the API views. 
+""" + +from django.urls import reverse +from rest_framework import status +from startScan.models import Subdomain +from utils.test_base import BaseTestCase + +__all__ = [ + 'TestQueryInterestingSubdomains', + 'TestDeleteSubdomain', + 'TestListSubdomains', + 'TestSubdomainsViewSet', + 'TestSubdomainChangesViewSet', + 'TestToggleSubdomainImportantStatus', + 'TestSubdomainDatatableViewSet', + 'TestInterestingSubdomainViewSet' +] + +class TestQueryInterestingSubdomains(BaseTestCase): + """Tests for querying interesting subdomains.""" + + def setUp(self): + super().setUp() + self.data_generator.create_project_base() + self.data_generator.create_interesting_lookup_model() + + def test_query_interesting_subdomains(self): + """Test querying interesting subdomains for a given sca + n.""" + api_url = reverse("api:queryInterestingSubdomains") + response = self.client.get( + api_url, {"scan_id": self.data_generator.scan_history.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("admin.example.com", [sub["name"] for sub in response.data]) + +class TestDeleteSubdomain(BaseTestCase): + """Tests for deleting subdomains.""" + + def setUp(self): + super().setUp() + self.data_generator.create_project_base() + + def test_delete_subdomain(self): + """Test deleting a subdomain.""" + api_url = reverse("api:delete_subdomain") + data = {"subdomain_ids": [str(self.data_generator.subdomain.id)]} + response = self.client.post(api_url, data) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertFalse( + Subdomain.objects.filter(id=self.data_generator.subdomain.id).exists() + ) + + def test_delete_nonexistent_subdomain(self): + """Test deleting a non-existent subdomain.""" + api_url = reverse("api:delete_subdomain") + data = {"subdomain_ids": ["nonexistent_id"]} + response = self.client.post(api_url, data) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + +class TestListSubdomains(BaseTestCase): + """Test case for listing subdomains.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + def test_list_subdomains(self): + """Test listing subdomains for a target.""" + url = reverse("api:querySubdomains") + response = self.client.get(url, {"target_id": self.data_generator.domain.id}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("subdomains", response.data) + self.assertGreaterEqual(len(response.data["subdomains"]), 1) + self.assertEqual( + response.data["subdomains"][0]["name"], self.data_generator.subdomain.name + ) + +class TestSubdomainsViewSet(BaseTestCase): + """Test case for subdomains viewset.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + def test_subdomains_viewset(self): + """Test retrieving subdomains for a scan.""" + url = reverse("api:subdomains-list") + response = self.client.get( + url, {"scan_id": self.data_generator.scan_history.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data), 1) + self.assertEqual( + response.data["results"][0]["name"], self.data_generator.subdomain.name + ) + +class TestSubdomainChangesViewSet(BaseTestCase): + """Test case for subdomain changes viewset.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + self.data_generator.create_scan_history() + 
self.data_generator.create_subdomain("admin1.example.com") + + def test_subdomain_changes_viewset(self): + """Test retrieving subdomain changes for a scan.""" + url = reverse("api:subdomain-changes-list") + response = self.client.get( + url, {"scan_id": self.data_generator.scan_history.id, "changes": "added"} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data), 1) + self.assertEqual( + response.data["results"][0]["name"], self.data_generator.subdomain.name + ) + self.assertEqual(response.data["results"][0]["change"], "added") + +class TestToggleSubdomainImportantStatus(BaseTestCase): + """Test case for toggling subdomain important status.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + + def test_toggle_subdomain_important_status(self): + """Test toggling the important status of a subdomain.""" + api_url = reverse("api:toggle_subdomain") + initial_status = self.data_generator.subdomain.is_important + response = self.client.post( + api_url, {"subdomain_id": self.data_generator.subdomain.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.data_generator.subdomain.refresh_from_db() + self.assertNotEqual(initial_status, self.data_generator.subdomain.is_important) + +class TestSubdomainDatatableViewSet(BaseTestCase): + """Tests for the Subdomain Datatable ViewSet API.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + + def test_list_subdomains(self): + """Test listing subdomains.""" + api_url = reverse("api:subdomain-datatable-list") + response = self.client.get( + api_url, {"project": self.data_generator.project.slug} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data["results"]), 1) + self.assertEqual( + response.data["results"][0]["name"], self.data_generator.subdomain.name + ) + + def test_list_subdomains_by_domain(self): + """Test listing subdomains by domain.""" + api_url = reverse("api:subdomain-datatable-list") + response = self.client.get( + api_url, + { + "target_id": self.data_generator.domain.id, + "project": self.data_generator.project.slug, + }, + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data["results"]), 1) + self.assertEqual( + response.data["results"][0]["name"], self.data_generator.subdomain.name + ) + +class TestInterestingSubdomainViewSet(BaseTestCase): + """Test case for the Interesting Subdomain ViewSet API.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + self.data_generator.create_interesting_lookup_model() + + def test_list_interesting_subdomains(self): + """Test listing interesting subdomains.""" + api_url = reverse("api:interesting-subdomains-list") + response = self.client.get( + api_url, + { + "project": self.data_generator.project.slug, + "scan_id": self.data_generator.scan_history.id, + }, + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data["results"]), 1) + self.assertEqual( + response.data["results"][0]["name"], self.data_generator.subdomain.name + ) + + def test_list_interesting_subdomains_by_domain(self): + """Test listing interesting subdomains by domain.""" + api_url = reverse("api:interesting-subdomains-list") + response = self.client.get( + api_url, 
+ { + "target_id": self.data_generator.domain.id, + "project": self.data_generator.project.slug, + "scan_id": self.data_generator.scan_history.id, + }, + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data["results"]), 1) + self.assertEqual( + response.data["results"][0]["name"], self.data_generator.subdomain.name + ) diff --git a/web/api/tests/test_target.py b/web/api/tests/test_target.py new file mode 100644 index 00000000..ac9f9a26 --- /dev/null +++ b/web/api/tests/test_target.py @@ -0,0 +1,72 @@ +""" +This file contains the test cases for the API views. +""" + +from django.urls import reverse +from rest_framework import status +from targetApp.models import Domain +from utils.test_base import BaseTestCase + +__all__ = [ + 'TestAddTarget', + 'TestListTargetsDatatableViewSet' +] + +class TestAddTarget(BaseTestCase): + """Test case for adding a target.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + Domain.objects.all().delete() + + def test_add_target(self): + """Test adding a new target.""" + api_url = reverse("api:addTarget") + data = { + "domain_name": "example.com", + "h1_team_handle": "team_handle", + "description": "Test description", + "organization": "Test Org", + "slug": self.data_generator.project.slug, + } + response = self.client.post(api_url, data) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertEqual(response.data["domain_name"], self.data_generator.domain.name) + self.assertTrue( + Domain.objects.filter(name=self.data_generator.domain.name).exists() + ) + + # Test adding duplicate target + response = self.client.post(api_url, data) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertFalse(response.data["status"]) + +class TestListTargetsDatatableViewSet(BaseTestCase): + """Tests for the List Targets Datatable API.""" + + def setUp(self): + super().setUp() + self.data_generator.create_project_base() + + def test_list_targets(self): + """Test listing targets.""" + api_url = reverse("api:targets-list") + response = self.client.get(api_url) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data["results"]), 1) + self.assertEqual( + response.data["results"][0]["name"], self.data_generator.domain.name + ) + + def test_list_targets_with_slug(self): + """Test listing targets with project slug.""" + api_url = reverse("api:targets-list") + response = self.client.get(api_url, {"slug": "test-project"}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data["results"]), 1) + self.assertEqual( + response.data["results"][0]["name"], self.data_generator.domain.name + ) diff --git a/web/api/tests/test_tools.py b/web/api/tests/test_tools.py new file mode 100644 index 00000000..9a92a0b6 --- /dev/null +++ b/web/api/tests/test_tools.py @@ -0,0 +1,285 @@ +""" +This file contains the test cases for the API views. 
+""" + +from unittest.mock import patch +from django.urls import reverse +from rest_framework import status +from startScan.models import SubScan +from utils.test_base import BaseTestCase + +__all__ = [ + 'TestOllamaManager', + 'TestWafDetector', + 'TestCMSDetector', + 'TestGfList', + 'TestUpdateTool', + 'TestUninstallTool', + 'TestGetExternalToolCurrentVersion', + 'TestRengineUpdateCheck', + 'TestGithubToolCheckGetLatestRelease', + 'TestGetFileContents', + 'TestDeleteMultipleRows' +] + +class TestOllamaManager(BaseTestCase): + """Tests for the OllamaManager API endpoints.""" + + @patch("requests.post") + def test_get_download_model(self, mock_post): + """Test downloading an Ollama model.""" + mock_post.return_value.json.return_value = {"status": "success"} + api_url = reverse("api:ollama_manager") + response = self.client.get(api_url, data={"model": "llama2"}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + + @patch("requests.post") + def test_get_download_model_failure(self, mock_post): + """Test failed downloading of an Ollama model.""" + mock_post.return_value.json.return_value = {"error": "pull model manifest: file does not exist"} + api_url = reverse("api:ollama_manager") + response = self.client.get(api_url, data={"model": "invalid-model"}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data["error"], "pull model manifest: file does not exist") + self.assertFalse(response.data["status"]) + + @patch("requests.delete") + def test_delete_model(self, mock_delete): + """Test deleting an Ollama model.""" + mock_delete.return_value.json.return_value = {"status": "success"} + api_url = reverse("api:ollama_manager") + response = self.client.delete( + api_url, data={"model": "gpt-4"}, content_type="application/json" + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + + def test_put_update_model(self): + """Test updating the selected Ollama model.""" + api_url = reverse("api:ollama_manager") + response = self.client.put( + api_url, data={"model": "gpt-4"}, content_type="application/json" + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + +class TestWafDetector(BaseTestCase): + """Tests for the WAF Detector API.""" + + @patch("api.views.run_wafw00f") + def test_waf_detection_success(self, mock_run_wafw00f): + """Test successful WAF detection.""" + mock_run_wafw00f.delay.return_value.get.return_value = ( + "WAF Detected: CloudFlare" + ) + api_url = reverse("api:waf_detector") + response = self.client.get(api_url, {"url": "https://www.cloudflare.com"}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertEqual(response.data["results"], "WAF Detected: CloudFlare") + + @patch("api.views.run_wafw00f") + def test_waf_detection_no_waf(self, mock_run_wafw00f): + """Test WAF detection when no WAF is detected.""" + mock_run_wafw00f.delay.return_value.get.return_value = "No WAF detected" + api_url = reverse("api:waf_detector") + response = self.client.get(api_url, {"url": "https://www.cloudflare.com"}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertFalse(response.data["status"]) + self.assertEqual(response.data["message"], "Could not detect any WAF!") + + def test_waf_detection_missing_url(self): + """Test WAF detection with missing URL parameter.""" + api_url = reverse("api:waf_detector") + response = 
self.client.get(api_url) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertFalse(response.data["status"]) + self.assertEqual(response.data["message"], "URL parameter is missing") + +class TestCMSDetector(BaseTestCase): + """Test case for CMS detection functionality.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + + @patch("api.views.run_cmseek.delay") + def test_cms_detector(self, mock_run_cmseek): + """Test CMS detection for a given URL.""" + mock_run_cmseek.return_value.get.return_value = { + "status": True, + "cms": "WordPress", + } + url = reverse("api:cms_detector") + response = self.client.get(url, {"url": self.data_generator.domain.name}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertEqual(response.data["cms"], "WordPress") + +class TestGfList(BaseTestCase): + """Test case for retrieving GF patterns.""" + + @patch("api.views.run_gf_list.delay") + def test_gf_list(self, mock_run_gf_list): + """Test retrieving a list of GF patterns.""" + mock_run_gf_list.return_value.get.return_value = { + "status": True, + "output": ["pattern1", "pattern2"], + } + url = reverse("api:gf_list") + response = self.client.get(url) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data, ["pattern1", "pattern2"]) + +class TestUpdateTool(BaseTestCase): + """Test case for updating a tool.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_installed_external_tool() + + @patch("api.views.run_command") + def test_update_tool(self, mock_run_command): + """Test updating a tool.""" + api_url = reverse("api:update_tool") + response = self.client.get( + api_url, {"tool_id": self.data_generator.installed_external_tool.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + mock_run_command.assert_called() + mock_run_command.apply_async.assert_called_once() + +class TestUninstallTool(BaseTestCase): + """Tests for the UninstallTool class.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_installed_external_tool() + + @patch("api.views.UninstallTool") + def test_uninstall_tool(self, mock_uninstall_tool): + """Test uninstalling a tool.""" + mock_uninstall_tool.return_value = True + url = reverse("api:uninstall_tool") + data = {"tool_id": self.data_generator.installed_external_tool.id} + response = self.client.get(url, data) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + +class TestGetExternalToolCurrentVersion(BaseTestCase): + """Test case for getting the current version of an external tool.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.tool = self.data_generator.create_installed_external_tool() + self.tool.version_lookup_command = "echo 'v1.0.0'" + self.tool.version_match_regex = r"v\d+\.\d+\.\d+" + self.tool.save() + + @patch("api.views.run_command") + def test_get_external_tool_current_version(self, mock_run_command): + """Test getting the current version of an external tool.""" + mock_run_command.return_value = (None, "v1.0.0") + url = reverse("api:external_tool_get_current_release") + response = self.client.get(url, {"tool_id": self.tool.id}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + 
self.assertEqual(response.data["version_number"], "v1.0.0") + self.assertEqual(response.data["tool_name"], self.tool.name) + +class TestRengineUpdateCheck(BaseTestCase): + """Tests for checking reNgine updates.""" + + @patch("requests.get") + def test_rengine_update_check(self, mock_get): + """Test checking for reNgine updates.""" + mock_get.return_value.json.return_value = [ + {"name": "v2.0.0", "body": "Changelog"} + ] + api_url = reverse("api:check_rengine_update") + response = self.client.get(api_url) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertIn("latest_version", response.data) + self.assertIn("current_version", response.data) + self.assertIn("update_available", response.data) + +class TestGithubToolCheckGetLatestRelease(BaseTestCase): + """Test case for checking the latest release of a GitHub tool.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.tool = self.data_generator.create_installed_external_tool() + self.tool.github_url = "https://github.com/example/tool" + self.tool.save() + + @patch("api.views.requests.get") + def test_github_tool_check_get_latest_release(self, mock_get): + """Test checking the latest release of a GitHub tool.""" + mock_get.return_value.json.return_value = [ + { + "url": "https://api.github.com/repos/example/tool/releases/1", + "id": 1, + "name": "v1.0.0", + "body": "Release notes", + } + ] + url = reverse("api:github_tool_latest_release") + response = self.client.get(url, {"tool_id": self.tool.id}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertEqual(response.data["name"], "v1.0.0") + +class TestGetFileContents(BaseTestCase): + """Test case for retrieving file contents.""" + + @patch("api.views.os.path.exists") + @patch("api.views.run_command") + def test_get_file_contents(self, mock_run_command, mock_exists): + """Test retrieving contents of a file.""" + mock_exists.return_value = True + mock_run_command.return_value = (0, "test content") + url = reverse("api:getFileContents") + response = self.client.get(url, {"nuclei_config": True}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertGreaterEqual(len(response.data["content"]), 1) + +class TestDeleteMultipleRows(BaseTestCase): + """Test case for deleting multiple rows.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + self.data_generator.create_subscan() + self.data_generator.create_subscan() + + def test_delete_multiple_rows(self): + """Test deleting multiple rows.""" + api_url = reverse("api:delete_rows") + data = { + "type": "subscan", + "rows": [ + int(self.data_generator.subscans[0].id), + int(self.data_generator.subscans[1].id), + ], + } + response = self.client.post(api_url, data) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertFalse( + SubScan.objects.filter( + id__in=[ + self.data_generator.subscans[0].id, + self.data_generator.subscans[1].id, + ] + ).exists() + ) diff --git a/web/api/tests/test_vulnerability.py b/web/api/tests/test_vulnerability.py new file mode 100644 index 00000000..c3cf416a --- /dev/null +++ b/web/api/tests/test_vulnerability.py @@ -0,0 +1,231 @@ +""" +This file contains the test cases for the API views. 
+""" + +from unittest.mock import patch, MagicMock +from django.urls import reverse +from rest_framework import status +from startScan.models import Vulnerability +from utils.test_base import BaseTestCase + +__all__ = [ + 'TestVulnerabilityViewSet', + 'TestGPTVulnerabilityReportGenerator', + 'TestDeleteVulnerability', + 'TestVulnerabilityReport', + 'TestFetchMostCommonVulnerability', + 'TestCVEDetails', + 'TestFetchMostVulnerable' +] + +class TestVulnerabilityViewSet(BaseTestCase): + """Tests for the Vulnerability ViewSet API.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + self.data_generator.create_endpoint() + self.data_generator.create_vulnerability() + + def test_list_vulnerabilities(self): + """Test listing vulnerabilities.""" + api_url = reverse("api:vulnerabilities-list") + response = self.client.get(api_url) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn('count', response.data) + self.assertIn('next', response.data) + self.assertIn('previous', response.data) + self.assertIn('results', response.data) + self.assertIsInstance(response.data['results'][0], dict) + self.assertGreaterEqual(len(response.data["results"]), 1) + self.assertEqual( + response.data["results"][0]["name"], + self.data_generator.vulnerabilities[0].name, + ) + + def test_list_vulnerabilities_by_scan(self): + """Test listing vulnerabilities by scan history.""" + api_url = reverse("api:vulnerabilities-list") + response = self.client.get( + api_url, {"scan_history": self.data_generator.scan_history.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data["results"]), 1) + self.assertEqual( + response.data["results"][0]["name"], + self.data_generator.vulnerabilities[0].name, + ) + + def test_list_vulnerabilities_by_domain(self): + """Test listing vulnerabilities by domain.""" + api_url = reverse("api:vulnerabilities-list") + response = self.client.get(api_url, {"domain": "example.com"}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data["results"]), 1) + self.assertEqual( + response.data["results"][0]["name"], + self.data_generator.vulnerabilities[0].name, + ) + + def test_list_vulnerabilities_by_severity(self): + """Test listing vulnerabilities by severity.""" + api_url = reverse("api:vulnerabilities-list") + response = self.client.get(api_url, {"severity": 1}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data["results"]), 1) + self.assertEqual( + response.data["results"][0]["name"], + self.data_generator.vulnerabilities[0].name, + ) + +class TestGPTVulnerabilityReportGenerator(BaseTestCase): + """Tests for the GPT Vulnerability Report Generator API.""" + + def setUp(self): + super().setUp() + self.data_generator.create_project_base() + self.data_generator.create_endpoint() + self.data_generator.create_vulnerability() + + @patch("reNgine.tasks.gpt_vulnerability_description.apply_async") + def test_get_vulnerability_report(self, mock_apply_async): + """Test generating a vulnerability report.""" + mock_task = MagicMock() + mock_task.wait.return_value = { + "status": True, + "description": "Test vulnerability report", + } + mock_apply_async.return_value = mock_task + api_url = reverse("api:gpt_vulnerability_report_generator") + response = self.client.get( + api_url, {"id": self.data_generator.vulnerabilities[0].id} + ) + 
self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertEqual(response.data["description"], "Test vulnerability report") + +class TestDeleteVulnerability(BaseTestCase): + """Tests for deleting vulnerabilities.""" + + def setUp(self): + super().setUp() + self.data_generator.create_project_base() + self.data_generator.create_endpoint() + self.data_generator.create_vulnerability() + + def test_delete_vulnerability(self): + """Test deleting a vulnerability.""" + api_url = reverse("api:delete_vulnerability") + data = {"vulnerability_ids": [self.data_generator.vulnerabilities[0].id]} + response = self.client.post(api_url, data) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertFalse( + Vulnerability.objects.filter( + id=self.data_generator.vulnerabilities[0].id + ).exists() + ) + +class TestVulnerabilityReport(BaseTestCase): + """Test case for vulnerability reporting functionality.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + self.data_generator.create_endpoint() + self.data_generator.create_vulnerability() + + @patch("api.views.send_hackerone_report") + def test_vulnerability_report(self, mock_send_report): + """Test sending a vulnerability report.""" + mock_send_report.return_value = True + url = reverse("api:vulnerability_report") + response = self.client.get( + url, {"vulnerability_id": self.data_generator.vulnerabilities[0].id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + +class TestFetchMostCommonVulnerability(BaseTestCase): + """Test case for the Fetch Most Common Vulnerability API.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + self.data_generator.create_endpoint() + self.data_generator.create_vulnerability() + self.data_generator.create_vulnerability() + + def test_fetch_most_common_vulnerability(self): + """Test fetching the most common vulnerability.""" + api_url = reverse("api:fetch_most_common_vulnerability") + data = { + "target_id": int(self.data_generator.domain.id), + "scan_history_id": int(self.data_generator.scan_history.id), + "slug": self.data_generator.project.slug, + "limit": 10, + } + response = self.client.post(api_url, data) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertEqual( + response.data["result"][0]["name"], + self.data_generator.vulnerabilities[0].name, + ) + self.assertEqual(response.data["result"][0]["count"], 2) + +class TestCVEDetails(BaseTestCase): + """Test case for the CVE Details API.""" + + @patch("requests.get") + def test_get_cve_details(self, mock_get): + """Test getting CVE details.""" + mock_get.return_value.status_code = 200 + mock_get.return_value.json.return_value = { + "id": "CVE-2021-44228", + "summary": "Log4j vulnerability", + } + api_url = reverse("api:cve_details") + response = self.client.get(api_url, {"cve_id": "CVE-2021-44228"}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertEqual(response.data["result"]["id"], "CVE-2021-44228") + + def test_get_cve_details_missing_id(self): + """Test getting CVE details with missing ID.""" + api_url = reverse("api:cve_details") + response = self.client.get(api_url) + self.assertEqual(response.status_code, status.HTTP_200_OK) + 
self.assertFalse(response.data["status"]) + self.assertEqual(response.data["message"], "CVE ID not provided") + +class TestFetchMostVulnerable(BaseTestCase): + """Test case for the Fetch Most Vulnerable API.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + self.data_generator.create_endpoint() + self.data_generator.create_vulnerability() + self.data_generator.create_vulnerability() + + def test_fetch_most_vulnerable(self): + """Test fetching the most vulnerable subdomains.""" + api_url = reverse("api:fetch_most_vulnerable") + data = { + "target_id": int(self.data_generator.domain.id), + "scan_history_id": int(self.data_generator.scan_history.id), + "slug": self.data_generator.project.slug, + "limit": 10, + } + response = self.client.post(api_url, data) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertEqual( + response.data["result"][0]["name"], self.data_generator.subdomain.name + ) + self.assertEqual(response.data["result"][0]["vuln_count"], 2) diff --git a/web/api/urls.py b/web/api/urls.py index dee58443..ec9e1e10 100644 --- a/web/api/urls.py +++ b/web/api/urls.py @@ -6,19 +6,19 @@ app_name = 'api' router = routers.DefaultRouter() -router.register(r'listDatatableSubdomain', SubdomainDatatableViewSet) -router.register(r'listTargets', ListTargetsDatatableViewSet) -router.register(r'listSubdomains', SubdomainsViewSet) -router.register(r'listEndpoints', EndPointViewSet) -router.register(r'listDirectories', DirectoryViewSet) -router.register(r'listVulnerability', VulnerabilityViewSet) -router.register(r'listInterestingSubdomains', InterestingSubdomainViewSet) -router.register(r'listInterestingEndpoints', InterestingEndpointViewSet) -router.register(r'listSubdomainChanges', SubdomainChangesViewSet) -router.register(r'listEndPointChanges', EndPointChangesViewSet) -router.register(r'listIps', IpAddressViewSet) -router.register(r'listActivityLogs', ListActivityLogsViewSet) -router.register(r'listScanLogs', ListScanLogsViewSet) +router.register(r'listDatatableSubdomain', SubdomainDatatableViewSet, basename='subdomain-datatable') +router.register(r'listTargets', ListTargetsDatatableViewSet, basename='targets') +router.register(r'listSubdomains', SubdomainsViewSet, basename='subdomains') +router.register(r'listEndpoints', EndPointViewSet, basename='endpoints') +router.register(r'listDirectories', DirectoryViewSet, basename='directories') +router.register(r'listVulnerability', VulnerabilityViewSet, basename='vulnerabilities') +router.register(r'listInterestingSubdomains', InterestingSubdomainViewSet, basename='interesting-subdomains') +router.register(r'listInterestingEndpoints', InterestingEndpointViewSet, basename='interesting-endpoints') +router.register(r'listSubdomainChanges', SubdomainChangesViewSet, basename='subdomain-changes') +router.register(r'listEndPointChanges', EndPointChangesViewSet, basename='endpoint-changes') +router.register(r'listIps', IpAddressViewSet, basename='ip-addresses') +router.register(r'listActivityLogs', ListActivityLogsViewSet, basename='activity-logs') +router.register(r'listScanLogs', ListScanLogsViewSet, basename='scan-logs') urlpatterns = [ url('^', include(router.urls)), diff --git a/web/api/views.py b/web/api/views.py index 30ddf734..d753cb0a 100644 --- a/web/api/views.py +++ b/web/api/views.py @@ -3,13 +3,12 @@ import os.path from pathlib import Path import socket -import subprocess from ipaddress import IPv4Network from 
collections import defaultdict import requests import validators -from dashboard.models import * +from dashboard.models import OllamaSettings, Project, SearchHistory from django.db.models import CharField, Count, F, Q, Value from django.shortcuts import get_object_or_404 from django.utils import timezone @@ -19,22 +18,99 @@ from rest_framework.response import Response from rest_framework.views import APIView from rest_framework.status import HTTP_400_BAD_REQUEST +from rest_framework.parsers import JSONParser -from recon_note.models import * +from recon_note.models import TodoNote from reNgine.celery import app -from reNgine.common_func import * -from reNgine.definitions import ABORTED_TASK -from reNgine.settings import RENGINE_CURRENT_VERSION -from reNgine.settings import RENGINE_TOOL_PATH -from reNgine.tasks import * +from reNgine.common_func import ( + get_data_from_post_request, + get_interesting_endpoints, + get_interesting_subdomains, + get_lookup_keywords, + safe_int_cast +) +from reNgine.definitions import ( + ABORTED_TASK, + OLLAMA_INSTANCE, + NUCLEI_SEVERITY_MAP, + DEFAULT_GPT_MODELS, + RUNNING_TASK, + SUCCESS_TASK +) +from reNgine.settings import ( + RENGINE_CURRENT_VERSION, + RENGINE_TOOL_GITHUB_PATH +) +from reNgine.tasks import ( + create_scan_activity, + gpt_vulnerability_description, + initiate_subscan, + query_ip_history, + query_reverse_whois, + query_whois, + run_cmseek, + run_command, + run_gf_list, + run_wafw00f, + send_hackerone_report +) from reNgine.gpt import GPTAttackSuggestionGenerator -from reNgine.utilities import is_safe_path -from scanEngine.models import * -from startScan.models import * -from startScan.models import EndPoint -from targetApp.models import * - -from .serializers import * +from reNgine.utilities import is_safe_path, remove_lead_and_trail_slash +from scanEngine.models import EngineType, InstalledExternalTool +from startScan.models import ( + Command, + DirectoryFile, + DirectoryScan, + Dork, + Email, + Employee, + EndPoint, + IpAddress, + MetaFinderDocument, + Port, + ScanActivity, + ScanHistory, + Subdomain, + SubScan, + Technology, + Vulnerability, +) +from targetApp.models import Domain, Organization + +from .serializers import ( + CommandSerializer, + DirectoryFileSerializer, + DirectoryScanSerializer, + DomainSerializer, + DorkCountSerializer, + DorkSerializer, + EmailSerializer, + EmployeeSerializer, + EndpointOnlyURLsSerializer, + EndpointSerializer, + EndPointChangesSerializer, + EngineSerializer, + InterestingEndPointSerializer, + InterestingSubdomainSerializer, + IpSerializer, + IpSubdomainSerializer, + MetafinderDocumentSerializer, + MetafinderUserSerializer, + OnlySubdomainNameSerializer, + OrganizationSerializer, + OrganizationTargetsSerializer, + PortSerializer, + ReconNoteSerializer, + ScanHistorySerializer, + SearchHistorySerializer, + SubdomainChangesSerializer, + SubdomainSerializer, + SubScanResultSerializer, + SubScanSerializer, + TechnologyCountSerializer, + VisualiseDataSerializer, + VulnerabilitySerializer +) logger = logging.getLogger(__name__) @@ -46,129 +122,150 @@ def get(self, request): sends a POST request to download the model """ req = self.request - model_name = req.query_params.get('model') response = { 'status': False } + try: + model_name = req.query_params.get('model') + except Exception as e: + response['error'] = str(e) + return Response(response, status=400) + + if not model_name: + response['error'] = 'Model name is required' + return Response(response, status=400) + try: pull_model_api = 
f'{OLLAMA_INSTANCE}/api/pull' _response = requests.post( - pull_model_api, + pull_model_api, json={ 'name': model_name, 'stream': False } ).json() if _response.get('error'): - response['status'] = False response['error'] = _response.get('error') else: response['status'] = True except Exception as e: - response['error'] = str(e) + response['error'] = str(e) return Response(response) def delete(self, request): req = self.request - model_name = req.query_params.get('model') + response = {'status': False} + + # Get the model name from the request + model_name = get_data_from_post_request(req, 'model') + + # Check if the model name is provided + if not model_name: + response['error'] = 'Model name is required' + return Response(response, status=400) + delete_model_api = f'{OLLAMA_INSTANCE}/api/delete' - response = { - 'status': False - } + try: + # Make the API call to delete the model _response = requests.delete( - delete_model_api, - json={ - 'name': model_name - } + delete_model_api, + json={'name': model_name} ).json() + + # Check for errors in the response if _response.get('error'): - response['status'] = False response['error'] = _response.get('error') else: response['status'] = True except Exception as e: response['error'] = str(e) + return Response(response) def put(self, request): - req = self.request - model_name = req.query_params.get('model') - # check if model_name is in DEFAULT_GPT_MODELS - response = { - 'status': False - } - use_ollama = True - if any(model['name'] == model_name for model in DEFAULT_GPT_MODELS): - use_ollama = False + response = {'status': False} + + data = request.data + model_name = data.get('model') + + if not model_name: + response['error'] = 'Model name is required' + return Response(response, status=400) + + # Invert the condition to simplify the assignment + use_ollama = all(model['name'] != model_name for model in DEFAULT_GPT_MODELS) + try: + # Create or update OllamaSettings OllamaSettings.objects.update_or_create( defaults={ 'selected_model': model_name, - 'use_ollama': use_ollama + 'use_ollama': use_ollama, + 'selected': True }, id=1 ) response['status'] = True except Exception as e: response['error'] = str(e) - return Response(response) + return Response(response) class GPTAttackSuggestion(APIView): - def get(self, request): - req = self.request - subdomain_id = req.query_params.get('subdomain_id') - if not subdomain_id: - return Response({ - 'status': False, - 'error': 'Missing GET param Subdomain `subdomain_id`' - }) - try: - subdomain = Subdomain.objects.get(id=subdomain_id) - except Exception as e: - return Response({ - 'status': False, - 'error': 'Subdomain not found with id ' + subdomain_id - }) - if subdomain.attack_surface: - return Response({ - 'status': True, - 'subdomain_name': subdomain.name, - 'description': subdomain.attack_surface - }) - ip_addrs = subdomain.ip_addresses.all() - open_ports_str = '' - for ip in ip_addrs: - ports = ip.ports.all() - for port in ports: - open_ports_str += f'{port.number}/{port.service_name}, ' - tech_used = '' - for tech in subdomain.technologies.all(): - tech_used += f'{tech.name}, ' - input = f''' - Subdomain Name: {subdomain.name} - Subdomain Page Title: {subdomain.page_title} - Open Ports: {open_ports_str} - HTTP Status: {subdomain.http_status} - Technologies Used: {tech_used} - Content type: {subdomain.content_type} - Web Server: {subdomain.webserver} - Page Content Length: {subdomain.content_length} - ''' - gpt = GPTAttackSuggestionGenerator() - response = gpt.get_attack_suggestion(input) - 
response['subdomain_name'] = subdomain.name - if response.get('status'): - subdomain.attack_surface = response.get('description') - subdomain.save() - return Response(response) + def get(self, request): + req = self.request + subdomain_id = safe_int_cast(req.query_params.get('subdomain_id')) + if not subdomain_id: + return Response({ + 'status': False, + 'error': 'Missing GET param Subdomain `subdomain_id`' + }) + try: + subdomain = Subdomain.objects.get(id=subdomain_id) + except Subdomain.DoesNotExist: + return Response({ + 'status': False, + 'error': f'Subdomain not found with id {subdomain_id}' + }) + + if subdomain.attack_surface: + return Response({ + 'status': True, + 'subdomain_name': subdomain.name, + 'description': subdomain.attack_surface + }) + + ip_addrs = subdomain.ip_addresses.all() + open_ports = ', '.join(f'{port.number}/{port.service_name}' for ip in ip_addrs for port in ip.ports.all()) + tech_used = ', '.join(tech.name for tech in subdomain.technologies.all()) + + input_data = f''' + Subdomain Name: {subdomain.name} + Subdomain Page Title: {subdomain.page_title} + Open Ports: {open_ports} + HTTP Status: {subdomain.http_status} + Technologies Used: {tech_used} + Content type: {subdomain.content_type} + Web Server: {subdomain.webserver} + Page Content Length: {subdomain.content_length} + ''' + + gpt = GPTAttackSuggestionGenerator() + response = gpt.get_attack_suggestion(input_data) + response['subdomain_name'] = subdomain.name + + if response.get('status'): + subdomain.attack_surface = response.get('description') + subdomain.save() + + return Response(response) class GPTVulnerabilityReportGenerator(APIView): def get(self, request): req = self.request - vulnerability_id = req.query_params.get('id') + vulnerability_id = safe_int_cast(req.query_params.get('id')) if not vulnerability_id: return Response({ 'status': False, @@ -209,8 +306,8 @@ def get(self, request): class QueryInterestingSubdomains(APIView): def get(self, request): req = self.request - scan_id = req.query_params.get('scan_id') - domain_id = req.query_params.get('target_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) + domain_id = safe_int_cast(req.query_params.get('target_id')) if scan_id: queryset = get_interesting_subdomains(scan_history=scan_id) @@ -281,7 +378,7 @@ def get(self, request): return Response(response) try: - logger.info(f"Initiating WAF detection for URL: {url}") + logger.debug(f"Initiating WAF detection for URL: {url}") result = run_wafw00f.delay(url).get(timeout=30) if result.startswith("Unexpected error"): @@ -292,7 +389,7 @@ def get(self, request): else: response['message'] = 'Could not detect any WAF!' 
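# --- Editorial sketch, not part of the original patch ---
# The refactored views above replace raw query-param reads with
# safe_int_cast() from reNgine.common_func. Its implementation is not
# shown anywhere in this diff, so the following is only a hedged sketch
# of what such a helper could look like; the name matches the import,
# but the exact behaviour here is an assumption, not the project's code.

def safe_int_cast(value, default=None):
    """Best-effort cast of a request parameter to int.

    Returns `default` when the value is missing or not a valid integer;
    lists and tuples are cast element-wise so id-list parameters still work.
    """
    if value is None:
        return default
    if isinstance(value, (list, tuple)):
        return [safe_int_cast(v, default) for v in value]
    try:
        return int(value)
    except (TypeError, ValueError):
        return default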
- logger.info(f"WAF detection result: {response}") + logger.debug(f"WAF detection result: {response}") except Exception as e: logger.error(f"Error during WAF detection: {str(e)}") response['message'] = f"An error occurred: {str(e)}" @@ -372,82 +469,47 @@ def post(self, request): req = self.request data = req.data + response = {'status': False} + try: - limit = data.get('limit', 20) + limit = safe_int_cast(data.get('limit', 20)) project_slug = data.get('slug') - scan_history_id = data.get('scan_history_id') - target_id = data.get('target_id') + scan_history_id = safe_int_cast(data.get('scan_history_id')) + target_id = safe_int_cast(data.get('target_id')) is_ignore_info = data.get('ignore_info', False) - response = {} - response['status'] = False - - if project_slug: - project = Project.objects.get(slug=project_slug) - vulnerabilities = Vulnerability.objects.filter(target_domain__project=project) - else: - vulnerabilities = Vulnerability.objects.all() - + vulnerabilities = ( + Vulnerability.objects.filter(target_domain__project__slug=project_slug) + if project_slug else Vulnerability.objects.all() + ) if scan_history_id: - vuln_query = ( - vulnerabilities - .filter(scan_history__id=scan_history_id) - .values("name", "severity") - ) - if is_ignore_info: - most_common_vulnerabilities = ( - vuln_query - .exclude(severity=0) - .annotate(count=Count('name')) - .order_by("-count")[:limit] - ) - else: - most_common_vulnerabilities = ( - vuln_query - .annotate(count=Count('name')) - .order_by("-count")[:limit] - ) - + vuln_query = vulnerabilities.filter(scan_history__id=scan_history_id).values("name", "severity") elif target_id: vuln_query = vulnerabilities.filter(target_domain__id=target_id).values("name", "severity") - if is_ignore_info: - most_common_vulnerabilities = ( - vuln_query - .exclude(severity=0) - .annotate(count=Count('name')) - .order_by("-count")[:limit] - ) - else: - most_common_vulnerabilities = ( - vuln_query - .annotate(count=Count('name')) - .order_by("-count")[:limit] - ) - else: vuln_query = vulnerabilities.values("name", "severity") - if is_ignore_info: - most_common_vulnerabilities = ( - vuln_query.exclude(severity=0) - .annotate(count=Count('name')) - .order_by("-count")[:limit] - ) - else: - most_common_vulnerabilities = ( - vuln_query.annotate(count=Count('name')) - .order_by("-count")[:limit] - ) + if is_ignore_info: + most_common_vulnerabilities = ( + vuln_query.exclude(severity=0) + .annotate(count=Count('name')) + .order_by("-count")[:limit] + ) + else: + most_common_vulnerabilities = ( + vuln_query.annotate(count=Count('name')) + .order_by("-count")[:limit] + ) - most_common_vulnerabilities = [vuln for vuln in most_common_vulnerabilities] + most_common_vulnerabilities = list(most_common_vulnerabilities) if most_common_vulnerabilities: response['status'] = True response['result'] = most_common_vulnerabilities + except Exception as e: - print(str(e)) - response = {} + print(e) return Response(response) @@ -458,9 +520,9 @@ def post(self, request): data = req.data project_slug = data.get('slug') - scan_history_id = data.get('scan_history_id') - target_id = data.get('target_id') - limit = data.get('limit', 20) + scan_history_id = safe_int_cast(data.get('scan_history_id')) + target_id = safe_int_cast(data.get('target_id')) + limit = safe_int_cast(data.get('limit', 20)) is_ignore_info = data.get('ignore_info', False) response = {} @@ -580,11 +642,21 @@ def post(self, request): req = self.request data = req.data - subdomain_id = data.get('subdomain_id') - scan_history_id = 
data.get('scan_history_id') + subdomain_id = safe_int_cast(data.get('subdomain_id')) + scan_history_id = safe_int_cast(data.get('scan_history_id')) title = data.get('title') description = data.get('description') project = data.get('project') + + if subdomain_id is None: + return Response({"status": False, "error": "Subdomain ID is required."}, status=400) + if scan_history_id is None: + return Response({"status": False, "error": "Scan history ID is required."}, status=400) + if not title: + return Response({"status": False, "error": "Title is required."}, status=400) + if not project: + return Response({"status": False, "error": "Project is required."}, status=400) + try: project = Project.objects.get(slug=project) @@ -620,7 +692,7 @@ def post(self, request): req = self.request data = req.data - subdomain_id = data.get('subdomain_id') + subdomain_id = safe_int_cast(data.get('subdomain_id')) response = {'status': False, 'message': 'No subdomain_id provided'} @@ -634,56 +706,60 @@ def post(self, request): class AddTarget(APIView): - def post(self, request): - req = self.request - data = req.data - h1_team_handle = data.get('h1_team_handle') - description = data.get('description') - domain_name = data.get('domain_name') - organization_name = data.get('organization') - slug = data.get('slug') - - # Validate domain name - if not validators.domain(domain_name): - return Response({'status': False, 'message': 'Invalid domain or IP'}) - - project = Project.objects.get(slug=slug) - - # Create domain object in DB - domain, _ = Domain.objects.get_or_create(name=domain_name) - domain.project = project - domain.h1_team_handle = h1_team_handle - domain.description = description - if not domain.insert_date: - domain.insert_date = timezone.now() - domain.save() - - # Create org object in DB - if organization_name: - organization_obj = None - organization_query = Organization.objects.filter(name=organization_name) - if organization_query.exists(): - organization_obj = organization_query[0] - else: - organization_obj = Organization.objects.create( - name=organization_name, - project=project, - insert_date=timezone.now()) - organization_obj.domains.add(domain) + def post(self, request): + req = self.request + data = req.data + h1_team_handle = data.get('h1_team_handle') + description = data.get('description') + domain_name = data.get('domain_name') + organization_name = data.get('organization') + slug = data.get('slug') + + # Validate domain name + if not validators.domain(domain_name): + return Response({'status': False, 'message': 'Invalid domain or IP'}, status=400) + + project = Project.objects.get(slug=slug) + + # Check if the domain already exists + if Domain.objects.filter(name=domain_name, project=project).exists(): + return Response({'status': False, 'message': 'Domain already exists as a target!'}, status=400) + + # Create domain object in DB + domain, _ = Domain.objects.get_or_create(name=domain_name) + domain.project = project + domain.h1_team_handle = h1_team_handle + domain.description = description + if not domain.insert_date: + domain.insert_date = timezone.now() + domain.save() + + # Create org object in DB + if organization_name: + organization_obj = None + organization_query = Organization.objects.filter(name=organization_name) + if organization_query.exists(): + organization_obj = organization_query[0] + else: + organization_obj = Organization.objects.create( + name=organization_name, + project=project, + insert_date=timezone.now()) + organization_obj.domains.add(domain) - return 
Response({ - 'status': True, - 'message': 'Domain successfully added as target !', - 'domain_name': domain_name, - 'domain_id': domain.id - }) + return Response({ + 'status': True, + 'message': 'Domain successfully added as target!', + 'domain_name': domain_name, + 'domain_id': domain.id + }) class FetchSubscanResults(APIView): def get(self, request): req = self.request # data = req.data - subscan_id = req.query_params.get('subscan_id') + subscan_id = safe_int_cast(req.query_params.get('subscan_id')) subscan = SubScan.objects.filter(id=subscan_id) if not subscan.exists(): return Response({ @@ -729,9 +805,9 @@ class ListSubScans(APIView): def post(self, request): req = self.request data = req.data - subdomain_id = data.get('subdomain_id', None) - scan_history = data.get('scan_history_id', None) - domain_id = data.get('domain_id', None) + subdomain_id = safe_int_cast(data.get('subdomain_id', None)) + scan_history = safe_int_cast(data.get('scan_history_id', None)) + domain_id = safe_int_cast(data.get('domain_id', None)) response = {} response['status'] = False @@ -776,24 +852,23 @@ class DeleteMultipleRows(APIView): def post(self, request): req = self.request data = req.data - + subscan_ids = get_data_from_post_request(request, 'rows') try: if data['type'] == 'subscan': - for row in data['rows']: - SubScan.objects.get(id=row).delete() - response = True + subscan_ids = [int(id) for id in subscan_ids] + SubScan.objects.filter(id__in=subscan_ids).delete() + return Response({'status': True}) + except ValueError: + return Response({'status': False, 'message': 'Invalid subscan ID provided'}, status=400) except Exception as e: - response = False - - return Response({'status': response}) - + return Response({'status': False, 'message': logger.debug(e)}, status=500) class StopScan(APIView): def post(self, request): req = self.request data = req.data - scan_id = data.get('scan_id') - subscan_id = data.get('subscan_id') + scan_id = safe_int_cast(data.get('scan_id')) + subscan_id = safe_int_cast(data.get('subscan_id')) response = {} task_ids = [] scan = None @@ -854,12 +929,21 @@ def post(self, request): class InitiateSubTask(APIView): + parser_classes = [JSONParser] + def post(self, request): - req = self.request - data = req.data - engine_id = data.get('engine_id') - scan_types = data['tasks'] - for subdomain_id in data['subdomain_ids']: + data = request.data + engine_id = safe_int_cast(data.get('engine_id')) + scan_types = data.get('tasks', []) + subdomain_ids = safe_int_cast(data.get('subdomain_ids', [])) + + if not scan_types or not subdomain_ids: + return Response({'status': False, 'error': 'Missing tasks or subdomain_ids'}, status=400) + + if isinstance(subdomain_ids, int): + subdomain_ids = [subdomain_ids] + + for subdomain_id in subdomain_ids: logger.info(f'Running subscans {scan_types} on subdomain "{subdomain_id}" ...') for stype in scan_types: ctx = { @@ -873,20 +957,33 @@ def post(self, request): class DeleteSubdomain(APIView): - def post(self, request): - req = self.request - for id in req.data['subdomain_ids']: - Subdomain.objects.get(id=id).delete() - return Response({'status': True}) - + def post(self, request): + subdomain_ids = get_data_from_post_request(request, 'subdomain_ids') + try: + subdomain_ids = [int(id) for id in subdomain_ids] + Subdomain.objects.filter(id__in=subdomain_ids).delete() + return Response({'status': True}) + except ValueError: + return Response({'status': False, 'message': 'Invalid subdomain ID provided'}, status=400) + except Exception as e: + return 
Response({'status': False, 'message': logger.debug(e)}, status=500) class DeleteVulnerability(APIView): - def post(self, request): - req = self.request - for id in req.data['vulnerability_ids']: - Vulnerability.objects.get(id=id).delete() - return Response({'status': True}) + def post(self, request): + vulnerability_ids = get_data_from_post_request(request, 'vulnerability_ids') + + # Check if vulnerability_ids is iterable + if not isinstance(vulnerability_ids, (list, tuple)): + return Response({'status': False, 'message': 'vulnerability_ids must be a list or tuple'}, status=400) + try: + # Convert to integers + vulnerability_ids = [int(id) for id in vulnerability_ids] + # Delete vulnerabilities + Vulnerability.objects.filter(id__in=vulnerability_ids).delete() + return Response({'status': True}) + except ValueError: + return Response({'status': False, 'message': 'Invalid vulnerability ID provided'}, status=400) class ListInterestingKeywords(APIView): def get(self, request, format=None): @@ -939,7 +1036,7 @@ def get(self, request): class UninstallTool(APIView): def get(self, request): req = self.request - tool_id = req.query_params.get('tool_id') + tool_id = safe_int_cast(req.query_params.get('tool_id')) tool_name = req.query_params.get('name') if tool_id: @@ -977,7 +1074,7 @@ def get(self, request): class UpdateTool(APIView): def get(self, request): req = self.request - tool_id = req.query_params.get('tool_id') + tool_id = safe_int_cast(req.query_params.get('tool_id')) tool_name = req.query_params.get('name') if tool_id: @@ -1006,7 +1103,7 @@ class GetExternalToolCurrentVersion(APIView): def get(self, request): req = self.request # toolname is also the command - tool_id = req.query_params.get('tool_id') + tool_id = safe_int_cast(req.query_params.get('tool_id')) tool_name = req.query_params.get('name') # can supply either tool id or tool_name @@ -1038,7 +1135,7 @@ class GithubToolCheckGetLatestRelease(APIView): def get(self, request): req = self.request - tool_id = req.query_params.get('tool_id') + tool_id = safe_int_cast(req.query_params.get('tool_id')) tool_name = req.query_params.get('name') if not InstalledExternalTool.objects.filter(id=tool_id).exists(): @@ -1187,6 +1284,7 @@ class IPToDomain(APIView): def get(self, request): req = self.request ip_address = req.query_params.get('ip_address') + response = {} if not ip_address: return Response({ 'status': False, @@ -1225,7 +1323,7 @@ def get(self, request): class VulnerabilityReport(APIView): def get(self, request): req = self.request - vulnerability_id = req.query_params.get('vulnerability_id') + vulnerability_id = safe_int_cast(req.query_params.get('vulnerability_id')) return Response({"status": send_hackerone_report(vulnerability_id)}) @@ -1242,9 +1340,9 @@ def get(self, request, format=None): if not os.path.exists(path): run_command(f'touch {path}') response['message'] = 'File Created!' - f = open(path, "r") - response['status'] = True - response['content'] = f.read() + with open(path, "r") as f: + response['status'] = True + response['content'] = f.read() return Response(response) if 'subfinder_config' in req.query_params: @@ -1252,9 +1350,9 @@ def get(self, request, format=None): if not os.path.exists(path): run_command(f'touch {path}') response['message'] = 'File Created!' 
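# --- Editorial sketch, not part of the original patch ---
# The delete endpoints above (DeleteMultipleRows, DeleteSubdomain,
# DeleteVulnerability) read their id lists through
# get_data_from_post_request() from reNgine.common_func, which this diff
# imports but does not define. A minimal sketch, assuming the helper only
# needs to tolerate both form-encoded and JSON bodies, might look like
# this; it is illustrative, not the project's actual implementation.

def get_data_from_post_request(request, key):
    """Return the value(s) stored under `key` in a DRF POST request.

    Uses getlist() for form data so repeated keys ("rows", "subdomain_ids")
    come back as a list, and falls back to plain .get() for JSON bodies.
    """
    data = getattr(request, 'data', {})
    if hasattr(data, 'getlist'):
        values = data.getlist(key)
        return values if values else data.get(key)
    return data.get(key)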
- f = open(path, "r") - response['status'] = True - response['content'] = f.read() + with open(path, "r") as f: + response['status'] = True + response['content'] = f.read() return Response(response) if 'naabu_config' in req.query_params: @@ -1262,9 +1360,9 @@ def get(self, request, format=None): if not os.path.exists(path): run_command(f'touch {path}') response['message'] = 'File Created!' - f = open(path, "r") - response['status'] = True - response['content'] = f.read() + with open(path, "r") as f: + response['status'] = True + response['content'] = f.read() return Response(response) if 'theharvester_config' in req.query_params: @@ -1272,9 +1370,9 @@ def get(self, request, format=None): if not os.path.exists(path): run_command(f'touch {path}') response['message'] = 'File Created!' - f = open(path, "r") - response['status'] = True - response['content'] = f.read() + with open(path, "r") as f: + response['status'] = True + response['content'] = f.read() return Response(response) if 'amass_config' in req.query_params: @@ -1282,16 +1380,17 @@ def get(self, request, format=None): if not os.path.exists(path): run_command(f'touch {path}') response['message'] = 'File Created!' - f = open(path, "r") - response['status'] = True - response['content'] = f.read() + with open(path, "r") as f: + response['status'] = True + response['content'] = f.read() return Response(response) if 'gf_pattern' in req.query_params: basedir = str(Path.home() / '.gf') path = str(Path.home() / '.gf' / f'{name}.json') if is_safe_path(basedir, path) and os.path.exists(path): - content = open(path, "r").read() + with open(path, "r") as f: + content = f.read() response['status'] = True response['content'] = content else: @@ -1304,7 +1403,8 @@ def get(self, request, format=None): safe_dir = str(Path.home() / 'nuclei-templates') path = str(Path.home() / 'nuclei-templates' / f'{name}') if is_safe_path(safe_dir, path) and os.path.exists(path): - content = open(path.format(name), "r").read() + with open(path.format(name), "r") as f: + content = f.read() response['status'] = True response['content'] = content else: @@ -1317,9 +1417,9 @@ def get(self, request, format=None): if not os.path.exists(path): run_command(f'touch {path}') response['message'] = 'File Created!' 
- f = open(path, "r") - response['status'] = True - response['content'] = f.read() + with open(path, "r") as f: + response['status'] = True + response['content'] = f.read() return Response(response) response['message'] = 'Invalid Query Params' @@ -1342,13 +1442,13 @@ class ListTodoNotes(APIView): def get(self, request, format=None): req = self.request notes = TodoNote.objects.all().order_by('-id') - scan_id = req.query_params.get('scan_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) project = req.query_params.get('project') if project: notes = notes.filter(project__slug=project) - target_id = req.query_params.get('target_id') + target_id = safe_int_cast(req.query_params.get('target_id')) todo_id = req.query_params.get('todo_id') - subdomain_id = req.query_params.get('subdomain_id') + subdomain_id = safe_int_cast(req.query_params.get('subdomain_id')) if target_id: notes = notes.filter(scan_history__in=ScanHistory.objects.filter(domain__id=target_id)) elif scan_id: @@ -1382,7 +1482,6 @@ def get(self, request, format=None): class ListOrganizations(APIView): def get(self, request, format=None): - req = self.request organizations = Organization.objects.all() organization_serializer = OrganizationSerializer(organizations, many=True) return Response({'organizations': organization_serializer.data}) @@ -1391,7 +1490,7 @@ def get(self, request, format=None): class ListTargetsInOrganization(APIView): def get(self, request, format=None): req = self.request - organization_id = req.query_params.get('organization_id') + organization_id = safe_int_cast(req.query_params.get('organization_id')) organization = Organization.objects.filter(id=organization_id) targets = Domain.objects.filter(domains__in=organization) organization_serializer = OrganizationSerializer(organization, many=True) @@ -1410,7 +1509,7 @@ def get(self, request, format=None): class VisualiseData(APIView): def get(self, request, format=None): req = self.request - scan_id = req.query_params.get('scan_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) if scan_id: mitch_data = ScanHistory.objects.filter(id=scan_id) serializer = VisualiseDataSerializer(mitch_data, many=True) @@ -1452,37 +1551,31 @@ def process_visualisation_data(self, data): return processed_data class ListTechnology(APIView): - def get(self, request, format=None): - req = self.request - scan_id = req.query_params.get('scan_id') - target_id = req.query_params.get('target_id') + def get(self, request, format=None): + req = self.request + scan_id = safe_int_cast(req.query_params.get('scan_id')) + target_id = safe_int_cast(req.query_params.get('target_id')) + + # Determine the queryset based on the presence of target_id or scan_id + if target_id: + subdomain_filter = Subdomain.objects.filter(target_domain__id=target_id) + elif scan_id: + subdomain_filter = Subdomain.objects.filter(scan_history__id=scan_id) + else: + subdomain_filter = Subdomain.objects.all() - if target_id: - tech = Technology.objects.filter( - technologies__in=Subdomain.objects.filter( - target_domain__id=target_id)).annotate( - count=Count('name')).order_by('-count') - serializer = TechnologyCountSerializer(tech, many=True) - return Response({"technologies": serializer.data}) - elif scan_id: - tech = Technology.objects.filter( - technologies__in=Subdomain.objects.filter( - scan_history__id=scan_id)).annotate( - count=Count('name')).order_by('-count') - serializer = TechnologyCountSerializer(tech, many=True) - return Response({"technologies": serializer.data}) - else: - tech = 
Technology.objects.filter( - technologies__in=Subdomain.objects.all()).annotate( - count=Count('name')).order_by('-count') - serializer = TechnologyCountSerializer(tech, many=True) - return Response({"technologies": serializer.data}) + # Fetch technologies and serialize the results + tech = Technology.objects.filter(technologies__in=subdomain_filter).annotate( + count=Count('name')).order_by('-count') + serializer = TechnologyCountSerializer(tech, many=True) + + return Response({"technologies": serializer.data}) class ListDorkTypes(APIView): def get(self, request, format=None): req = self.request - scan_id = req.query_params.get('scan_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) if scan_id: dork = Dork.objects.filter( dorks__in=ScanHistory.objects.filter(id=scan_id) @@ -1500,7 +1593,7 @@ def get(self, request, format=None): class ListEmails(APIView): def get(self, request, format=None): req = self.request - scan_id = req.query_params.get('scan_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) if scan_id: email = Email.objects.filter( emails__in=ScanHistory.objects.filter(id=scan_id)).order_by('password') @@ -1511,7 +1604,7 @@ def get(self, request, format=None): class ListDorks(APIView): def get(self, request, format=None): req = self.request - scan_id = req.query_params.get('scan_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) type = req.query_params.get('type') if scan_id: dork = Dork.objects.filter( @@ -1534,7 +1627,7 @@ def get(self, request, format=None): class ListEmployees(APIView): def get(self, request, format=None): req = self.request - scan_id = req.query_params.get('scan_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) if scan_id: employee = Employee.objects.filter( employees__in=ScanHistory.objects.filter(id=scan_id)) @@ -1545,8 +1638,8 @@ def get(self, request, format=None): class ListPorts(APIView): def get(self, request, format=None): req = self.request - scan_id = req.query_params.get('scan_id') - target_id = req.query_params.get('target_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) + target_id = safe_int_cast(req.query_params.get('target_id')) ip_address = req.query_params.get('ip_address') if target_id: @@ -1574,9 +1667,9 @@ def get(self, request, format=None): class ListSubdomains(APIView): def get(self, request, format=None): req = self.request - scan_id = req.query_params.get('scan_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) project = req.query_params.get('project') - target_id = req.query_params.get('target_id') + target_id = safe_int_cast(req.query_params.get('target_id')) ip_address = req.query_params.get('ip_address') port = req.query_params.get('port') tech = req.query_params.get('tech') @@ -1633,7 +1726,7 @@ def post(self, req): class ListOsintUsers(APIView): def get(self, request, format=None): req = self.request - scan_id = req.query_params.get('scan_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) if scan_id: documents = MetaFinderDocument.objects.filter(scan_history__id=scan_id).exclude(author__isnull=True).values('author').distinct() serializer = MetafinderUserSerializer(documents, many=True) @@ -1643,7 +1736,7 @@ def get(self, request, format=None): class ListMetadata(APIView): def get(self, request, format=None): req = self.request - scan_id = req.query_params.get('scan_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) if scan_id: documents = MetaFinderDocument.objects.filter(scan_history__id=scan_id).distinct() serializer = 
MetafinderDocumentSerializer(documents, many=True) @@ -1653,8 +1746,8 @@ def get(self, request, format=None): class ListIPs(APIView): def get(self, request, format=None): req = self.request - scan_id = req.query_params.get('scan_id') - target_id = req.query_params.get('target_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) + target_id = safe_int_cast(req.query_params.get('target_id')) port = req.query_params.get('port') @@ -1683,10 +1776,11 @@ def get(self, request, format=None): class IpAddressViewSet(viewsets.ModelViewSet): queryset = Subdomain.objects.none() serializer_class = IpSubdomainSerializer + ordering = ('name',) def get_queryset(self): req = self.request - scan_id = req.query_params.get('scan_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) if scan_id: self.queryset = Subdomain.objects.filter( @@ -1701,16 +1795,17 @@ def paginate_queryset(self, queryset, view=None): if 'no_page' in self.request.query_params: return None return self.paginator.paginate_queryset( - queryset, self.request, view=self) + queryset.order_by(*self.ordering), self.request, view=self) class SubdomainsViewSet(viewsets.ModelViewSet): queryset = Subdomain.objects.none() serializer_class = SubdomainSerializer + ordering = ('name',) def get_queryset(self): req = self.request - scan_id = req.query_params.get('scan_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) if scan_id: if 'only_screenshot' in self.request.query_params: return ( @@ -1723,7 +1818,7 @@ def paginate_queryset(self, queryset, view=None): if 'no_page' in self.request.query_params: return None return self.paginator.paginate_queryset( - queryset, self.request, view=self) + queryset.order_by(*self.ordering), self.request, view=self) class SubdomainChangesViewSet(viewsets.ModelViewSet): @@ -1735,12 +1830,13 @@ class SubdomainChangesViewSet(viewsets.ModelViewSet): ''' queryset = Subdomain.objects.none() serializer_class = SubdomainChangesSerializer + ordering = ('name',) def get_queryset(self): req = self.request - scan_id = req.query_params.get('scan_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) changes = req.query_params.get('changes') - domain_id = ScanHistory.objects.filter(id=scan_id)[0].domain.id + domain_id = safe_int_cast(ScanHistory.objects.filter(id=safe_int_cast(scan_id)).first().domain.id) scan_history_query = ( ScanHistory.objects .filter(domain=domain_id) @@ -1805,7 +1901,7 @@ def paginate_queryset(self, queryset, view=None): if 'no_page' in self.request.query_params: return None return self.paginator.paginate_queryset( - queryset, self.request, view=self) + queryset.order_by(*self.ordering), self.request, view=self) class EndPointChangesViewSet(viewsets.ModelViewSet): @@ -1814,12 +1910,13 @@ class EndPointChangesViewSet(viewsets.ModelViewSet): ''' queryset = EndPoint.objects.none() serializer_class = EndPointChangesSerializer + ordering = ('http_url',) def get_queryset(self): req = self.request - scan_id = req.query_params.get('scan_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) changes = req.query_params.get('changes') - domain_id = ScanHistory.objects.filter(id=scan_id).first().domain.id + domain_id = safe_int_cast(ScanHistory.objects.filter(id=scan_id).first().domain.id) scan_history = ( ScanHistory.objects .filter(domain=domain_id) @@ -1876,17 +1973,18 @@ def paginate_queryset(self, queryset, view=None): if 'no_page' in self.request.query_params: return None return self.paginator.paginate_queryset( - queryset, self.request, view=self) + 
queryset.order_by(*self.ordering), self.request, view=self) class InterestingSubdomainViewSet(viewsets.ModelViewSet): queryset = Subdomain.objects.none() serializer_class = SubdomainSerializer + ordering = ('name',) def get_queryset(self): req = self.request - scan_id = req.query_params.get('scan_id') - domain_id = req.query_params.get('target_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) + domain_id = safe_int_cast(req.query_params.get('target_id')) if 'only_subdomains' in self.request.query_params: self.serializer_class = InterestingSubdomainSerializer @@ -1930,17 +2028,19 @@ def paginate_queryset(self, queryset, view=None): if 'no_page' in self.request.query_params: return None return self.paginator.paginate_queryset( - queryset, self.request, view=self) + queryset.order_by(*self.ordering), self.request, view=self) class InterestingEndpointViewSet(viewsets.ModelViewSet): queryset = EndPoint.objects.none() serializer_class = EndpointSerializer + ordering = ('http_url',) def get_queryset(self): req = self.request - scan_id = req.query_params.get('scan_id') - target_id = req.query_params.get('target_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) + target_id = safe_int_cast(req.query_params.get('target_id')) + if 'only_endpoints' in self.request.query_params: self.serializer_class = InterestingEndPointSerializer if scan_id: @@ -1954,7 +2054,7 @@ def paginate_queryset(self, queryset, view=None): if 'no_page' in self.request.query_params: return None return self.paginator.paginate_queryset( - queryset, self.request, view=self) + queryset.order_by(*self.ordering), self.request, view=self) class SubdomainDatatableViewSet(viewsets.ModelViewSet): @@ -1963,8 +2063,8 @@ class SubdomainDatatableViewSet(viewsets.ModelViewSet): def get_queryset(self): req = self.request - scan_id = req.query_params.get('scan_id') - target_id = req.query_params.get('target_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) + target_id = safe_int_cast(req.query_params.get('target_id')) url_query = req.query_params.get('query_param') ip_address = req.query_params.get('ip_address') name = req.query_params.get('name') @@ -2219,8 +2319,8 @@ class ListActivityLogsViewSet(viewsets.ModelViewSet): queryset = Command.objects.none() def get_queryset(self): req = self.request - activity_id = req.query_params.get('activity_id') - self.queryset = Command.objects.filter(activity__id=activity_id) + activity_id = safe_int_cast(req.query_params.get('activity_id')) + self.queryset = Command.objects.filter(activity__id=activity_id).order_by('id') return self.queryset @@ -2229,8 +2329,8 @@ class ListScanLogsViewSet(viewsets.ModelViewSet): queryset = Command.objects.none() def get_queryset(self): req = self.request - scan_id = req.query_params.get('scan_id') - self.queryset = Command.objects.filter(scan_history__id=scan_id) + scan_id = safe_int_cast(req.query_params.get('scan_id')) + self.queryset = Command.objects.filter(scan_history__id=scan_id).order_by('id') return self.queryset @@ -2238,8 +2338,8 @@ class ListEndpoints(APIView): def get(self, request, format=None): req = self.request - scan_id = req.query_params.get('scan_id') - target_id = req.query_params.get('target_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) + target_id = safe_int_cast(req.query_params.get('target_id')) subdomain_name = req.query_params.get('subdomain_name') pattern = req.query_params.get('pattern') @@ -2279,10 +2379,10 @@ class EndPointViewSet(viewsets.ModelViewSet): def get_queryset(self): req = 
self.request - scan_id = req.query_params.get('scan_history') - target_id = req.query_params.get('target_id') + scan_id = safe_int_cast(req.query_params.get('scan_history')) + target_id = safe_int_cast(req.query_params.get('target_id')) url_query = req.query_params.get('query_param') - subdomain_id = req.query_params.get('subdomain_id') + subdomain_id = safe_int_cast(req.query_params.get('subdomain_id')) project = req.query_params.get('project') endpoints_obj = EndPoint.objects.filter(scan_history__domain__project__slug=project) @@ -2295,15 +2395,17 @@ def get_queryset(self): endpoints_obj .filter(scan_history__id=scan_id) .distinct() + .order_by('id') ) else: - endpoints = endpoints_obj.distinct() + endpoints = endpoints_obj.distinct().order_by('id') if url_query: endpoints = ( endpoints .filter(Q(target_domain__name=url_query)) .distinct() + .order_by('id') ) if gf_tag: @@ -2516,31 +2618,28 @@ def special_lookup(self, search_value): class DirectoryViewSet(viewsets.ModelViewSet): - queryset = DirectoryFile.objects.none() - serializer_class = DirectoryFileSerializer + queryset = DirectoryFile.objects.none() + serializer_class = DirectoryFileSerializer - def get_queryset(self): - req = self.request - scan_id = req.query_params.get('scan_history') - subdomain_id = req.query_params.get('subdomain_id') - subdomains = None - if not (scan_id or subdomain_id): - return Response({ - 'status': False, - 'message': 'Scan id or subdomain id must be provided.' - }) - elif scan_id: - subdomains = Subdomain.objects.filter(scan_history__id=scan_id) - elif subdomain_id: - subdomains = Subdomain.objects.filter(id=subdomain_id) - dirs_scans = DirectoryScan.objects.filter(directories__in=subdomains) - qs = ( - DirectoryFile.objects - .filter(directory_files__in=dirs_scans) - .distinct() - ) - self.queryset = qs - return self.queryset + def get_queryset(self): + req = self.request + scan_id = safe_int_cast(req.query_params.get('scan_history')) + subdomain_id = safe_int_cast(req.query_params.get('subdomain_id')) + subdomains = None + if not (scan_id or subdomain_id): + return DirectoryFile.objects.none() + elif scan_id: + subdomains = Subdomain.objects.filter(scan_history__id=scan_id) + elif subdomain_id: + subdomains = Subdomain.objects.filter(id=subdomain_id) + dirs_scans = DirectoryScan.objects.filter(directories__in=subdomains) + qs = ( + DirectoryFile.objects + .filter(directory_files__in=dirs_scans) + .distinct() + .order_by('id') + ) + return qs class VulnerabilityViewSet(viewsets.ModelViewSet): @@ -2549,11 +2648,11 @@ class VulnerabilityViewSet(viewsets.ModelViewSet): def get_queryset(self): req = self.request - scan_id = req.query_params.get('scan_history') - target_id = req.query_params.get('target_id') + scan_id = safe_int_cast(req.query_params.get('scan_history')) + target_id = safe_int_cast(req.query_params.get('target_id')) domain = req.query_params.get('domain') severity = req.query_params.get('severity') - subdomain_id = req.query_params.get('subdomain_id') + subdomain_id = safe_int_cast(req.query_params.get('subdomain_id')) subdomain_name = req.query_params.get('subdomain') vulnerability_name = req.query_params.get('vulnerability_name') slug = self.request.GET.get('project', None) diff --git a/web/dashboard/fixtures/dashboard.json b/web/dashboard/fixtures/dashboard.json new file mode 100644 index 00000000..75dde2cf --- /dev/null +++ b/web/dashboard/fixtures/dashboard.json @@ -0,0 +1,37 @@ +[ +{ + "model": "dashboard.project", + "pk": 1, + "fields": { + "name": "Default", + "slug": "default", + 
"insert_date": "2024-09-03T21:23:21.459Z" + } +}, +{ + "model": "dashboard.project", + "pk": 2, + "fields": { + "name": "My Project", + "slug": "my-project", + "insert_date": "2024-09-04T00:32:08.839Z" + } +}, +{ + "model": "dashboard.project", + "pk": 3, + "fields": { + "name": "My Other Project", + "slug": "my-other-project", + "insert_date": "2024-09-04T00:32:31.475Z" + } +}, +{ + "model": "dashboard.ollamasettings", + "pk": 1, + "fields": { + "selected_model": "gpt-3", + "use_ollama": false + } +} +] diff --git a/web/dashboard/migrations/0006_project_insert_date.py b/web/dashboard/migrations/0006_project_insert_date.py index 6d51f4e2..8f6e116f 100644 --- a/web/dashboard/migrations/0006_project_insert_date.py +++ b/web/dashboard/migrations/0006_project_insert_date.py @@ -1,7 +1,7 @@ # Generated by Django 3.2.4 on 2023-07-06 09:08 from django.db import migrations, models - +from django.utils import timezone class Migration(migrations.Migration): @@ -13,7 +13,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='project', name='insert_date', - field=models.DateTimeField(default='2023-06-06'), + field=models.DateTimeField(default=timezone.now), preserve_default=False, ), - ] + ] \ No newline at end of file diff --git a/web/dashboard/tests.py b/web/dashboard/tests.py deleted file mode 100644 index 7ce503c2..00000000 --- a/web/dashboard/tests.py +++ /dev/null @@ -1,3 +0,0 @@ -from django.test import TestCase - -# Create your tests here. diff --git a/web/dashboard/tests/__init__.py b/web/dashboard/tests/__init__.py new file mode 100644 index 00000000..7936b984 --- /dev/null +++ b/web/dashboard/tests/__init__.py @@ -0,0 +1,2 @@ +from utils.test_base import * +from .test_dashboard import * diff --git a/web/dashboard/tests/test_dashboard.py b/web/dashboard/tests/test_dashboard.py new file mode 100644 index 00000000..2c0fd174 --- /dev/null +++ b/web/dashboard/tests/test_dashboard.py @@ -0,0 +1,88 @@ +""" +This file contains the test cases for the dashboard views. 
+""" +import json +from unittest.mock import patch, MagicMock +from django.urls import reverse +from utils.test_base import BaseTestCase + +__all__ = [ + 'TestDashboardViews' +] + +class TestDashboardViews(BaseTestCase): + """Test cases for dashboard views.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + def test_index_view(self): + """Test the index view of the dashboard.""" + response = self.client.get(reverse('dashboardIndex', kwargs={'slug': self.data_generator.project.slug})) + self.assertEqual(response.status_code, 200) + self.assertIn('dashboard_data_active', response.context) + dashboard_data = response.context['dashboard_data_active'] + self.assertIsInstance(dashboard_data, str) + self.assertIn('active', dashboard_data) + + def test_profile_view(self): + """Test the profile view.""" + response = self.client.get(reverse('profile', kwargs={'slug': self.data_generator.project.slug})) + self.assertEqual(response.status_code, 200) + self.assertIn('form', response.context) + self.assertEqual(response.context['current_project'], self.data_generator.project) + + @patch('dashboard.views.get_user_model') + def test_admin_interface_view(self, mock_get_user_model): + """Test the admin interface view.""" + mock_user_model = mock_get_user_model.return_value + mock_queryset = MagicMock() + mock_queryset.order_by.return_value = mock_queryset + mock_user_model.objects.all.return_value = mock_queryset + response = self.client.get(reverse('admin_interface', kwargs={'slug': self.data_generator.project.slug})) + self.assertEqual(response.status_code, 200) + self.assertIn('users', response.context) + + @patch('dashboard.views.get_user_model') + def test_admin_interface_update_view(self, mock_get_user_model): + """Test the admin interface update view.""" + mock_user_model = mock_get_user_model.return_value + mock_user_model.objects.get.return_value = self.user + response = self.client.get(reverse('admin_interface_update', kwargs={'slug': self.data_generator.project.slug}), {'mode': 'change_status', 'user': 1}) + self.assertEqual(response.status_code, 302) + + def test_search_view(self): + """Test the search view.""" + response = self.client.get(reverse('search', kwargs={'slug': self.data_generator.project.slug})) + self.assertEqual(response.status_code, 200) + + def test_projects_view(self): + """Test the projects view.""" + response = self.client.get(reverse('list_projects', kwargs={'slug': self.data_generator.project.slug})) + self.assertEqual(response.status_code, 200) + self.assertIn('projects', response.context) + + def test_delete_project_view(self): + """Test the delete project view.""" + response = self.client.post(reverse('delete_project', args=[self.data_generator.project.id])) + self.assertEqual(response.status_code, 200) + self.assertEqual(json.loads(response.content), {'status': 'true'}) + + @patch('dashboard.views.Project.objects.create') + @patch('dashboard.views.get_user_model') + def test_onboarding_view(self, mock_get_user_model, mock_project_create): + """Test the onboarding view.""" + mock_project_create.return_value = self.data_generator.project + mock_user_model = mock_get_user_model.return_value + mock_user_model.objects.create_user.return_value = MagicMock() + response = self.client.post(reverse('onboarding'), { + 'project_name': 'New Project', + 'create_username': 'newuser', + 'create_password': 'newpass', + 'create_user_role': 'admin', + 'key_openai': 'openai_key', + 'key_netlas': 'netlas_key' + }) + 
self.assertEqual(response.status_code, 302) diff --git a/web/dashboard/views.py b/web/dashboard/views.py index 494d9256..f921bc98 100644 --- a/web/dashboard/views.py +++ b/web/dashboard/views.py @@ -1,7 +1,7 @@ import json import logging -from datetime import timedelta +from datetime import datetime, timedelta from django.contrib.auth import get_user_model from django.contrib import messages @@ -97,11 +97,12 @@ def index(request, slug): endpoints_in_last_week = [] for date in last_7_dates: - _target = count_targets_by_date.filter(date=date) - _subdomain = count_subdomains_by_date.filter(date=date) - _vuln = count_vulns_by_date.filter(date=date) - _scan = count_scans_by_date.filter(date=date) - _endpoint = count_endpoints_by_date.filter(date=date) + aware_date = timezone.make_aware(datetime.combine(date, datetime.min.time())) + _target = count_targets_by_date.filter(date=aware_date) + _subdomain = count_subdomains_by_date.filter(date=aware_date) + _vuln = count_vulns_by_date.filter(date=aware_date) + _scan = count_scans_by_date.filter(date=aware_date) + _endpoint = count_endpoints_by_date.filter(date=aware_date) if _target: targets_in_last_week.append(_target[0]['created_count']) else: @@ -273,11 +274,12 @@ def on_user_logged_out(sender, request, **kwargs): @receiver(user_logged_in) def on_user_logged_in(sender, request, **kwargs): + user = kwargs.get('user') messages.add_message( request, messages.INFO, 'Hi @' + - request.user.username + + user.username + ' welcome back!') diff --git a/web/fixtures/auth.json b/web/fixtures/auth.json new file mode 100644 index 00000000..a22e89ef --- /dev/null +++ b/web/fixtures/auth.json @@ -0,0 +1,2347 @@ +[ +{ + "model": "auth.permission", + "pk": 1, + "fields": { + "name": "Can add log entry", + "content_type": 1, + "codename": "add_logentry" + } +}, +{ + "model": "auth.permission", + "pk": 2, + "fields": { + "name": "Can change log entry", + "content_type": 1, + "codename": "change_logentry" + } +}, +{ + "model": "auth.permission", + "pk": 3, + "fields": { + "name": "Can delete log entry", + "content_type": 1, + "codename": "delete_logentry" + } +}, +{ + "model": "auth.permission", + "pk": 4, + "fields": { + "name": "Can view log entry", + "content_type": 1, + "codename": "view_logentry" + } +}, +{ + "model": "auth.permission", + "pk": 5, + "fields": { + "name": "Can add permission", + "content_type": 2, + "codename": "add_permission" + } +}, +{ + "model": "auth.permission", + "pk": 6, + "fields": { + "name": "Can change permission", + "content_type": 2, + "codename": "change_permission" + } +}, +{ + "model": "auth.permission", + "pk": 7, + "fields": { + "name": "Can delete permission", + "content_type": 2, + "codename": "delete_permission" + } +}, +{ + "model": "auth.permission", + "pk": 8, + "fields": { + "name": "Can view permission", + "content_type": 2, + "codename": "view_permission" + } +}, +{ + "model": "auth.permission", + "pk": 9, + "fields": { + "name": "Can add group", + "content_type": 3, + "codename": "add_group" + } +}, +{ + "model": "auth.permission", + "pk": 10, + "fields": { + "name": "Can change group", + "content_type": 3, + "codename": "change_group" + } +}, +{ + "model": "auth.permission", + "pk": 11, + "fields": { + "name": "Can delete group", + "content_type": 3, + "codename": "delete_group" + } +}, +{ + "model": "auth.permission", + "pk": 12, + "fields": { + "name": "Can view group", + "content_type": 3, + "codename": "view_group" + } +}, +{ + "model": "auth.permission", + "pk": 13, + "fields": { + "name": "Can add user", + 
"content_type": 4, + "codename": "add_user" + } +}, +{ + "model": "auth.permission", + "pk": 14, + "fields": { + "name": "Can change user", + "content_type": 4, + "codename": "change_user" + } +}, +{ + "model": "auth.permission", + "pk": 15, + "fields": { + "name": "Can delete user", + "content_type": 4, + "codename": "delete_user" + } +}, +{ + "model": "auth.permission", + "pk": 16, + "fields": { + "name": "Can view user", + "content_type": 4, + "codename": "view_user" + } +}, +{ + "model": "auth.permission", + "pk": 17, + "fields": { + "name": "Can add content type", + "content_type": 5, + "codename": "add_contenttype" + } +}, +{ + "model": "auth.permission", + "pk": 18, + "fields": { + "name": "Can change content type", + "content_type": 5, + "codename": "change_contenttype" + } +}, +{ + "model": "auth.permission", + "pk": 19, + "fields": { + "name": "Can delete content type", + "content_type": 5, + "codename": "delete_contenttype" + } +}, +{ + "model": "auth.permission", + "pk": 20, + "fields": { + "name": "Can view content type", + "content_type": 5, + "codename": "view_contenttype" + } +}, +{ + "model": "auth.permission", + "pk": 21, + "fields": { + "name": "Can add session", + "content_type": 6, + "codename": "add_session" + } +}, +{ + "model": "auth.permission", + "pk": 22, + "fields": { + "name": "Can change session", + "content_type": 6, + "codename": "change_session" + } +}, +{ + "model": "auth.permission", + "pk": 23, + "fields": { + "name": "Can delete session", + "content_type": 6, + "codename": "delete_session" + } +}, +{ + "model": "auth.permission", + "pk": 24, + "fields": { + "name": "Can view session", + "content_type": 6, + "codename": "view_session" + } +}, +{ + "model": "auth.permission", + "pk": 25, + "fields": { + "name": "Can add search history", + "content_type": 7, + "codename": "add_searchhistory" + } +}, +{ + "model": "auth.permission", + "pk": 26, + "fields": { + "name": "Can change search history", + "content_type": 7, + "codename": "change_searchhistory" + } +}, +{ + "model": "auth.permission", + "pk": 27, + "fields": { + "name": "Can delete search history", + "content_type": 7, + "codename": "delete_searchhistory" + } +}, +{ + "model": "auth.permission", + "pk": 28, + "fields": { + "name": "Can view search history", + "content_type": 7, + "codename": "view_searchhistory" + } +}, +{ + "model": "auth.permission", + "pk": 29, + "fields": { + "name": "Can add project", + "content_type": 8, + "codename": "add_project" + } +}, +{ + "model": "auth.permission", + "pk": 30, + "fields": { + "name": "Can change project", + "content_type": 8, + "codename": "change_project" + } +}, +{ + "model": "auth.permission", + "pk": 31, + "fields": { + "name": "Can delete project", + "content_type": 8, + "codename": "delete_project" + } +}, +{ + "model": "auth.permission", + "pk": 32, + "fields": { + "name": "Can view project", + "content_type": 8, + "codename": "view_project" + } +}, +{ + "model": "auth.permission", + "pk": 33, + "fields": { + "name": "Can add netlas api key", + "content_type": 9, + "codename": "add_netlasapikey" + } +}, +{ + "model": "auth.permission", + "pk": 34, + "fields": { + "name": "Can change netlas api key", + "content_type": 9, + "codename": "change_netlasapikey" + } +}, +{ + "model": "auth.permission", + "pk": 35, + "fields": { + "name": "Can delete netlas api key", + "content_type": 9, + "codename": "delete_netlasapikey" + } +}, +{ + "model": "auth.permission", + "pk": 36, + "fields": { + "name": "Can view netlas api key", + "content_type": 9, + 
"codename": "view_netlasapikey" + } +}, +{ + "model": "auth.permission", + "pk": 37, + "fields": { + "name": "Can add open ai api key", + "content_type": 10, + "codename": "add_openaiapikey" + } +}, +{ + "model": "auth.permission", + "pk": 38, + "fields": { + "name": "Can change open ai api key", + "content_type": 10, + "codename": "change_openaiapikey" + } +}, +{ + "model": "auth.permission", + "pk": 39, + "fields": { + "name": "Can delete open ai api key", + "content_type": 10, + "codename": "delete_openaiapikey" + } +}, +{ + "model": "auth.permission", + "pk": 40, + "fields": { + "name": "Can view open ai api key", + "content_type": 10, + "codename": "view_openaiapikey" + } +}, +{ + "model": "auth.permission", + "pk": 41, + "fields": { + "name": "Can add ollama settings", + "content_type": 11, + "codename": "add_ollamasettings" + } +}, +{ + "model": "auth.permission", + "pk": 42, + "fields": { + "name": "Can change ollama settings", + "content_type": 11, + "codename": "change_ollamasettings" + } +}, +{ + "model": "auth.permission", + "pk": 43, + "fields": { + "name": "Can delete ollama settings", + "content_type": 11, + "codename": "delete_ollamasettings" + } +}, +{ + "model": "auth.permission", + "pk": 44, + "fields": { + "name": "Can view ollama settings", + "content_type": 11, + "codename": "view_ollamasettings" + } +}, +{ + "model": "auth.permission", + "pk": 45, + "fields": { + "name": "Can add domain", + "content_type": 12, + "codename": "add_domain" + } +}, +{ + "model": "auth.permission", + "pk": 46, + "fields": { + "name": "Can change domain", + "content_type": 12, + "codename": "change_domain" + } +}, +{ + "model": "auth.permission", + "pk": 47, + "fields": { + "name": "Can delete domain", + "content_type": 12, + "codename": "delete_domain" + } +}, +{ + "model": "auth.permission", + "pk": 48, + "fields": { + "name": "Can view domain", + "content_type": 12, + "codename": "view_domain" + } +}, +{ + "model": "auth.permission", + "pk": 49, + "fields": { + "name": "Can add organization", + "content_type": 13, + "codename": "add_organization" + } +}, +{ + "model": "auth.permission", + "pk": 50, + "fields": { + "name": "Can change organization", + "content_type": 13, + "codename": "change_organization" + } +}, +{ + "model": "auth.permission", + "pk": 51, + "fields": { + "name": "Can delete organization", + "content_type": 13, + "codename": "delete_organization" + } +}, +{ + "model": "auth.permission", + "pk": 52, + "fields": { + "name": "Can view organization", + "content_type": 13, + "codename": "view_organization" + } +}, +{ + "model": "auth.permission", + "pk": 53, + "fields": { + "name": "Can add domain info", + "content_type": 14, + "codename": "add_domaininfo" + } +}, +{ + "model": "auth.permission", + "pk": 54, + "fields": { + "name": "Can change domain info", + "content_type": 14, + "codename": "change_domaininfo" + } +}, +{ + "model": "auth.permission", + "pk": 55, + "fields": { + "name": "Can delete domain info", + "content_type": 14, + "codename": "delete_domaininfo" + } +}, +{ + "model": "auth.permission", + "pk": 56, + "fields": { + "name": "Can view domain info", + "content_type": 14, + "codename": "view_domaininfo" + } +}, +{ + "model": "auth.permission", + "pk": 57, + "fields": { + "name": "Can add domain registration", + "content_type": 15, + "codename": "add_domainregistration" + } +}, +{ + "model": "auth.permission", + "pk": 58, + "fields": { + "name": "Can change domain registration", + "content_type": 15, + "codename": "change_domainregistration" + } +}, +{ + 
"model": "auth.permission", + "pk": 59, + "fields": { + "name": "Can delete domain registration", + "content_type": 15, + "codename": "delete_domainregistration" + } +}, +{ + "model": "auth.permission", + "pk": 60, + "fields": { + "name": "Can view domain registration", + "content_type": 15, + "codename": "view_domainregistration" + } +}, +{ + "model": "auth.permission", + "pk": 61, + "fields": { + "name": "Can add dns record", + "content_type": 16, + "codename": "add_dnsrecord" + } +}, +{ + "model": "auth.permission", + "pk": 62, + "fields": { + "name": "Can change dns record", + "content_type": 16, + "codename": "change_dnsrecord" + } +}, +{ + "model": "auth.permission", + "pk": 63, + "fields": { + "name": "Can delete dns record", + "content_type": 16, + "codename": "delete_dnsrecord" + } +}, +{ + "model": "auth.permission", + "pk": 64, + "fields": { + "name": "Can view dns record", + "content_type": 16, + "codename": "view_dnsrecord" + } +}, +{ + "model": "auth.permission", + "pk": 65, + "fields": { + "name": "Can add name server", + "content_type": 17, + "codename": "add_nameserver" + } +}, +{ + "model": "auth.permission", + "pk": 66, + "fields": { + "name": "Can change name server", + "content_type": 17, + "codename": "change_nameserver" + } +}, +{ + "model": "auth.permission", + "pk": 67, + "fields": { + "name": "Can delete name server", + "content_type": 17, + "codename": "delete_nameserver" + } +}, +{ + "model": "auth.permission", + "pk": 68, + "fields": { + "name": "Can view name server", + "content_type": 17, + "codename": "view_nameserver" + } +}, +{ + "model": "auth.permission", + "pk": 69, + "fields": { + "name": "Can add registrar", + "content_type": 18, + "codename": "add_registrar" + } +}, +{ + "model": "auth.permission", + "pk": 70, + "fields": { + "name": "Can change registrar", + "content_type": 18, + "codename": "change_registrar" + } +}, +{ + "model": "auth.permission", + "pk": 71, + "fields": { + "name": "Can delete registrar", + "content_type": 18, + "codename": "delete_registrar" + } +}, +{ + "model": "auth.permission", + "pk": 72, + "fields": { + "name": "Can view registrar", + "content_type": 18, + "codename": "view_registrar" + } +}, +{ + "model": "auth.permission", + "pk": 73, + "fields": { + "name": "Can add whois status", + "content_type": 19, + "codename": "add_whoisstatus" + } +}, +{ + "model": "auth.permission", + "pk": 74, + "fields": { + "name": "Can change whois status", + "content_type": 19, + "codename": "change_whoisstatus" + } +}, +{ + "model": "auth.permission", + "pk": 75, + "fields": { + "name": "Can delete whois status", + "content_type": 19, + "codename": "delete_whoisstatus" + } +}, +{ + "model": "auth.permission", + "pk": 76, + "fields": { + "name": "Can view whois status", + "content_type": 19, + "codename": "view_whoisstatus" + } +}, +{ + "model": "auth.permission", + "pk": 77, + "fields": { + "name": "Can add historical ip", + "content_type": 20, + "codename": "add_historicalip" + } +}, +{ + "model": "auth.permission", + "pk": 78, + "fields": { + "name": "Can change historical ip", + "content_type": 20, + "codename": "change_historicalip" + } +}, +{ + "model": "auth.permission", + "pk": 79, + "fields": { + "name": "Can delete historical ip", + "content_type": 20, + "codename": "delete_historicalip" + } +}, +{ + "model": "auth.permission", + "pk": 80, + "fields": { + "name": "Can view historical ip", + "content_type": 20, + "codename": "view_historicalip" + } +}, +{ + "model": "auth.permission", + "pk": 81, + "fields": { + "name": "Can add 
related domain", + "content_type": 21, + "codename": "add_relateddomain" + } +}, +{ + "model": "auth.permission", + "pk": 82, + "fields": { + "name": "Can change related domain", + "content_type": 21, + "codename": "change_relateddomain" + } +}, +{ + "model": "auth.permission", + "pk": 83, + "fields": { + "name": "Can delete related domain", + "content_type": 21, + "codename": "delete_relateddomain" + } +}, +{ + "model": "auth.permission", + "pk": 84, + "fields": { + "name": "Can view related domain", + "content_type": 21, + "codename": "view_relateddomain" + } +}, +{ + "model": "auth.permission", + "pk": 85, + "fields": { + "name": "Can add configuration", + "content_type": 22, + "codename": "add_configuration" + } +}, +{ + "model": "auth.permission", + "pk": 86, + "fields": { + "name": "Can change configuration", + "content_type": 22, + "codename": "change_configuration" + } +}, +{ + "model": "auth.permission", + "pk": 87, + "fields": { + "name": "Can delete configuration", + "content_type": 22, + "codename": "delete_configuration" + } +}, +{ + "model": "auth.permission", + "pk": 88, + "fields": { + "name": "Can view configuration", + "content_type": 22, + "codename": "view_configuration" + } +}, +{ + "model": "auth.permission", + "pk": 89, + "fields": { + "name": "Can add engine type", + "content_type": 23, + "codename": "add_enginetype" + } +}, +{ + "model": "auth.permission", + "pk": 90, + "fields": { + "name": "Can change engine type", + "content_type": 23, + "codename": "change_enginetype" + } +}, +{ + "model": "auth.permission", + "pk": 91, + "fields": { + "name": "Can delete engine type", + "content_type": 23, + "codename": "delete_enginetype" + } +}, +{ + "model": "auth.permission", + "pk": 92, + "fields": { + "name": "Can view engine type", + "content_type": 23, + "codename": "view_enginetype" + } +}, +{ + "model": "auth.permission", + "pk": 93, + "fields": { + "name": "Can add hackerone", + "content_type": 24, + "codename": "add_hackerone" + } +}, +{ + "model": "auth.permission", + "pk": 94, + "fields": { + "name": "Can change hackerone", + "content_type": 24, + "codename": "change_hackerone" + } +}, +{ + "model": "auth.permission", + "pk": 95, + "fields": { + "name": "Can delete hackerone", + "content_type": 24, + "codename": "delete_hackerone" + } +}, +{ + "model": "auth.permission", + "pk": 96, + "fields": { + "name": "Can view hackerone", + "content_type": 24, + "codename": "view_hackerone" + } +}, +{ + "model": "auth.permission", + "pk": 97, + "fields": { + "name": "Can add installed external tool", + "content_type": 25, + "codename": "add_installedexternaltool" + } +}, +{ + "model": "auth.permission", + "pk": 98, + "fields": { + "name": "Can change installed external tool", + "content_type": 25, + "codename": "change_installedexternaltool" + } +}, +{ + "model": "auth.permission", + "pk": 99, + "fields": { + "name": "Can delete installed external tool", + "content_type": 25, + "codename": "delete_installedexternaltool" + } +}, +{ + "model": "auth.permission", + "pk": 100, + "fields": { + "name": "Can view installed external tool", + "content_type": 25, + "codename": "view_installedexternaltool" + } +}, +{ + "model": "auth.permission", + "pk": 101, + "fields": { + "name": "Can add interesting lookup model", + "content_type": 26, + "codename": "add_interestinglookupmodel" + } +}, +{ + "model": "auth.permission", + "pk": 102, + "fields": { + "name": "Can change interesting lookup model", + "content_type": 26, + "codename": "change_interestinglookupmodel" + } +}, +{ + "model": 
"auth.permission", + "pk": 103, + "fields": { + "name": "Can delete interesting lookup model", + "content_type": 26, + "codename": "delete_interestinglookupmodel" + } +}, +{ + "model": "auth.permission", + "pk": 104, + "fields": { + "name": "Can view interesting lookup model", + "content_type": 26, + "codename": "view_interestinglookupmodel" + } +}, +{ + "model": "auth.permission", + "pk": 105, + "fields": { + "name": "Can add notification", + "content_type": 27, + "codename": "add_notification" + } +}, +{ + "model": "auth.permission", + "pk": 106, + "fields": { + "name": "Can change notification", + "content_type": 27, + "codename": "change_notification" + } +}, +{ + "model": "auth.permission", + "pk": 107, + "fields": { + "name": "Can delete notification", + "content_type": 27, + "codename": "delete_notification" + } +}, +{ + "model": "auth.permission", + "pk": 108, + "fields": { + "name": "Can view notification", + "content_type": 27, + "codename": "view_notification" + } +}, +{ + "model": "auth.permission", + "pk": 109, + "fields": { + "name": "Can add proxy", + "content_type": 28, + "codename": "add_proxy" + } +}, +{ + "model": "auth.permission", + "pk": 110, + "fields": { + "name": "Can change proxy", + "content_type": 28, + "codename": "change_proxy" + } +}, +{ + "model": "auth.permission", + "pk": 111, + "fields": { + "name": "Can delete proxy", + "content_type": 28, + "codename": "delete_proxy" + } +}, +{ + "model": "auth.permission", + "pk": 112, + "fields": { + "name": "Can view proxy", + "content_type": 28, + "codename": "view_proxy" + } +}, +{ + "model": "auth.permission", + "pk": 113, + "fields": { + "name": "Can add vulnerability report setting", + "content_type": 29, + "codename": "add_vulnerabilityreportsetting" + } +}, +{ + "model": "auth.permission", + "pk": 114, + "fields": { + "name": "Can change vulnerability report setting", + "content_type": 29, + "codename": "change_vulnerabilityreportsetting" + } +}, +{ + "model": "auth.permission", + "pk": 115, + "fields": { + "name": "Can delete vulnerability report setting", + "content_type": 29, + "codename": "delete_vulnerabilityreportsetting" + } +}, +{ + "model": "auth.permission", + "pk": 116, + "fields": { + "name": "Can view vulnerability report setting", + "content_type": 29, + "codename": "view_vulnerabilityreportsetting" + } +}, +{ + "model": "auth.permission", + "pk": 117, + "fields": { + "name": "Can add wordlist", + "content_type": 30, + "codename": "add_wordlist" + } +}, +{ + "model": "auth.permission", + "pk": 118, + "fields": { + "name": "Can change wordlist", + "content_type": 30, + "codename": "change_wordlist" + } +}, +{ + "model": "auth.permission", + "pk": 119, + "fields": { + "name": "Can delete wordlist", + "content_type": 30, + "codename": "delete_wordlist" + } +}, +{ + "model": "auth.permission", + "pk": 120, + "fields": { + "name": "Can view wordlist", + "content_type": 30, + "codename": "view_wordlist" + } +}, +{ + "model": "auth.permission", + "pk": 121, + "fields": { + "name": "Can add dork", + "content_type": 31, + "codename": "add_dork" + } +}, +{ + "model": "auth.permission", + "pk": 122, + "fields": { + "name": "Can change dork", + "content_type": 31, + "codename": "change_dork" + } +}, +{ + "model": "auth.permission", + "pk": 123, + "fields": { + "name": "Can delete dork", + "content_type": 31, + "codename": "delete_dork" + } +}, +{ + "model": "auth.permission", + "pk": 124, + "fields": { + "name": "Can view dork", + "content_type": 31, + "codename": "view_dork" + } +}, +{ + "model": 
"auth.permission", + "pk": 125, + "fields": { + "name": "Can add email", + "content_type": 32, + "codename": "add_email" + } +}, +{ + "model": "auth.permission", + "pk": 126, + "fields": { + "name": "Can change email", + "content_type": 32, + "codename": "change_email" + } +}, +{ + "model": "auth.permission", + "pk": 127, + "fields": { + "name": "Can delete email", + "content_type": 32, + "codename": "delete_email" + } +}, +{ + "model": "auth.permission", + "pk": 128, + "fields": { + "name": "Can view email", + "content_type": 32, + "codename": "view_email" + } +}, +{ + "model": "auth.permission", + "pk": 129, + "fields": { + "name": "Can add employee", + "content_type": 33, + "codename": "add_employee" + } +}, +{ + "model": "auth.permission", + "pk": 130, + "fields": { + "name": "Can change employee", + "content_type": 33, + "codename": "change_employee" + } +}, +{ + "model": "auth.permission", + "pk": 131, + "fields": { + "name": "Can delete employee", + "content_type": 33, + "codename": "delete_employee" + } +}, +{ + "model": "auth.permission", + "pk": 132, + "fields": { + "name": "Can view employee", + "content_type": 33, + "codename": "view_employee" + } +}, +{ + "model": "auth.permission", + "pk": 133, + "fields": { + "name": "Can add end point", + "content_type": 34, + "codename": "add_endpoint" + } +}, +{ + "model": "auth.permission", + "pk": 134, + "fields": { + "name": "Can change end point", + "content_type": 34, + "codename": "change_endpoint" + } +}, +{ + "model": "auth.permission", + "pk": 135, + "fields": { + "name": "Can delete end point", + "content_type": 34, + "codename": "delete_endpoint" + } +}, +{ + "model": "auth.permission", + "pk": 136, + "fields": { + "name": "Can view end point", + "content_type": 34, + "codename": "view_endpoint" + } +}, +{ + "model": "auth.permission", + "pk": 137, + "fields": { + "name": "Can add ip address", + "content_type": 35, + "codename": "add_ipaddress" + } +}, +{ + "model": "auth.permission", + "pk": 138, + "fields": { + "name": "Can change ip address", + "content_type": 35, + "codename": "change_ipaddress" + } +}, +{ + "model": "auth.permission", + "pk": 139, + "fields": { + "name": "Can delete ip address", + "content_type": 35, + "codename": "delete_ipaddress" + } +}, +{ + "model": "auth.permission", + "pk": 140, + "fields": { + "name": "Can view ip address", + "content_type": 35, + "codename": "view_ipaddress" + } +}, +{ + "model": "auth.permission", + "pk": 141, + "fields": { + "name": "Can add port", + "content_type": 36, + "codename": "add_port" + } +}, +{ + "model": "auth.permission", + "pk": 142, + "fields": { + "name": "Can change port", + "content_type": 36, + "codename": "change_port" + } +}, +{ + "model": "auth.permission", + "pk": 143, + "fields": { + "name": "Can delete port", + "content_type": 36, + "codename": "delete_port" + } +}, +{ + "model": "auth.permission", + "pk": 144, + "fields": { + "name": "Can view port", + "content_type": 36, + "codename": "view_port" + } +}, +{ + "model": "auth.permission", + "pk": 145, + "fields": { + "name": "Can add scan history", + "content_type": 37, + "codename": "add_scanhistory" + } +}, +{ + "model": "auth.permission", + "pk": 146, + "fields": { + "name": "Can change scan history", + "content_type": 37, + "codename": "change_scanhistory" + } +}, +{ + "model": "auth.permission", + "pk": 147, + "fields": { + "name": "Can delete scan history", + "content_type": 37, + "codename": "delete_scanhistory" + } +}, +{ + "model": "auth.permission", + "pk": 148, + "fields": { + "name": "Can 
view scan history", + "content_type": 37, + "codename": "view_scanhistory" + } +}, +{ + "model": "auth.permission", + "pk": 149, + "fields": { + "name": "Can add subdomain", + "content_type": 38, + "codename": "add_subdomain" + } +}, +{ + "model": "auth.permission", + "pk": 150, + "fields": { + "name": "Can change subdomain", + "content_type": 38, + "codename": "change_subdomain" + } +}, +{ + "model": "auth.permission", + "pk": 151, + "fields": { + "name": "Can delete subdomain", + "content_type": 38, + "codename": "delete_subdomain" + } +}, +{ + "model": "auth.permission", + "pk": 152, + "fields": { + "name": "Can view subdomain", + "content_type": 38, + "codename": "view_subdomain" + } +}, +{ + "model": "auth.permission", + "pk": 153, + "fields": { + "name": "Can add technology", + "content_type": 39, + "codename": "add_technology" + } +}, +{ + "model": "auth.permission", + "pk": 154, + "fields": { + "name": "Can change technology", + "content_type": 39, + "codename": "change_technology" + } +}, +{ + "model": "auth.permission", + "pk": 155, + "fields": { + "name": "Can delete technology", + "content_type": 39, + "codename": "delete_technology" + } +}, +{ + "model": "auth.permission", + "pk": 156, + "fields": { + "name": "Can view technology", + "content_type": 39, + "codename": "view_technology" + } +}, +{ + "model": "auth.permission", + "pk": 157, + "fields": { + "name": "Can add vulnerability", + "content_type": 40, + "codename": "add_vulnerability" + } +}, +{ + "model": "auth.permission", + "pk": 158, + "fields": { + "name": "Can change vulnerability", + "content_type": 40, + "codename": "change_vulnerability" + } +}, +{ + "model": "auth.permission", + "pk": 159, + "fields": { + "name": "Can delete vulnerability", + "content_type": 40, + "codename": "delete_vulnerability" + } +}, +{ + "model": "auth.permission", + "pk": 160, + "fields": { + "name": "Can view vulnerability", + "content_type": 40, + "codename": "view_vulnerability" + } +}, +{ + "model": "auth.permission", + "pk": 161, + "fields": { + "name": "Can add sub scan", + "content_type": 41, + "codename": "add_subscan" + } +}, +{ + "model": "auth.permission", + "pk": 162, + "fields": { + "name": "Can change sub scan", + "content_type": 41, + "codename": "change_subscan" + } +}, +{ + "model": "auth.permission", + "pk": 163, + "fields": { + "name": "Can delete sub scan", + "content_type": 41, + "codename": "delete_subscan" + } +}, +{ + "model": "auth.permission", + "pk": 164, + "fields": { + "name": "Can view sub scan", + "content_type": 41, + "codename": "view_subscan" + } +}, +{ + "model": "auth.permission", + "pk": 165, + "fields": { + "name": "Can add scan activity", + "content_type": 42, + "codename": "add_scanactivity" + } +}, +{ + "model": "auth.permission", + "pk": 166, + "fields": { + "name": "Can change scan activity", + "content_type": 42, + "codename": "change_scanactivity" + } +}, +{ + "model": "auth.permission", + "pk": 167, + "fields": { + "name": "Can delete scan activity", + "content_type": 42, + "codename": "delete_scanactivity" + } +}, +{ + "model": "auth.permission", + "pk": 168, + "fields": { + "name": "Can view scan activity", + "content_type": 42, + "codename": "view_scanactivity" + } +}, +{ + "model": "auth.permission", + "pk": 169, + "fields": { + "name": "Can add meta finder document", + "content_type": 43, + "codename": "add_metafinderdocument" + } +}, +{ + "model": "auth.permission", + "pk": 170, + "fields": { + "name": "Can change meta finder document", + "content_type": 43, + "codename": 
"change_metafinderdocument" + } +}, +{ + "model": "auth.permission", + "pk": 171, + "fields": { + "name": "Can delete meta finder document", + "content_type": 43, + "codename": "delete_metafinderdocument" + } +}, +{ + "model": "auth.permission", + "pk": 172, + "fields": { + "name": "Can view meta finder document", + "content_type": 43, + "codename": "view_metafinderdocument" + } +}, +{ + "model": "auth.permission", + "pk": 173, + "fields": { + "name": "Can add vulnerability reference", + "content_type": 44, + "codename": "add_vulnerabilityreference" + } +}, +{ + "model": "auth.permission", + "pk": 174, + "fields": { + "name": "Can change vulnerability reference", + "content_type": 44, + "codename": "change_vulnerabilityreference" + } +}, +{ + "model": "auth.permission", + "pk": 175, + "fields": { + "name": "Can delete vulnerability reference", + "content_type": 44, + "codename": "delete_vulnerabilityreference" + } +}, +{ + "model": "auth.permission", + "pk": 176, + "fields": { + "name": "Can view vulnerability reference", + "content_type": 44, + "codename": "view_vulnerabilityreference" + } +}, +{ + "model": "auth.permission", + "pk": 177, + "fields": { + "name": "Can add vulnerability tags", + "content_type": 45, + "codename": "add_vulnerabilitytags" + } +}, +{ + "model": "auth.permission", + "pk": 178, + "fields": { + "name": "Can change vulnerability tags", + "content_type": 45, + "codename": "change_vulnerabilitytags" + } +}, +{ + "model": "auth.permission", + "pk": 179, + "fields": { + "name": "Can delete vulnerability tags", + "content_type": 45, + "codename": "delete_vulnerabilitytags" + } +}, +{ + "model": "auth.permission", + "pk": 180, + "fields": { + "name": "Can view vulnerability tags", + "content_type": 45, + "codename": "view_vulnerabilitytags" + } +}, +{ + "model": "auth.permission", + "pk": 181, + "fields": { + "name": "Can add directory file", + "content_type": 46, + "codename": "add_directoryfile" + } +}, +{ + "model": "auth.permission", + "pk": 182, + "fields": { + "name": "Can change directory file", + "content_type": 46, + "codename": "change_directoryfile" + } +}, +{ + "model": "auth.permission", + "pk": 183, + "fields": { + "name": "Can delete directory file", + "content_type": 46, + "codename": "delete_directoryfile" + } +}, +{ + "model": "auth.permission", + "pk": 184, + "fields": { + "name": "Can view directory file", + "content_type": 46, + "codename": "view_directoryfile" + } +}, +{ + "model": "auth.permission", + "pk": 185, + "fields": { + "name": "Can add directory scan", + "content_type": 47, + "codename": "add_directoryscan" + } +}, +{ + "model": "auth.permission", + "pk": 186, + "fields": { + "name": "Can change directory scan", + "content_type": 47, + "codename": "change_directoryscan" + } +}, +{ + "model": "auth.permission", + "pk": 187, + "fields": { + "name": "Can delete directory scan", + "content_type": 47, + "codename": "delete_directoryscan" + } +}, +{ + "model": "auth.permission", + "pk": 188, + "fields": { + "name": "Can view directory scan", + "content_type": 47, + "codename": "view_directoryscan" + } +}, +{ + "model": "auth.permission", + "pk": 189, + "fields": { + "name": "Can add cve id", + "content_type": 48, + "codename": "add_cveid" + } +}, +{ + "model": "auth.permission", + "pk": 190, + "fields": { + "name": "Can change cve id", + "content_type": 48, + "codename": "change_cveid" + } +}, +{ + "model": "auth.permission", + "pk": 191, + "fields": { + "name": "Can delete cve id", + "content_type": 48, + "codename": "delete_cveid" + } +}, +{ + 
"model": "auth.permission", + "pk": 192, + "fields": { + "name": "Can view cve id", + "content_type": 48, + "codename": "view_cveid" + } +}, +{ + "model": "auth.permission", + "pk": 193, + "fields": { + "name": "Can add cwe id", + "content_type": 49, + "codename": "add_cweid" + } +}, +{ + "model": "auth.permission", + "pk": 194, + "fields": { + "name": "Can change cwe id", + "content_type": 49, + "codename": "change_cweid" + } +}, +{ + "model": "auth.permission", + "pk": 195, + "fields": { + "name": "Can delete cwe id", + "content_type": 49, + "codename": "delete_cweid" + } +}, +{ + "model": "auth.permission", + "pk": 196, + "fields": { + "name": "Can view cwe id", + "content_type": 49, + "codename": "view_cweid" + } +}, +{ + "model": "auth.permission", + "pk": 197, + "fields": { + "name": "Can add waf", + "content_type": 50, + "codename": "add_waf" + } +}, +{ + "model": "auth.permission", + "pk": 198, + "fields": { + "name": "Can change waf", + "content_type": 50, + "codename": "change_waf" + } +}, +{ + "model": "auth.permission", + "pk": 199, + "fields": { + "name": "Can delete waf", + "content_type": 50, + "codename": "delete_waf" + } +}, +{ + "model": "auth.permission", + "pk": 200, + "fields": { + "name": "Can view waf", + "content_type": 50, + "codename": "view_waf" + } +}, +{ + "model": "auth.permission", + "pk": 201, + "fields": { + "name": "Can add country iso", + "content_type": 51, + "codename": "add_countryiso" + } +}, +{ + "model": "auth.permission", + "pk": 202, + "fields": { + "name": "Can change country iso", + "content_type": 51, + "codename": "change_countryiso" + } +}, +{ + "model": "auth.permission", + "pk": 203, + "fields": { + "name": "Can delete country iso", + "content_type": 51, + "codename": "delete_countryiso" + } +}, +{ + "model": "auth.permission", + "pk": 204, + "fields": { + "name": "Can view country iso", + "content_type": 51, + "codename": "view_countryiso" + } +}, +{ + "model": "auth.permission", + "pk": 205, + "fields": { + "name": "Can add command", + "content_type": 52, + "codename": "add_command" + } +}, +{ + "model": "auth.permission", + "pk": 206, + "fields": { + "name": "Can change command", + "content_type": 52, + "codename": "change_command" + } +}, +{ + "model": "auth.permission", + "pk": 207, + "fields": { + "name": "Can delete command", + "content_type": 52, + "codename": "delete_command" + } +}, +{ + "model": "auth.permission", + "pk": 208, + "fields": { + "name": "Can view command", + "content_type": 52, + "codename": "view_command" + } +}, +{ + "model": "auth.permission", + "pk": 209, + "fields": { + "name": "Can add gpt vulnerability report", + "content_type": 53, + "codename": "add_gptvulnerabilityreport" + } +}, +{ + "model": "auth.permission", + "pk": 210, + "fields": { + "name": "Can change gpt vulnerability report", + "content_type": 53, + "codename": "change_gptvulnerabilityreport" + } +}, +{ + "model": "auth.permission", + "pk": 211, + "fields": { + "name": "Can delete gpt vulnerability report", + "content_type": 53, + "codename": "delete_gptvulnerabilityreport" + } +}, +{ + "model": "auth.permission", + "pk": 212, + "fields": { + "name": "Can view gpt vulnerability report", + "content_type": 53, + "codename": "view_gptvulnerabilityreport" + } +}, +{ + "model": "auth.permission", + "pk": 213, + "fields": { + "name": "Can add s3 bucket", + "content_type": 54, + "codename": "add_s3bucket" + } +}, +{ + "model": "auth.permission", + "pk": 214, + "fields": { + "name": "Can change s3 bucket", + "content_type": 54, + "codename": 
"change_s3bucket" + } +}, +{ + "model": "auth.permission", + "pk": 215, + "fields": { + "name": "Can delete s3 bucket", + "content_type": 54, + "codename": "delete_s3bucket" + } +}, +{ + "model": "auth.permission", + "pk": 216, + "fields": { + "name": "Can view s3 bucket", + "content_type": 54, + "codename": "view_s3bucket" + } +}, +{ + "model": "auth.permission", + "pk": 217, + "fields": { + "name": "Can add todo note", + "content_type": 55, + "codename": "add_todonote" + } +}, +{ + "model": "auth.permission", + "pk": 218, + "fields": { + "name": "Can change todo note", + "content_type": 55, + "codename": "change_todonote" + } +}, +{ + "model": "auth.permission", + "pk": 219, + "fields": { + "name": "Can delete todo note", + "content_type": 55, + "codename": "delete_todonote" + } +}, +{ + "model": "auth.permission", + "pk": 220, + "fields": { + "name": "Can view todo note", + "content_type": 55, + "codename": "view_todonote" + } +}, +{ + "model": "auth.permission", + "pk": 221, + "fields": { + "name": "Can add crontab", + "content_type": 56, + "codename": "add_crontabschedule" + } +}, +{ + "model": "auth.permission", + "pk": 222, + "fields": { + "name": "Can change crontab", + "content_type": 56, + "codename": "change_crontabschedule" + } +}, +{ + "model": "auth.permission", + "pk": 223, + "fields": { + "name": "Can delete crontab", + "content_type": 56, + "codename": "delete_crontabschedule" + } +}, +{ + "model": "auth.permission", + "pk": 224, + "fields": { + "name": "Can view crontab", + "content_type": 56, + "codename": "view_crontabschedule" + } +}, +{ + "model": "auth.permission", + "pk": 225, + "fields": { + "name": "Can add interval", + "content_type": 57, + "codename": "add_intervalschedule" + } +}, +{ + "model": "auth.permission", + "pk": 226, + "fields": { + "name": "Can change interval", + "content_type": 57, + "codename": "change_intervalschedule" + } +}, +{ + "model": "auth.permission", + "pk": 227, + "fields": { + "name": "Can delete interval", + "content_type": 57, + "codename": "delete_intervalschedule" + } +}, +{ + "model": "auth.permission", + "pk": 228, + "fields": { + "name": "Can view interval", + "content_type": 57, + "codename": "view_intervalschedule" + } +}, +{ + "model": "auth.permission", + "pk": 229, + "fields": { + "name": "Can add periodic task", + "content_type": 58, + "codename": "add_periodictask" + } +}, +{ + "model": "auth.permission", + "pk": 230, + "fields": { + "name": "Can change periodic task", + "content_type": 58, + "codename": "change_periodictask" + } +}, +{ + "model": "auth.permission", + "pk": 231, + "fields": { + "name": "Can delete periodic task", + "content_type": 58, + "codename": "delete_periodictask" + } +}, +{ + "model": "auth.permission", + "pk": 232, + "fields": { + "name": "Can view periodic task", + "content_type": 58, + "codename": "view_periodictask" + } +}, +{ + "model": "auth.permission", + "pk": 233, + "fields": { + "name": "Can add periodic tasks", + "content_type": 59, + "codename": "add_periodictasks" + } +}, +{ + "model": "auth.permission", + "pk": 234, + "fields": { + "name": "Can change periodic tasks", + "content_type": 59, + "codename": "change_periodictasks" + } +}, +{ + "model": "auth.permission", + "pk": 235, + "fields": { + "name": "Can delete periodic tasks", + "content_type": 59, + "codename": "delete_periodictasks" + } +}, +{ + "model": "auth.permission", + "pk": 236, + "fields": { + "name": "Can view periodic tasks", + "content_type": 59, + "codename": "view_periodictasks" + } +}, +{ + "model": 
"auth.permission", + "pk": 237, + "fields": { + "name": "Can add solar event", + "content_type": 60, + "codename": "add_solarschedule" + } +}, +{ + "model": "auth.permission", + "pk": 238, + "fields": { + "name": "Can change solar event", + "content_type": 60, + "codename": "change_solarschedule" + } +}, +{ + "model": "auth.permission", + "pk": 239, + "fields": { + "name": "Can delete solar event", + "content_type": 60, + "codename": "delete_solarschedule" + } +}, +{ + "model": "auth.permission", + "pk": 240, + "fields": { + "name": "Can view solar event", + "content_type": 60, + "codename": "view_solarschedule" + } +}, +{ + "model": "auth.permission", + "pk": 241, + "fields": { + "name": "Can add clocked", + "content_type": 61, + "codename": "add_clockedschedule" + } +}, +{ + "model": "auth.permission", + "pk": 242, + "fields": { + "name": "Can change clocked", + "content_type": 61, + "codename": "change_clockedschedule" + } +}, +{ + "model": "auth.permission", + "pk": 243, + "fields": { + "name": "Can delete clocked", + "content_type": 61, + "codename": "delete_clockedschedule" + } +}, +{ + "model": "auth.permission", + "pk": 244, + "fields": { + "name": "Can view clocked", + "content_type": 61, + "codename": "view_clockedschedule" + } +}, +{ + "model": "auth.permission", + "pk": 245, + "fields": { + "name": "Modify Scan Report", + "content_type": 4, + "codename": "modify_scan_report" + } +}, +{ + "model": "auth.permission", + "pk": 246, + "fields": { + "name": "Modify Scan Configurations", + "content_type": 4, + "codename": "modify_scan_configurations" + } +}, +{ + "model": "auth.permission", + "pk": 247, + "fields": { + "name": "Modify Wordlists", + "content_type": 4, + "codename": "modify_wordlists" + } +}, +{ + "model": "auth.permission", + "pk": 248, + "fields": { + "name": "Modify Targets", + "content_type": 4, + "codename": "modify_targets" + } +}, +{ + "model": "auth.permission", + "pk": 249, + "fields": { + "name": "Initiate Scans Subscans", + "content_type": 4, + "codename": "initiate_scans_subscans" + } +}, +{ + "model": "auth.permission", + "pk": 250, + "fields": { + "name": "Modify Scan Results", + "content_type": 4, + "codename": "modify_scan_results" + } +}, +{ + "model": "auth.permission", + "pk": 251, + "fields": { + "name": "Modify Interesting Lookup", + "content_type": 4, + "codename": "modify_interesting_lookup" + } +}, +{ + "model": "auth.group", + "pk": 1, + "fields": { + "name": "penetration_tester", + "permissions": [] + } +}, +{ + "model": "auth.group", + "pk": 2, + "fields": { + "name": "auditor", + "permissions": [] + } +}, +{ + "model": "auth.user", + "pk": 1, + "fields": { + "password": "pbkdf2_sha256$260000$CqI5854efTfSzMuRGapUcC$czVBVTOeS0doZ2xNWBTDxIPbyAvkef+vB41oaDGNYDE=", + "last_login": "2024-09-03T21:23:15.084Z", + "is_superuser": true, + "username": "rengine", + "first_name": "", + "last_name": "", + "email": "rengine@example.com", + "is_staff": true, + "is_active": true, + "date_joined": "2024-09-03T21:22:52.707Z", + "groups": [], + "user_permissions": [] + } +}, +{ + "model": "auth.user", + "pk": 2, + "fields": { + "password": "pbkdf2_sha256$260000$CZZAPOMIBDEuFR6GWP44YY$zmorFyJzWeyX0+jnFnItiRwjsaPQ9S3g8KgqDNaluc0=", + "last_login": null, + "is_superuser": false, + "username": "pentest", + "first_name": "", + "last_name": "", + "email": "", + "is_staff": false, + "is_active": true, + "date_joined": "2024-09-04T00:31:18.262Z", + "groups": [ + 1 + ], + "user_permissions": [ + 249, + 251, + 246, + 245, + 250, + 248, + 247 + ] + } +}, +{ + "model": 
"auth.user", + "pk": 3, + "fields": { + "password": "pbkdf2_sha256$260000$8XXTKhzOwW3sDLL7kQmgjb$a1nISv6K/uW+KBw4yzQCkRJb6ydo7Nk/Y99RAqUGXQo=", + "last_login": null, + "is_superuser": false, + "username": "audit", + "first_name": "", + "last_name": "", + "email": "", + "is_staff": false, + "is_active": true, + "date_joined": "2024-09-04T00:31:43.172Z", + "groups": [ + 2 + ], + "user_permissions": [ + 251, + 245, + 250 + ] + } +} +] diff --git a/web/fixtures/django_celery_beat.json b/web/fixtures/django_celery_beat.json new file mode 100644 index 00000000..5228e755 --- /dev/null +++ b/web/fixtures/django_celery_beat.json @@ -0,0 +1,49 @@ +[ +{ + "model": "django_celery_beat.crontabschedule", + "pk": 1, + "fields": { + "minute": "0", + "hour": "4", + "day_of_month": "*", + "month_of_year": "*", + "day_of_week": "*", + "timezone": "UTC" + } +}, +{ + "model": "django_celery_beat.periodictasks", + "pk": 1, + "fields": { + "last_update": "2024-09-03T22:37:20.601Z" + } +}, +{ + "model": "django_celery_beat.periodictask", + "pk": 1, + "fields": { + "name": "celery.backend_cleanup", + "task": "celery.backend_cleanup", + "interval": null, + "crontab": 1, + "solar": null, + "clocked": null, + "args": "[]", + "kwargs": "{}", + "queue": null, + "exchange": null, + "routing_key": null, + "headers": "{}", + "priority": null, + "expires": null, + "expire_seconds": 43200, + "one_off": false, + "start_time": null, + "enabled": true, + "last_run_at": null, + "total_run_count": 0, + "date_changed": "2024-09-03T22:37:20.601Z", + "description": "" + } +} +] diff --git a/web/manage.py b/web/manage.py index 0c77627f..5a180a8f 100644 --- a/web/manage.py +++ b/web/manage.py @@ -11,11 +11,20 @@ def main(): os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'reNgine.settings') - # show rengine artwork - f = open('art/reNgine.txt', 'r') - file_contents = f.read() - print (file_contents) - f.close() + + # List of commands that should not display the rengine artwork + skip_art_commands = ['test', 'dumpdata'] + + # Do not show rengine artwork if we are running tests + if all(cmd not in sys.argv for cmd in skip_art_commands): + # show rengine artwork + try: + with open('art/reNgine.txt', 'r', encoding='utf-8') as f: + file_contents = f.read() + print(file_contents) + except FileNotFoundError: + print("Failed to display reNgine artwork.") + try: from django.core.management import execute_from_command_line except ImportError as exc: diff --git a/web/reNgine/common_func.py b/web/reNgine/common_func.py index 7a59e13c..75e508ff 100644 --- a/web/reNgine/common_func.py +++ b/web/reNgine/common_func.py @@ -80,7 +80,7 @@ def get_lookup_keywords(): list: Lookup keywords. 
""" lookup_model = InterestingLookupModel.objects.first() - lookup_obj = InterestingLookupModel.objects.filter(custom_type=True).order_by('-id').first() + lookup_obj = InterestingLookupModel.objects.filter().order_by('-id').first() custom_lookup_keywords = [] default_lookup_keywords = [] if lookup_model: @@ -371,7 +371,7 @@ def get_interesting_endpoints(scan_history=None, target=None): """ lookup_keywords = get_lookup_keywords() - lookup_obj = InterestingLookupModel.objects.filter(custom_type=True).order_by('-id').first() + lookup_obj = InterestingLookupModel.objects.filter().order_by('-id').first() if not lookup_obj: return EndPoint.objects.none() url_lookup = lookup_obj.url_lookup @@ -1167,3 +1167,76 @@ def create_scan_object(host_id, engine_id, initiated_by_id=None): domain.start_scan_date = current_scan_time domain.save() return scan.id + +def get_data_from_post_request(request, field): + """ + Get data from a POST request. + + Args: + request (HttpRequest): The request object. + field (str): The field to get data from. + Returns: + list: The data from the specified field. + """ + if hasattr(request.data, 'getlist'): + return request.data.getlist(field) + else: + return request.data.get(field, []) + +def safe_int_cast(value, default=None): + """ + Convert a value to an integer if possible, otherwise return a default value. + + Args: + value: The value or the array of values to convert to an integer. + default: The default value to return if conversion fails. + + Returns: + int or default: The integer value if conversion is successful, otherwise the default value. + """ + if isinstance(value, list): + return [safe_int_cast(item) for item in value] + try: + return int(value) + except (ValueError, TypeError): + return default + +def get_ip_info(ip_address): + """ + get_ip_info retrieves information about a given IP address, determining whether it is an IPv4 or IPv6 address. It returns an appropriate IP address object if the input is valid, or None if the input is not a valid IP address. + + Args: + ip_address (str): The IP address to validate and retrieve information for. + + Returns: + IPv4Address or IPv6Address or None: An IP address object if the input is valid, otherwise None. + """ + is_ipv4 = bool(validators.ipv4(ip_address)) + is_ipv6 = bool(validators.ipv6(ip_address)) + ip_data = None + if is_ipv4: + ip_data = ipaddress.IPv4Address(ip_address) + elif is_ipv6: + ip_data = ipaddress.IPv6Address(ip_address) + else: + return None + return ip_data + +def get_ips_from_cidr_range(target): + """ + get_ips_from_cidr_range generates a list of IP addresses from a given CIDR range. It returns the list of valid IPv4 addresses or logs an error if the provided CIDR range is invalid. + + Args: + target (str): The CIDR range from which to generate IP addresses. + + Returns: + list of str: A list of IP addresses as strings if the CIDR range is valid; otherwise, an empty list is returned. + + Raises: + ValueError: If the target is not a valid CIDR range, an error is logged. + """ + try: + return [str(ip) for ip in ipaddress.IPv4Network(target)] + except ValueError: + logger.error(f'{target} is not a valid CIDR range. 
Skipping.') + return [] \ No newline at end of file diff --git a/web/reNgine/tasks.py b/web/reNgine/tasks.py index 85280f80..50ba36a7 100644 --- a/web/reNgine/tasks.py +++ b/web/reNgine/tasks.py @@ -87,6 +87,10 @@ def initiate_scan( try: # Get scan engine engine_id = engine_id or scan.scan_type.id # scan history engine_id + logger.info(f'Engine ID: {engine_id}') + engines = EngineType.objects.all() + for engine in engines: + logger.info(f'Engine: {engine.id} - {engine.engine_name}') engine = EngineType.objects.get(pk=engine_id) # Get YAML config diff --git a/web/reNgine/validators.py b/web/reNgine/validators.py index 482ef808..be2b6dcc 100644 --- a/web/reNgine/validators.py +++ b/web/reNgine/validators.py @@ -23,3 +23,8 @@ def validate_short_name(value): raise ValidationError(_('%(value)s is not a valid short name,' + ' can only contain - and _'), params={'value': value}) + +def validate_ip(ip): + """Validate if the given IP address is valid.""" + if not (validators.ipv4(ip) or validators.ipv6(ip)): + raise ValidationError(_('Invalid IP address: %(ip)s'), params={'ip': ip}) diff --git a/web/recon_note/fixtures/recon_note.json b/web/recon_note/fixtures/recon_note.json new file mode 100644 index 00000000..0e9be820 --- /dev/null +++ b/web/recon_note/fixtures/recon_note.json @@ -0,0 +1,15 @@ +[ +{ + "model": "recon_note.todonote", + "pk": 1, + "fields": { + "title": "Check for vuln", + "description": "There's a lot", + "scan_history": 1, + "subdomain": 24, + "is_done": false, + "is_important": false, + "project": 1 + } +} +] diff --git a/web/recon_note/tests.py b/web/recon_note/tests.py deleted file mode 100644 index 7ce503c2..00000000 --- a/web/recon_note/tests.py +++ /dev/null @@ -1,3 +0,0 @@ -from django.test import TestCase - -# Create your tests here. diff --git a/web/recon_note/tests/__init__.py b/web/recon_note/tests/__init__.py new file mode 100644 index 00000000..e6c6fbbc --- /dev/null +++ b/web/recon_note/tests/__init__.py @@ -0,0 +1,2 @@ +from utils.test_base import * +from .test_recon_note import * diff --git a/web/recon_note/tests/test_recon_note.py b/web/recon_note/tests/test_recon_note.py new file mode 100644 index 00000000..4684388a --- /dev/null +++ b/web/recon_note/tests/test_recon_note.py @@ -0,0 +1,81 @@ +""" +TestScanReconNoteViews contains unit tests for the scan recon note functionality within the application. +It verifies the behavior of the API endpoints related to adding, listing, and deleting recon notes. + +Methods: + setUp: Initializes the test environment by creating a base project and a test TodoNote. + test_add_recon_note_success: Tests the successful addition of a recon note. + test_add_recon_note_missing_data: Tests the addition of a recon note with missing required data. + test_list_recon_notes: Tests the retrieval of all recon notes associated with a project. + test_delete_recon_note_success: Tests the successful deletion of a recon note. + test_delete_recon_note_not_found: Tests the deletion of a recon note that does not exist. 
+""" + + +from django.urls import reverse +from rest_framework import status +from utils.test_base import BaseTestCase +from recon_note.models import TodoNote + +__all__ = [ + 'TestScanReconNoteViews', +] + +class TestScanReconNoteViews(BaseTestCase): + """Test case for the Scan Recon Note views.""" + + def setUp(self): + """Set up the test environment.""" + super().setUp() + self.data_generator.create_project_base() # Create a base project + self.todo_note = self.data_generator.create_todo_note() # Create a test TodoNote + + def test_add_recon_note_success(self): + """Test adding a recon note successfully.""" + api_url = reverse("api:addReconNote") + data = { + "subdomain_id": self.data_generator.subdomain.id, + "scan_history_id": self.data_generator.scan_history.id, + "title": "New Recon Note", + "description": "This is a new recon note", + "project": self.data_generator.project.slug, + } + response = self.client.post(api_url, data) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.json()["status"]) + + def test_add_recon_note_missing_data(self): + """Test adding a recon note with missing data.""" + api_url = reverse("api:addReconNote") + data = { + "title": "Incomplete Note", + "slug": self.data_generator.project.slug, + } + response = self.client.post(api_url, data) + self.assertIn(response.status_code, [status.HTTP_400_BAD_REQUEST]) + self.assertFalse(response.json()["status"]) + self.assertIn("error", response.json()) + self.assertEqual(response.json()["error"], "Subdomain ID is required.") + + def test_list_recon_notes(self): + """Test listing all recon notes.""" + api_url = reverse("list_note", kwargs={'slug': self.data_generator.project.slug}) + response = self.client.get(api_url, {"project": self.data_generator.project.slug}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + + def test_delete_recon_note_success(self): + """Test deleting a recon note successfully.""" + api_url = reverse("delete_note") + data = {"id": self.todo_note.id} + response = self.client.post(api_url, data) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.json()["status"]) + self.assertFalse(TodoNote.objects.filter(id=self.todo_note.id).exists()) + + def test_delete_recon_note_not_found(self): + """Test deleting a recon note that does not exist.""" + api_url = reverse("delete_note") + data = {"id": 99999} # Non-existent ID + response = self.client.post(api_url, data) + self.assertIn(response.status_code, [status.HTTP_404_NOT_FOUND]) + self.assertFalse(response.json()["status"]) \ No newline at end of file diff --git a/web/recon_note/views.py b/web/recon_note/views.py index 562ae933..53bf4f75 100644 --- a/web/recon_note/views.py +++ b/web/recon_note/views.py @@ -3,13 +3,13 @@ from django.http import JsonResponse from django.shortcuts import render -from recon_note.models import * -from startScan.models import * - +from recon_note.models import TodoNote def list_note(request, slug): - context = {} - context['recon_note_active'] = 'active' + if not slug: + return JsonResponse({'status': False, 'error': 'Slug is required.'}, status=400) + + context = {'recon_note_active': 'active'} return render(request, 'note/index.html', context) def flip_todo_status(request): @@ -17,7 +17,18 @@ def flip_todo_status(request): body_unicode = request.body.decode('utf-8') body = json.loads(body_unicode) - note = TodoNote.objects.get(id=body['id']) + # Check if the ID is present in the request body + note_id = body.get('id') + if note_id is 
None: + return JsonResponse({'status': False, 'error': 'ID is required.'}, status=400) + + # Check if the note exists before attempting to update its status + try: + note = TodoNote.objects.get(id=note_id) + except TodoNote.DoesNotExist: + return JsonResponse({'status': False, 'error': 'Note not found.'}, status=404) + + # Toggle the done status of the note note.is_done = not note.is_done note.save() @@ -28,7 +39,18 @@ def flip_important_status(request): body_unicode = request.body.decode('utf-8') body = json.loads(body_unicode) - note = TodoNote.objects.get(id=body['id']) + # Check if the ID is present in the request body + note_id = body.get('id') + if note_id is None: + return JsonResponse({'status': False, 'error': 'ID is required.'}, status=400) + + # Check if the note exists before attempting to update its status + try: + note = TodoNote.objects.get(id=note_id) + except TodoNote.DoesNotExist: + return JsonResponse({'status': False, 'error': 'Note not found.'}, status=404) + + # Toggle the important status of the note note.is_important = not note.is_important note.save() @@ -36,9 +58,15 @@ def flip_important_status(request): def delete_note(request): if request.method == "POST": - body_unicode = request.body.decode('utf-8') - body = json.loads(body_unicode) + # Check if the ID is present in the request body + note_id = request.POST.get('id') + if note_id is None: + return JsonResponse({'status': False, 'error': 'ID is required.'}, status=400) + + # Check if the note exists before attempting to delete it + if not TodoNote.objects.filter(id=note_id).exists(): + return JsonResponse({'status': False, 'error': 'Note not found.'}, status=404) - TodoNote.objects.filter(id=body['id']).delete() + TodoNote.objects.filter(id=note_id).delete() return JsonResponse({'status': True}) diff --git a/web/scanEngine/fixtures/scanEngine.json b/web/scanEngine/fixtures/scanEngine.json new file mode 100644 index 00000000..daa47911 --- /dev/null +++ b/web/scanEngine/fixtures/scanEngine.json @@ -0,0 +1,436 @@ +[ +{ + "model": "scanEngine.enginetype", + "pk": 1, + "fields": { + "engine_name": "Full Scan", + "yaml_configuration": "subdomain_discovery: {\r\n 'uses_tools': ['subfinder', 'ctfr', 'sublist3r', 'tlsx', 'oneforall', 'netlas'],\r\n 'enable_http_crawl': true,\r\n 'threads': 30,\r\n 'timeout': 5,\r\n}\r\nhttp_crawl: {}\r\nport_scan: {\r\n 'enable_http_crawl': true,\r\n 'timeout': 5,\r\n # 'exclude_ports': [],\r\n # 'exclude_subdomains': [],\r\n 'ports': ['top-100'],\r\n 'rate_limit': 150,\r\n 'threads': 30,\r\n 'passive': false,\r\n # 'use_naabu_config': false,\r\n # 'enable_nmap': true,\r\n # 'nmap_cmd': '',\r\n # 'nmap_script': '',\r\n # 'nmap_script_args': ''\r\n}\r\nosint: {\r\n 'discover': [\r\n 'emails',\r\n 'metainfo',\r\n 'employees'\r\n ],\r\n 'dorks': [\r\n 'login_pages',\r\n 'admin_panels',\r\n 'dashboard_pages',\r\n 'stackoverflow',\r\n 'social_media',\r\n 'project_management',\r\n 'code_sharing',\r\n 'config_files',\r\n 'jenkins',\r\n 'wordpress_files',\r\n 'php_error',\r\n 'exposed_documents',\r\n 'db_files',\r\n 'git_exposed'\r\n ],\r\n 'intensity': 'normal',\r\n 'documents_limit': 50\r\n}\r\ndir_file_fuzz: {\r\n 'auto_calibration': true,\r\n 'enable_http_crawl': true,\r\n 'rate_limit': 150,\r\n 'extensions': ['html', 'php','git','yaml','conf','cnf','config','gz','env','log','db','mysql','bak','asp','aspx','txt','conf','sql','json','yml','pdf'],\r\n 'follow_redirect': false,\r\n 'max_time': 0,\r\n 'match_http_status': [200, 204],\r\n 'recursive_level': 2,\r\n 'stop_on_error': false,\r\n 
'timeout': 5,\r\n 'threads': 30,\r\n 'wordlist_name': 'dicc'\r\n}\r\nfetch_url: {\r\n 'uses_tools': ['gospider', 'hakrawler', 'waybackurls', 'katana', 'gau'],\r\n 'remove_duplicate_endpoints': true,\r\n 'duplicate_fields': ['content_length', 'page_title'],\r\n 'follow_redirect': false,\r\n 'enable_http_crawl': true,\r\n 'gf_patterns': ['debug_logic', 'idor', 'interestingEXT', 'interestingparams', 'interestingsubs', 'lfi', 'rce', 'redirect', 'sqli', 'ssrf', 'ssti', 'xss'],\r\n 'ignore_file_extensions': ['png', 'jpg', 'jpeg', 'gif', 'mp4', 'mpeg', 'mp3'],\r\n 'threads': 30\r\n}\r\nvulnerability_scan: {\r\n 'run_nuclei': true,\r\n 'run_dalfox': true,\r\n 'run_crlfuzz': true,\r\n 'enable_http_crawl': true,\r\n 'concurrency': 50,\r\n 'intensity': 'normal',\r\n 'rate_limit': 150,\r\n 'retries': 1,\r\n 'timeout': 5,\r\n 'fetch_gpt_report': true,\r\n 'nuclei': {\r\n 'use_nuclei_config': false,\r\n 'severities': ['unknown', 'info', 'low', 'medium', 'high', 'critical']\r\n }\r\n}\r\nwaf_detection: {\r\n\r\n}\r\nscreenshot: {\r\n 'enable_http_crawl': true,\r\n 'intensity': 'normal',\r\n 'timeout': 10,\r\n 'threads': 40\r\n}\r\n\r\n# custom_header: \"Cookie: Test\"", + "default_engine": true + } +}, +{ + "model": "scanEngine.enginetype", + "pk": 2, + "fields": { + "engine_name": "Subdomain Scan", + "yaml_configuration": "subdomain_discovery: {\r\n 'uses_tools': [\r\n 'subfinder', \r\n 'ctfr', \r\n 'sublist3r', \r\n 'tlsx', \r\n 'oneforall', \r\n 'netlas'\r\n ],\r\n 'enable_http_crawl': true,\r\n 'threads': 30,\r\n 'timeout': 5,\r\n}\r\nhttp_crawl: {}", + "default_engine": true + } +}, +{ + "model": "scanEngine.enginetype", + "pk": 3, + "fields": { + "engine_name": "OSINT", + "yaml_configuration": "osint: {\r\n 'discover': [\r\n 'emails',\r\n 'metainfo',\r\n 'employees'\r\n ],\r\n 'dorks': [\r\n 'login_pages',\r\n 'admin_panels',\r\n 'dashboard_pages',\r\n 'stackoverflow',\r\n 'social_media',\r\n 'project_management',\r\n 'code_sharing',\r\n 'config_files',\r\n 'jenkins',\r\n 'wordpress_files',\r\n 'php_error',\r\n 'exposed_documents',\r\n 'db_files',\r\n 'git_exposed'\r\n ],\r\n 'custom_dorks': [\r\n {'lookup_site': '_target_', 'lookup_extensions': 'php'}\r\n ],\r\n 'intensity': 'normal',\r\n 'documents_limit': 50\r\n}", + "default_engine": true + } +}, +{ + "model": "scanEngine.enginetype", + "pk": 4, + "fields": { + "engine_name": "Vulnerability Scan", + "yaml_configuration": "subdomain_discovery: {\r\n 'uses_tools': ['subfinder', 'ctfr', 'sublist3r', 'tlsx', 'oneforall', 'netlas'],\r\n 'enable_http_crawl': true,\r\n 'threads': 30,\r\n 'timeout': 5,\r\n}\r\nhttp_crawl: {}\r\nosint: {\r\n 'discover': [\r\n 'emails',\r\n 'metainfo',\r\n 'employees'\r\n ],\r\n 'dorks': [\r\n 'login_pages',\r\n 'admin_panels',\r\n 'dashboard_pages',\r\n 'stackoverflow',\r\n 'social_media',\r\n 'project_management',\r\n 'code_sharing',\r\n 'config_files',\r\n 'jenkins',\r\n 'wordpress_files',\r\n 'php_error',\r\n 'exposed_documents',\r\n 'db_files',\r\n 'git_exposed'\r\n ],\r\n 'intensity': 'normal',\r\n 'documents_limit': 50\r\n}\r\nvulnerability_scan: {\r\n 'run_nuclei': true,\r\n 'run_dalfox': true,\r\n 'run_crlfuzz': true,\r\n 'enable_http_crawl': true,\r\n 'concurrency': 50,\r\n 'intensity': 'normal',\r\n 'rate_limit': 150,\r\n 'retries': 1,\r\n 'timeout': 5,\r\n 'fetch_gpt_report': true,\r\n 'nuclei': {\r\n 'use_nuclei_config': false,\r\n 'severities': ['unknown', 'info', 'low', 'medium', 'high', 'critical']\r\n }\r\n}", + "default_engine": true + } +}, +{ + "model": "scanEngine.enginetype", + "pk": 5, + "fields": 
{ + "engine_name": "Port Scan", + "yaml_configuration": "http_crawl: {}\r\nport_scan: {\r\n 'enable_http_crawl': true,\r\n 'timeout': 5,\r\n # 'exclude_ports': [],\r\n # 'exclude_subdomains': [],\r\n 'ports': ['top-100'],\r\n 'rate_limit': 150,\r\n 'threads': 30,\r\n 'passive': false,\r\n # 'use_naabu_config': false,\r\n # 'enable_nmap': true,\r\n # 'nmap_cmd': '',\r\n # 'nmap_script': '',\r\n # 'nmap_script_args': ''\r\n}", + "default_engine": true + } +}, +{ + "model": "scanEngine.enginetype", + "pk": 6, + "fields": { + "engine_name": "reNgine Recommended", + "yaml_configuration": "subdomain_discovery: {\r\n 'uses_tools': ['subfinder', 'ctfr', 'sublist3r', 'tlsx', 'oneforall', 'netlas'],\r\n 'enable_http_crawl': true,\r\n 'threads': 30,\r\n 'timeout': 5,\r\n}\r\nhttp_crawl: {}\r\nosint: {\r\n 'discover': [\r\n 'emails',\r\n 'metainfo'\r\n ],\r\n 'dorks': [\r\n 'login_pages',\r\n 'admin_panels',\r\n 'dashboard_pages',\r\n 'config_files',\r\n 'exposed_documents',\r\n ],\r\n 'intensity': 'normal',\r\n 'documents_limit': 50\r\n}\r\nvulnerability_scan: {\r\n 'run_nuclei': true,\r\n 'run_dalfox': true,\r\n 'run_crlfuzz': true,\r\n 'enable_http_crawl': false,\r\n 'concurrency': 50,\r\n 'intensity': 'normal',\r\n 'rate_limit': 150,\r\n 'retries': 1,\r\n 'timeout': 5,\r\n 'fetch_gpt_report': true,\r\n 'nuclei': {\r\n 'use_nuclei_config': false,\r\n 'severities': ['low', 'medium', 'high', 'critical']\r\n }\r\n}", + "default_engine": true + } +}, +{ + "model": "scanEngine.enginetype", + "pk": 7, + "fields": { + "engine_name": "Full (perso)", + "yaml_configuration": "# Global vars for all tools\r\n#\r\n# Custom header - FFUF, Nuclei, Dalfox, CRL Fuzz, HTTPx, Fetch URL (Hakrawler, Katana, Gospider)\r\n# custom_header: {\r\n# 'Cookie':'Test',\r\n# 'User-Agent': 'Mozilla/5.0',\r\n# 'Custom-Header': 'My custom header'\r\n# }\r\n# 'user_agent': '' # Dalfox only\r\n# 'enable_http_crawl': true # All tools\r\n# 'timeout': 10 # Subdomain discovery, Screenshot, Port scan, FFUF, Nuclei \r\n# 'threads': 30 # All tools\r\n# 'rate_limit': 150 # Port scan, FFUF, Nuclei\r\n# 'intensity': 'normal' # Screenshot (grab only the root endpoints of each subdomain), Nuclei (reduce number of endpoints to scan), OSINT (not implemented yet)\r\n# 'retries': 1 # Nuclei\r\n\r\nsubdomain_discovery: {\r\n 'uses_tools': ['subfinder', 'ctfr', 'sublist3r', 'tlsx', 'oneforall', 'netlas'], # amass-passive, amass-active, All\r\n 'enable_http_crawl': true,\r\n 'threads': 30,\r\n 'timeout': 5,\r\n # 'use_subfinder_config': false,\r\n # 'use_amass_config': false,\r\n # 'amass_wordlist': 'deepmagic.com-prefixes-top50000'\r\n}\r\nhttp_crawl: {\r\n # 'custom_header': {\r\n # 'Cookie':'Test',\r\n # 'User-Agent': 'Mozilla/5.0'\r\n # },\r\n # 'threads': 30,\r\n # 'follow_redirect': false\r\n}\r\nport_scan: {\r\n 'enable_http_crawl': true,\r\n 'timeout': 5,\r\n # 'exclude_ports': [],\r\n # 'exclude_subdomains': [],\r\n 'ports': ['top-100'],\r\n 'rate_limit': 150,\r\n 'threads': 30,\r\n 'passive': false,\r\n # 'use_naabu_config': false,\r\n # 'enable_nmap': true,\r\n # 'nmap_cmd': '',\r\n # 'nmap_script': '',\r\n # 'nmap_script_args': ''\r\n}\r\nosint: {\r\n 'discover': [\r\n 'emails',\r\n 'metainfo',\r\n 'employees'\r\n ],\r\n 'dorks': [\r\n 'login_pages',\r\n 'admin_panels',\r\n 'dashboard_pages',\r\n 'stackoverflow',\r\n 'social_media',\r\n 'project_management',\r\n 'code_sharing',\r\n 'config_files',\r\n 'jenkins',\r\n 'wordpress_files',\r\n 'php_error',\r\n 'exposed_documents',\r\n 'db_files',\r\n 'git_exposed'\r\n ],\r\n # 'custom_dorks': 
[],\r\n 'intensity': 'normal',\r\n 'documents_limit': 50\r\n}\r\ndir_file_fuzz: {\r\n # 'custom_header': {\r\n # 'Cookie':'Test',\r\n # 'User-Agent': 'Mozilla/5.0',\r\n # 'Custom-Header': 'My custom header'\r\n # },\r\n 'auto_calibration': true,\r\n 'enable_http_crawl': true,\r\n 'rate_limit': 150,\r\n 'extensions': [],\r\n 'follow_redirect': false,\r\n 'max_time': 0,\r\n 'match_http_status': [200, 204],\r\n 'recursive_level': 0,\r\n 'stop_on_error': false,\r\n 'timeout': 5,\r\n 'threads': 30,\r\n 'wordlist_name': 'dicc',\r\n}\r\nfetch_url: {\r\n # 'custom_header': {\r\n # 'Cookie':'Test',\r\n # 'User-Agent': 'Mozilla/5.0',\r\n # 'Custom-Header': 'My custom header'\r\n # },\r\n 'uses_tools': ['gospider', 'hakrawler', 'waybackurls', 'katana', 'gau'],\r\n 'remove_duplicate_endpoints': true,\r\n 'duplicate_fields': ['content_length', 'page_title'],\r\n 'follow_redirect': false,\r\n 'enable_http_crawl': true,\r\n 'gf_patterns': ['debug_logic', 'idor', 'interestingEXT', 'interestingparams', 'interestingsubs', 'lfi', 'rce', 'redirect', 'sqli', 'ssrf', 'ssti', 'xss'],\r\n 'ignore_file_extensions': ['png', 'jpg', 'jpeg', 'gif', 'mp4', 'mpeg', 'mp3'],\r\n 'threads': 30,\r\n # 'exclude_subdomains': false\r\n}\r\nvulnerability_scan: {\r\n # 'custom_header': {\r\n # 'Cookie':'Test',\r\n # 'User-Agent': 'Mozilla/5.0',\r\n # 'Custom-Header': 'My custom header'\r\n # },\r\n 'run_nuclei': true,\r\n 'run_dalfox': false,\r\n 'run_crlfuzz': false,\r\n 'run_s3scanner': false,\r\n 'enable_http_crawl': true,\r\n 'concurrency': 50,\r\n 'intensity': 'normal',\r\n 'rate_limit': 150,\r\n 'retries': 1,\r\n 'timeout': 5,\r\n 'fetch_gpt_report': true,\r\n 'nuclei': {\r\n 'use_nuclei_config': false,\r\n 'severities': ['unknown', 'info', 'low', 'medium', 'high', 'critical'],\r\n # 'tags': [], # Nuclei tags (https://github.com/projectdiscovery/nuclei-templates)\r\n # 'templates': [], # Nuclei templates (https://github.com/projectdiscovery/nuclei-templates)\r\n # 'custom_templates': [] # Nuclei custom templates uploaded in reNgine\r\n }\r\n}\r\nwaf_detection: {\r\n 'enable_http_crawl': true\r\n}\r\nscreenshot: {\r\n 'enable_http_crawl': true,\r\n 'intensity': 'normal',\r\n 'timeout': 10,\r\n 'threads': 40\r\n}", + "default_engine": false + } +}, +{ + "model": "scanEngine.interestinglookupmodel", + "pk": 1, + "fields": { + "keywords": "admin, ftp, cpanel, dashboard", + "custom_type": false, + "title_lookup": true, + "url_lookup": true, + "condition_200_http_lookup": false + } +}, +{ + "model": "scanEngine.installedexternaltool", + "pk": 1, + "fields": { + "logo_url": "https://raw.githubusercontent.com/projectdiscovery/subfinder/master/static/subfinder-logo.png", + "name": "subfinder", + "description": "Subfinder is a subdomain discovery tool that discovers valid subdomains for websites by using passive online sources.", + "github_url": "https://github.com/projectdiscovery/subfinder", + "license_url": "https://github.com/projectdiscovery/subfinder/blob/master/LICENSE.md", + "version_lookup_command": "subfinder -version", + "update_command": "go install -v github.com/projectdiscovery/subfinder/v2/cmd/subfinder@latest", + "install_command": "go install -v github.com/projectdiscovery/subfinder/v2/cmd/subfinder@latest", + "version_match_regex": "v(\\d+\\.)?(\\d+\\.)?(\\*|\\d+)", + "is_default": true, + "is_subdomain_gathering": true, + "is_github_cloned": false, + "github_clone_path": null, + "subdomain_gathering_command": null + } +}, +{ + "model": "scanEngine.installedexternaltool", + "pk": 2, + "fields": { + "logo_url": 
"https://raw.githubusercontent.com/projectdiscovery/nuclei/master/static/nuclei-logo.png", + "name": "Nuclei", + "description": "Nuclei is used to send requests across targets based on a template leading to zero false positives and providing fast scanning on large number of hosts. Nuclei offers scanning for a variety of protocols including TCP, DNS, HTTP, File, etc. With powerful and flexible templating, all kinds of security checks can be modelled with Nuclei.", + "github_url": "https://github.com/projectdiscovery/nuclei", + "license_url": "https://github.com/projectdiscovery/nuclei/blob/master/LICENSE.md", + "version_lookup_command": "nuclei -version", + "update_command": "nuclei -update", + "install_command": "go install -v github.com/projectdiscovery/nuclei/v2/cmd/nuclei@latest", + "version_match_regex": "v(\\d+\\.)?(\\d+\\.)?(\\*|\\d+)", + "is_default": true, + "is_subdomain_gathering": false, + "is_github_cloned": false, + "github_clone_path": null, + "subdomain_gathering_command": null + } +}, +{ + "model": "scanEngine.installedexternaltool", + "pk": 3, + "fields": { + "logo_url": "https://raw.githubusercontent.com/projectdiscovery/httpx/master/static/httpx-logo.png", + "name": "httpx", + "description": "httpx is a fast and multi-purpose HTTP toolkit allow to run multiple probers using retryablehttp library, it is designed to maintain the result reliability with increased threads.", + "github_url": "https://github.com/projectdiscovery/httpx", + "license_url": "https://github.com/projectdiscovery/httpx/blob/master/LICENSE.md", + "version_lookup_command": "/go/bin/httpx -version", + "update_command": "go install -v github.com/projectdiscovery/httpx/cmd/httpx@latest", + "install_command": "go install -v github.com/projectdiscovery/httpx/cmd/httpx@latest", + "version_match_regex": "v(\\d+\\.)?(\\d+\\.)?(\\*|\\d+)", + "is_default": true, + "is_subdomain_gathering": false, + "is_github_cloned": false, + "github_clone_path": null, + "subdomain_gathering_command": null + } +}, +{ + "model": "scanEngine.installedexternaltool", + "pk": 4, + "fields": { + "logo_url": "https://raw.githubusercontent.com/projectdiscovery/naabu/master/static/naabu-logo.png", + "name": "naabu", + "description": "Naabu is a port scanning tool written in Go that allows you to enumerate valid ports for hosts in a fast and reliable manner. 
It is a really simple tool that does fast SYN/CONNECT scans on the host/list of hosts and lists all ports that return a reply.", + "github_url": "https://github.com/projectdiscovery/naabu", + "license_url": "https://github.com/projectdiscovery/naabu/blob/main/LICENSE.md", + "version_lookup_command": "naabu -version", + "update_command": "go install -v github.com/projectdiscovery/naabu/v2/cmd/naabu@latest", + "install_command": "go install -v github.com/projectdiscovery/naabu/v2/cmd/naabu@latest", + "version_match_regex": "(\\b\\d+\\.\\d+\\.\\d+\\b)", + "is_default": true, + "is_subdomain_gathering": false, + "is_github_cloned": false, + "github_clone_path": null, + "subdomain_gathering_command": null + } +}, +{ + "model": "scanEngine.installedexternaltool", + "pk": 5, + "fields": { + "logo_url": "https://raw.githubusercontent.com/osmedeus/assets/main/logo-transparent.png", + "name": "gospider", + "description": "Fast web spider written in Go", + "github_url": "https://github.com/jaeles-project/gospider", + "license_url": "https://github.com/jaeles-project/gospider/blob/master/LICENSE", + "version_lookup_command": "gospider --version", + "update_command": "go install -v github.com/jaeles-project/gospider@latest", + "install_command": "go install -v github.com/jaeles-project/gospider@latest", + "version_match_regex": "v(\\d+\\.)?(\\d+\\.)?(\\*|\\d+)", + "is_default": true, + "is_subdomain_gathering": false, + "is_github_cloned": false, + "github_clone_path": null, + "subdomain_gathering_command": null + } +}, +{ + "model": "scanEngine.installedexternaltool", + "pk": 6, + "fields": { + "logo_url": "https://raw.githubusercontent.com/owasp-amass/amass/master/images/amass_logo.png", + "name": "amass", + "description": "The OWASP Amass Project performs network mapping of attack surfaces and external asset discovery using open source information gathering and active reconnaissance techniques.", + "github_url": "https://github.com/owasp-amass/amass", + "license_url": "https://github.com/owasp-amass/amass/blob/master/LICENSE", + "version_lookup_command": "amass -version", + "update_command": "go install -v github.com/owasp-amass/amass/v4/...@latest", + "install_command": "go install -v github.com/owasp-amass/amass/v4/...@latest", + "version_match_regex": "v(\\d+\\.)?(\\d+\\.)?(\\*|\\d+)", + "is_default": true, + "is_subdomain_gathering": true, + "is_github_cloned": false, + "github_clone_path": null, + "subdomain_gathering_command": null + } +}, +{ + "model": "scanEngine.installedexternaltool", + "pk": 7, + "fields": { + "logo_url": "https://github.com/ffuf/ffuf/raw/master/_img/ffuf_run_logo_600.png", + "name": "ffuf", + "description": "A fast web fuzzer written in Go.", + "github_url": "https://github.com/ffuf/ffuf", + "license_url": "https://github.com/ffuf/ffuf/blob/master/LICENSE", + "version_lookup_command": "ffuf -V", + "update_command": "go install github.com/ffuf/ffuf/v2@latest", + "install_command": "", + "version_match_regex": "(\\d+\\.)?(\\d+\\.)?(\\*|\\d+)", + "is_default": true, + "is_subdomain_gathering": false, + "is_github_cloned": false, + "github_clone_path": null, + "subdomain_gathering_command": null + } +}, +{ + "model": "scanEngine.installedexternaltool", + "pk": 8, + "fields": { + "logo_url": null, + "name": "sublist3r", + "description": "Sublist3r is a python tool designed to enumerate subdomains of websites using OSINT. It helps penetration testers and bug hunters collect and gather subdomains for the domain they are targeting. 
Sublist3r enumerates subdomains using many search engines such as Google, Yahoo, Bing, Baidu and Ask. Sublist3r also enumerates subdomains using Netcraft, Virustotal, ThreatCrowd, DNSdumpster, and ReverseDNS.", + "github_url": "https://github.com/aboul3la/Sublist3r", + "license_url": "https://github.com/aboul3la/Sublist3r/blob/master/LICENSE", + "version_lookup_command": null, + "update_command": "git pull", + "install_command": "git clone https://github.com/aboul3la/Sublist3r.git", + "version_match_regex": "[vV]*(\\d+\\.)?(\\d+\\.)?(\\*|\\d+)", + "is_default": true, + "is_subdomain_gathering": true, + "is_github_cloned": true, + "github_clone_path": "/home/rengine/tools/.github/Sublist3r", + "subdomain_gathering_command": null + } +}, +{ + "model": "scanEngine.installedexternaltool", + "pk": 9, + "fields": { + "logo_url": null, + "name": "hakrawler", + "description": "Fast golang web crawler for gathering URLs and JavaSript file locations. This is basically a simple implementation of the awesome Gocolly library.", + "github_url": "https://github.com/hakluke/hakrawler", + "license_url": "https://github.com/hakluke/hakrawler/blob/master/LICENSE", + "version_lookup_command": null, + "update_command": "go install -v github.com/hakluke/hakrawler@latest", + "install_command": "go install -v github.com/hakluke/hakrawler@latest", + "version_match_regex": "[vV]*(\\d+\\.)?(\\d+\\.)?(\\*|\\d+)", + "is_default": true, + "is_subdomain_gathering": false, + "is_github_cloned": false, + "github_clone_path": null, + "subdomain_gathering_command": null + } +}, +{ + "model": "scanEngine.installedexternaltool", + "pk": 10, + "fields": { + "logo_url": null, + "name": "OneForAll", + "description": "A powerful subdomain integration tool.", + "github_url": "https://github.com/shmilylty/OneForAll", + "license_url": "https://github.com/shmilylty/OneForAll/blob/master/LICENSE", + "version_lookup_command": "cat /home/rengine/tools/.github/OneForAll/oneforall.py", + "update_command": "git pull", + "install_command": "git clone https://github.com/shmilylty/OneForAll", + "version_match_regex": "v\\d+\\.\\d+\\.\\d+\\b", + "is_default": true, + "is_subdomain_gathering": true, + "is_github_cloned": true, + "github_clone_path": "/home/rengine/tools/.github/OneForAll", + "subdomain_gathering_command": null + } +}, +{ + "model": "scanEngine.installedexternaltool", + "pk": 11, + "fields": { + "logo_url": "https://raw.githubusercontent.com/laramies/theHarvester/master/theHarvester-logo.png", + "name": "theHarvester", + "description": "theHarvester is a very simple to use, yet powerful and effective tool designed to be used in the early stages of a penetration test or red team engagement. Use it for open source intelligence (OSINT) gathering to help determine a company's external threat landscape on the internet. 
The tool gathers emails, names, subdomains, IPs and URLs using multiple public data sources.", + "github_url": "https://github.com/laramies/theHarvester", + "license_url": "https://github.com/laramies/theHarvester/blob/master/README/LICENSES", + "version_lookup_command": "cat /home/rengine/tools/.github/theHarvester/theHarvester/lib/version.py", + "update_command": "git pull", + "install_command": "git clone https://github.com/laramies/theHarvester", + "version_match_regex": "\\b\\d+\\.\\d+\\.\\d+\\b", + "is_default": true, + "is_subdomain_gathering": false, + "is_github_cloned": true, + "github_clone_path": "/home/rengine/tools/.github/theHarvester", + "subdomain_gathering_command": null + } +}, +{ + "model": "scanEngine.installedexternaltool", + "pk": 12, + "fields": { + "logo_url": null, + "name": "ctfr", + "description": "This tool allows to get the subdomains from a HTTPS website in a few seconds. CTFR does not use neither dictionary attack nor brute-force, it just abuses of Certificate Transparency logs.", + "github_url": "https://github.com/UnaPibaGeek/ctfr", + "license_url": "https://github.com/UnaPibaGeek/ctfr/blob/master/LICENSE", + "version_lookup_command": "python3 /home/rengine/tools/.github/ctfr/ctfr.py --help", + "update_command": "git pull", + "install_command": "git clone https://github.com/UnaPibaGeek/ctfr/", + "version_match_regex": "(\\d+\\.)?(\\d+\\.).", + "is_default": true, + "is_subdomain_gathering": true, + "is_github_cloned": true, + "github_clone_path": "/home/rengine/tools/.github/ctfr", + "subdomain_gathering_command": null + } +}, +{ + "model": "scanEngine.installedexternaltool", + "pk": 13, + "fields": { + "logo_url": "https://user-images.githubusercontent.com/8293321/174841003-01a62bad-2ecf-4874-89c4-efa53dd56884.png", + "name": "tlsx", + "description": "A fast and configurable TLS grabber focused on TLS based data collection and analysis.", + "github_url": "https://github.com/projectdiscovery/tlsx", + "license_url": "https://github.com/projectdiscovery/tlsx/blob/main/LICENSE", + "version_lookup_command": "tlsx -version", + "update_command": "tlsx -update", + "install_command": "go install -v github.com/projectdiscovery/tlsx/cmd/tlsx@latest", + "version_match_regex": "(\\d+\\.)?(\\d+\\.)?(\\*|\\d+)", + "is_default": true, + "is_subdomain_gathering": true, + "is_github_cloned": false, + "github_clone_path": null, + "subdomain_gathering_command": null + } +}, +{ + "model": "scanEngine.installedexternaltool", + "pk": 14, + "fields": { + "logo_url": "https://avatars.githubusercontent.com/u/79084675?v=4", + "name": "netlas", + "description": "Non-intrusive Internet Scanner.", + "github_url": "https://github.com/netlas-io/netlas-python", + "license_url": "https://github.com/netlas-io/netlas-python/blob/master/LICENSE", + "version_lookup_command": "pip3 show netlas", + "update_command": "pip3 install netlas --update", + "install_command": "pip3 install netlas", + "version_match_regex": "(\\d+\\.)?(\\d+\\.)?(\\*|\\d+)", + "is_default": true, + "is_subdomain_gathering": true, + "is_github_cloned": false, + "github_clone_path": null, + "subdomain_gathering_command": null + } +}, +{ + "model": "scanEngine.installedexternaltool", + "pk": 15, + "fields": { + "logo_url": "https://github.githubassets.com/images/icons/emoji/unicode/1f98a.png", + "name": "dalfox", + "description": "DalFox is a powerful open-source tool that focuses on automation, making it ideal for quickly scanning for XSS flaws and analyzing parameters. 
Its advanced testing engine and niche features are designed to streamline the process of detecting and verifying vulnerabilities. As for the name, Dal(달) is the Korean word for \"moon,\" while \"Fox\" stands for \"Finder Of XSS\".", + "github_url": "https://github.com/hahwul/dalfox", + "license_url": "https://github.com/hahwul/dalfox/blob/main/LICENSE.txt", + "version_lookup_command": "dalfox version", + "update_command": "go install -v github.com/hahwul/dalfox/v2@latest", + "install_command": "go install -v github.com/hahwul/dalfox/v2@latest", + "version_match_regex": "v(\\d+\\.)?(\\d+\\.)?(\\*|\\d+)", + "is_default": true, + "is_subdomain_gathering": false, + "is_github_cloned": false, + "github_clone_path": null, + "subdomain_gathering_command": null + } +}, +{ + "model": "scanEngine.installedexternaltool", + "pk": 16, + "fields": { + "logo_url": "https://user-images.githubusercontent.com/8293321/196779266-421c79d4-643a-4f73-9b54-3da379bbac09.png", + "name": "katana", + "description": "A next-generation crawling and spidering framework.", + "github_url": "https://github.com/projectdiscovery/katana", + "license_url": "https://github.com/projectdiscovery/katana/blob/main/LICENSE.md", + "version_lookup_command": "katana -version", + "update_command": "go install -v github.com/projectdiscovery/katana/cmd/katana@latest", + "install_command": "go install -v github.com/projectdiscovery/katana/cmd/katana@latest", + "version_match_regex": "v(\\d+\\.)?(\\d+\\.)?(\\*|\\d+)", + "is_default": true, + "is_subdomain_gathering": false, + "is_github_cloned": false, + "github_clone_path": null, + "subdomain_gathering_command": null + } +}, +{ + "model": "scanEngine.installedexternaltool", + "pk": 17, + "fields": { + "logo_url": "https://user-images.githubusercontent.com/25837540/90128972-fc3bdf00-dd91-11ea-8c3b-0d6f4e8c6ba3.png", + "name": "crlfuzz", + "description": "A fast tool to scan CRLF vulnerability written in Go", + "github_url": "https://github.com/dwisiswant0/crlfuzz", + "license_url": "https://github.com/dwisiswant0/crlfuzz/blob/master/README.md", + "version_lookup_command": "crlfuzz -V", + "update_command": "go install -v github.com/dwisiswant0/crlfuzz/cmd/crlfuzz@latest", + "install_command": "go install -v github.com/dwisiswant0/crlfuzz/cmd/crlfuzz@latest", + "version_match_regex": "(\\d+\\.)?(\\d+\\.)?(\\*|\\d+)", + "is_default": true, + "is_subdomain_gathering": false, + "is_github_cloned": false, + "github_clone_path": null, + "subdomain_gathering_command": null + } +}, +{ + "model": "scanEngine.installedexternaltool", + "pk": 18, + "fields": { + "logo_url": null, + "name": "gau", + "description": "Get all URLs", + "github_url": "https://github.com/lc/gau", + "license_url": "https://github.com/lc/gau/blob/main/LICENSE", + "version_lookup_command": "gau --version", + "update_command": "go install github.com/lc/gau/v2/cmd/gau@latest", + "install_command": "go install github.com/lc/gau/v2/cmd/gau@latest", + "version_match_regex": "[vV]*(\\d+\\.)?(\\d+\\.)?(\\*|\\d+)", + "is_default": true, + "is_subdomain_gathering": false, + "is_github_cloned": false, + "github_clone_path": null, + "subdomain_gathering_command": null + } +} +] diff --git a/web/scanEngine/templates/scanEngine/settings/llm_toolkit.html b/web/scanEngine/templates/scanEngine/settings/llm_toolkit.html index fa6d304e..79caa5e1 100644 --- a/web/scanEngine/templates/scanEngine/settings/llm_toolkit.html +++ b/web/scanEngine/templates/scanEngine/settings/llm_toolkit.html @@ -234,7 +234,7 @@

} function selectModel(model_name){ - var url = "/api/tool/ollama/?model=" + model_name; + var url = "/api/tool/ollama/"; swal.queue([{ title: 'Are you sure you want to select this model?', text: "This model will be used to generate Scan Reports and Attack Suggestions.", @@ -248,8 +248,10 @@

method: 'PUT', credentials: "same-origin", headers: { - "X-CSRFToken": getCookie("csrftoken") - } + "X-CSRFToken": getCookie("csrftoken"), + "Content-Type": "application/json" + }, + body: JSON.stringify({ model: model_name }) }).then(function(response) { return response.json(); }).then(function(data) { @@ -272,7 +274,6 @@

} }]); } - //return location.reload(); }).catch(function() { swal.insertQueueStep({ icon: 'error', diff --git a/web/scanEngine/tests.py b/web/scanEngine/tests.py deleted file mode 100644 index 7ce503c2..00000000 --- a/web/scanEngine/tests.py +++ /dev/null @@ -1,3 +0,0 @@ -from django.test import TestCase - -# Create your tests here. diff --git a/web/scanEngine/tests/__init__.py b/web/scanEngine/tests/__init__.py new file mode 100644 index 00000000..0af3f29e --- /dev/null +++ b/web/scanEngine/tests/__init__.py @@ -0,0 +1,2 @@ +from utils.test_base import * +from .test_scan_engine import * diff --git a/web/scanEngine/tests/test_scan_engine.py b/web/scanEngine/tests/test_scan_engine.py new file mode 100644 index 00000000..3f99383b --- /dev/null +++ b/web/scanEngine/tests/test_scan_engine.py @@ -0,0 +1,227 @@ +""" +test_scan_engine.py + +This file contains unit tests for the views of the scanEngine application. +It tests functionalities related to scan engines, wordlists, settings, and tools. +""" + +from django.urls import reverse +from utils.test_base import BaseTestCase +from scanEngine.models import EngineType, Wordlist, InstalledExternalTool + +__all__ = [ + 'TestScanEngineViews', +] + +class TestScanEngineViews(BaseTestCase): + """ + Test class for the scanEngine views. + """ + + def setUp(self): + """ + Initial setup for the tests. + Creates test objects for engines, wordlists, settings, and tools. + """ + super().setUp() + self.data_generator.create_project_full() + + def test_index_view(self): + """ + Tests the index view to ensure it returns the correct status code and template. + """ + response = self.client.get(reverse('scan_engine_index', kwargs={'slug': 'default'})) + self.assertEqual(response.status_code, 200) + self.assertTemplateUsed(response, 'scanEngine/index.html') + + def test_add_engine_view(self): + """ + Tests the add engine view to ensure a new engine is created successfully. + """ + response = self.client.post(reverse('add_engine', kwargs={'slug': 'default'}), { + 'engine_name': 'New Engine', + 'yaml_configuration': 'new: config' + }) + self.assertEqual(response.status_code, 302) + self.assertTrue(EngineType.objects.filter(engine_name='New Engine').exists()) + + def test_delete_engine_view(self): + """ + Tests the delete engine view to ensure an engine is deleted successfully. + """ + response = self.client.post(reverse('delete_engine_url', kwargs={ + 'slug': 'default', + 'id': self.data_generator.engine_type.id + })) + self.assertEqual(response.status_code, 200) + self.assertFalse(EngineType.objects.filter(id=self.data_generator.engine_type.id).exists()) + + def test_update_engine_view(self): + """ + Tests the update engine view to ensure an engine is updated successfully. + """ + response = self.client.post(reverse('update_engine', kwargs={ + 'slug': 'default', + 'id': self.data_generator.engine_type.id + }), { + 'engine_name': 'Updated Engine', + 'yaml_configuration': 'updated: config' + }) + self.assertEqual(response.status_code, 302) + self.data_generator.engine_type.refresh_from_db() + self.assertEqual(self.data_generator.engine_type.engine_name, 'Updated Engine') + + def test_wordlist_list_view(self): + """ + Tests the wordlist list view to ensure it returns the correct status code and template. 
+ """ + response = self.client.get(reverse('wordlist_list', kwargs={'slug': 'default'})) + self.assertEqual(response.status_code, 200) + self.assertTemplateUsed(response, 'scanEngine/wordlist/index.html') + + def test_add_wordlist_view(self): + """ + Tests the add wordlist view to ensure a new wordlist is created successfully. + """ + with open('test_wordlist.txt', 'w', encoding='utf-8') as f: + f.write('test\nword\nlist') + with open('test_wordlist.txt', 'rb') as f: + response = self.client.post(reverse('add_wordlist', kwargs={'slug': 'default'}), { + 'name': 'New Wordlist', + 'short_name': 'new', + 'upload_file': f + }) + self.assertEqual(response.status_code, 302) + self.assertTrue(Wordlist.objects.filter(name='New Wordlist').exists()) + + def test_delete_wordlist_view(self): + """ + Tests the delete wordlist view to ensure a wordlist is deleted successfully. + """ + response = self.client.post(reverse('delete_wordlist', kwargs={ + 'slug': 'default', + 'id': self.data_generator.wordlist.id + })) + self.assertEqual(response.status_code, 200) + self.assertFalse(Wordlist.objects.filter(id=self.data_generator.wordlist.id).exists()) + + def test_interesting_lookup_view(self): + """ + Tests the interesting lookup view to ensure it updates keywords successfully. + """ + response = self.client.post(reverse('interesting_lookup', kwargs={'slug': 'default'}), { + 'custom_type': True, + 'keywords': 'test,lookup' + }) + self.assertEqual(response.status_code, 302) + self.data_generator.interesting_lookup_model.refresh_from_db() + self.assertEqual(self.data_generator.interesting_lookup_model.keywords, 'test,lookup') + + def test_tool_specific_settings_view(self): + """ + Tests the tool-specific settings view to ensure it returns the correct status code and template. + """ + response = self.client.get(reverse('tool_settings', kwargs={'slug': 'default'})) + self.assertEqual(response.status_code, 200) + self.assertTemplateUsed(response, 'scanEngine/settings/tool.html') + + def test_rengine_settings_view(self): + """ + Tests the rengine settings view to ensure it returns the correct status code and template. + """ + response = self.client.get(reverse('rengine_settings', kwargs={'slug': 'default'})) + self.assertEqual(response.status_code, 200) + self.assertTemplateUsed(response, 'scanEngine/settings/rengine.html') + + def test_notification_settings_view(self): + """ + Tests the notification settings view to ensure it returns the correct status code and template. + """ + response = self.client.get(reverse('notification_settings', kwargs={'slug': 'default'})) + self.assertEqual(response.status_code, 200) + self.assertTemplateUsed(response, 'scanEngine/settings/notification.html') + + def test_proxy_settings_view(self): + """ + Tests the proxy settings view to ensure it updates proxy settings successfully. + """ + response = self.client.post(reverse('proxy_settings', kwargs={'slug': 'default'}), { + 'use_proxy': True, + 'proxies': '192.168.1.1', + }) + self.assertEqual(response.status_code, 302) + self.data_generator.proxy.refresh_from_db() + self.assertEqual(self.data_generator.proxy.proxies, '192.168.1.1') + + def test_hackerone_settings_view(self): + """ + Tests the Hackerone settings view to ensure it updates settings successfully. 
+ """ + response = self.client.post(reverse('hackerone_settings', kwargs={'slug': 'default'}), { + 'username': 'newuser', + 'api_key': 'newapikey' + }) + self.assertEqual(response.status_code, 302) + self.data_generator.hackerone.refresh_from_db() + self.assertEqual(self.data_generator.hackerone.username, 'newuser') + + def test_report_settings_view(self): + """ + Tests the report settings view to ensure it updates settings successfully. + """ + response = self.client.post(reverse('report_settings', kwargs={'slug': 'default'}), { + 'primary_color': '#FFFFFF', + 'secondary_color': '#000000' + }) + self.assertEqual(response.status_code, 302) + self.data_generator.report_setting.refresh_from_db() + self.assertEqual(self.data_generator.report_setting.primary_color, '#FFFFFF') + + def test_tool_arsenal_section_view(self): + """ + Tests the tool arsenal section view to ensure it returns the correct status code and template. + """ + response = self.client.get(reverse('tool_arsenal', kwargs={'slug': 'default'})) + self.assertEqual(response.status_code, 200) + self.assertTemplateUsed(response, 'scanEngine/settings/tool_arsenal.html') + + def test_api_vault_view(self): + """ + Tests the API vault view to ensure it updates API keys successfully. + """ + response = self.client.post(reverse('api_vault', kwargs={'slug': 'default'}), { + 'key_openai': 'test_openai_key', + 'key_netlas': 'test_netlas_key' + }) + self.assertEqual(response.status_code, 200) + self.assertTemplateUsed(response, 'scanEngine/settings/api.html') + + def test_add_tool_view(self): + """ + Tests the add tool view to ensure a new tool is created successfully. + """ + response = self.client.post(reverse('add_tool', kwargs={'slug': 'default'}), { + 'name': 'New Tool', + 'github_url': 'https://github.com/new/tool', + 'install_command': 'pip install new-tool', + 'description': 'New Tool Description' + }) + self.assertEqual(response.status_code, 302) + self.assertTrue(InstalledExternalTool.objects.filter(name='New Tool').exists()) + + def test_modify_tool_in_arsenal_view(self): + """ + Tests the modify tool in arsenal view to ensure a tool is updated successfully. 
+ """ + response = self.client.post(reverse('update_tool_in_arsenal', kwargs={ + 'slug': 'default', + 'id': self.data_generator.external_tool.id + }), { + 'name': 'Modified Tool', + 'github_url': 'https://github.com/modified/tool', + 'install_command': 'pip install modified-tool', + 'description': 'Modified Tool Description' + }) + self.assertEqual(response.status_code, 302) + self.data_generator.external_tool.refresh_from_db() + self.assertEqual(self.data_generator.external_tool.name, 'Modified Tool') diff --git a/web/scanEngine/views.py b/web/scanEngine/views.py index 397b59c6..664ecb0e 100644 --- a/web/scanEngine/views.py +++ b/web/scanEngine/views.py @@ -12,9 +12,16 @@ from django.urls import reverse from rolepermissions.decorators import has_permission_decorator +from reNgine.settings import RENGINE_HOME, RENGINE_TOOL_GITHUB_PATH from reNgine.common_func import * -from reNgine.tasks import (run_command, send_discord_message, send_slack_message,send_lark_message, send_telegram_message, run_gf_list) -from reNgine.settings import RENGINE_HOME +from reNgine.tasks import ( + run_command, + send_discord_message, + send_slack_message, + send_lark_message, + send_telegram_message, + run_gf_list +) from scanEngine.forms import * from scanEngine.forms import ConfigurationForm from scanEngine.models import * @@ -180,9 +187,7 @@ def interesting_lookup(request, slug): form = InterestingLookupForm(request.POST, instance=lookup_keywords) else: form = InterestingLookupForm(request.POST or None) - print(form.errors) if form.is_valid(): - print(form.cleaned_data) form.save() messages.add_message( request, @@ -580,21 +585,21 @@ def add_tool(request, slug): form = ExternalToolForm() if request.method == "POST": form = ExternalToolForm(request.POST) - print(form.errors) if form.is_valid(): # add tool install_command = form.data['install_command'] github_clone_path = None + + # Only modify install_command if it contains 'git clone' if 'git clone' in install_command: project_name = install_command.split('/')[-1] - install_command = install_command + ' /home/rengine/tools/.github/' + project_name + ' && pip install -r /home/rengine/tools/.github/' + project_name + '/requirements.txt' - github_clone_path = '/home/rengine/tools/.github/' + project_name - # if github cloned we also need to install requirements, atleast found in the main dir - install_command = 'pip3 install -r /home/rengine/tools/.github/' + project_name + '/requirements.txt' + install_command = f'{install_command} {RENGINE_TOOL_GITHUB_PATH}/{project_name} && pip install -r {RENGINE_TOOL_GITHUB_PATH}/{project_name}/requirements.txt' + github_clone_path = f'{RENGINE_TOOL_GITHUB_PATH}/{project_name}' run_command(install_command) run_command.apply_async(args=(install_command,)) saved_form = form.save() + if github_clone_path: tool = InstalledExternalTool.objects.get(id=saved_form.pk) tool.github_clone_path = github_clone_path @@ -605,10 +610,11 @@ def add_tool(request, slug): messages.INFO, 'External Tool Successfully Added!') return http.HttpResponseRedirect(reverse('tool_arsenal', kwargs={'slug': slug})) + context = { - 'settings_nav_active': 'active', - 'form': form - } + 'settings_nav_active': 'active', + 'form': form + } return render(request, 'scanEngine/settings/add_tool.html', context) diff --git a/web/startScan/fixtures/startScan.json b/web/startScan/fixtures/startScan.json new file mode 100644 index 00000000..5d0c8fe6 --- /dev/null +++ b/web/startScan/fixtures/startScan.json @@ -0,0 +1,3392 @@ +[ +{ + "model": "startScan.scanhistory", + 
"pk": 1, + "fields": { + "start_scan_date": "2024-09-03T21:25:45.594Z", + "scan_status": 2, + "results_dir": "/home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97", + "domain": 1, + "scan_type": 7, + "celery_ids": "[\"c2de965c-4db3-405b-b58d-9894f18306b2\", \"d3715a2f-20dd-471f-a1b6-0a7f35472afd\", \"a0d9c581-9f0f-46e9-ad45-2abb20681fe0\", null, \"13e72a80-8f84-4ae6-b47f-081f0ecd4b6c\", \"3596c2c2-2dca-4164-b342-b5046c697304\", \"dc33f486-7ad0-4c73-a355-4c93df4538af\", \"b74526bd-8f9a-4738-a98f-876ba776da33\", \"e1351fcc-958c-4fd2-941b-e8c34c3db9d9\", \"e61a5153-22d3-4890-984c-c45177e93baa\", \"267cc8e5-24ab-4651-a708-4286b4506ee0\", \"3f932d02-9beb-4c0b-9cb0-394fc6e73401\"]", + "tasks": "[\"subdomain_discovery\", \"http_crawl\", \"port_scan\", \"osint\", \"dir_file_fuzz\", \"fetch_url\", \"vulnerability_scan\", \"waf_detection\", \"screenshot\"]", + "stop_scan_date": "2024-09-03T22:29:36.469Z", + "used_gf_patterns": "debug_logic,idor,interestingEXT,interestingparams,interestingsubs,lfi,rce,redirect,sqli,ssrf,ssti,xss", + "error_message": null, + "initiated_by": 1, + "aborted_by": null, + "emails": [], + "employees": [], + "buckets": [], + "dorks": [ + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + 9, + 10, + 11, + 12, + 13, + 14, + 15, + 16, + 17, + 18, + 19, + 20, + 21, + 22, + 23, + 24, + 25, + 26, + 27, + 28, + 29, + 30, + 31, + 32, + 33, + 34, + 35, + 36, + 37, + 38, + 39, + 40, + 41, + 42, + 43, + 44, + 45, + 46, + 47, + 48, + 49, + 50, + 51, + 52, + 53, + 54, + 55, + 56, + 57, + 58, + 59, + 60, + 61, + 62, + 63, + 64, + 65, + 66, + 67, + 68, + 69, + 70, + 71, + 72, + 73, + 74, + 75, + 76, + 77 + ] + } +}, +{ + "model": "startScan.subdomain", + "pk": 1, + "fields": { + "scan_history": 1, + "target_domain": 1, + "name": "testphp.vulnweb.com", + "is_imported_subdomain": true, + "is_important": false, + "http_url": "http://testphp.vulnweb.com", + "screenshot_path": "/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/screenshots/screens/http.testphp.vulnweb.com.png", + "http_header_path": null, + "discovered_date": "2024-09-03T21:25:45.864Z", + "cname": null, + "is_cdn": false, + "cdn_name": null, + "http_status": 200, + "content_type": "text/html", + "response_time": 0.345110429, + "webserver": "nginx/1.19.0", + "content_length": 4958, + "page_title": "Home of Acunetix Art", + "attack_surface": null, + "technologies": [ + 1, + 2, + 3, + 4 + ], + "ip_addresses": [ + 1 + ], + "directories": [ + 5 + ], + "waf": [] + } +}, +{ + "model": "startScan.subdomain", + "pk": 2, + "fields": { + "scan_history": 1, + "target_domain": 1, + "name": "vulnweb.com", + "is_imported_subdomain": false, + "is_important": false, + "http_url": "http://vulnweb.com", + "screenshot_path": "/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/screenshots/screens/http.vulnweb.com.png", + "http_header_path": null, + "discovered_date": "2024-09-03T21:25:57.076Z", + "cname": null, + "is_cdn": false, + "cdn_name": null, + "http_status": 200, + "content_type": "text/html", + "response_time": 0.352853777, + "webserver": "nginx/1.19.0", + "content_length": 4018, + "page_title": "Acunetix Web Vulnerability Scanner - Test websites", + "attack_surface": null, + "technologies": [ + 2 + ], + "ip_addresses": [ + 1 + ], + "directories": [ + 6 + ], + "waf": [] + } +}, +{ + "model": "startScan.subdomain", + "pk": 16, + "fields": { + "scan_history": 1, + "target_domain": 1, + "name": "rest.vulnweb.com", + "is_imported_subdomain": false, + "is_important": false, + "http_url": "http://rest.vulnweb.com", + 
"screenshot_path": "/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/screenshots/screens/http.rest.vulnweb.com.png", + "http_header_path": null, + "discovered_date": "2024-09-03T21:28:09.647Z", + "cname": null, + "is_cdn": false, + "cdn_name": null, + "http_status": 200, + "content_type": "text/html", + "response_time": 0.422205408, + "webserver": "Apache/2.4.25 (Debian)", + "content_length": 3555, + "page_title": "Acunetix Vulnerable REST API", + "attack_surface": null, + "technologies": [ + 9, + 10, + 11 + ], + "ip_addresses": [ + 3 + ], + "directories": [ + 1 + ], + "waf": [] + } +}, +{ + "model": "startScan.subdomain", + "pk": 23, + "fields": { + "scan_history": 1, + "target_domain": 1, + "name": "testasp.vulnweb.com", + "is_imported_subdomain": false, + "is_important": false, + "http_url": "http://testasp.vulnweb.com", + "screenshot_path": "/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/screenshots/screens/http.testasp.vulnweb.com.png", + "http_header_path": null, + "discovered_date": "2024-09-03T21:28:09.673Z", + "cname": null, + "is_cdn": false, + "cdn_name": null, + "http_status": 200, + "content_type": "text/html", + "response_time": 0.36117539699999995, + "webserver": "Microsoft-IIS/8.5", + "content_length": 3537, + "page_title": "acuforum forums", + "attack_surface": null, + "technologies": [ + 1, + 6, + 7, + 8 + ], + "ip_addresses": [ + 2 + ], + "directories": [ + 2 + ], + "waf": [] + } +}, +{ + "model": "startScan.subdomain", + "pk": 24, + "fields": { + "scan_history": 1, + "target_domain": 1, + "name": "testaspnet.vulnweb.com", + "is_imported_subdomain": false, + "is_important": false, + "http_url": "http://testaspnet.vulnweb.com", + "screenshot_path": "/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/screenshots/screens/http.testaspnet.vulnweb.com.png", + "http_header_path": null, + "discovered_date": "2024-09-03T21:28:09.677Z", + "cname": null, + "is_cdn": false, + "cdn_name": null, + "http_status": 200, + "content_type": "text/html", + "response_time": 0.535234499, + "webserver": "Microsoft-IIS/8.5", + "content_length": 14082, + "page_title": "acublog news", + "attack_surface": null, + "technologies": [ + 6, + 8, + 12, + 13 + ], + "ip_addresses": [ + 2 + ], + "directories": [ + 3 + ], + "waf": [] + } +}, +{ + "model": "startScan.subdomain", + "pk": 26, + "fields": { + "scan_history": 1, + "target_domain": 1, + "name": "testhtml5.vulnweb.com", + "is_imported_subdomain": false, + "is_important": false, + "http_url": "http://testhtml5.vulnweb.com", + "screenshot_path": "/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/screenshots/screens/http.testhtml5.vulnweb.com.png", + "http_header_path": null, + "discovered_date": "2024-09-03T21:28:09.686Z", + "cname": null, + "is_cdn": false, + "cdn_name": null, + "http_status": 200, + "content_type": "text/html", + "response_time": 0.342381867, + "webserver": "nginx/1.19.0", + "content_length": 6940, + "page_title": "SecurityTweets - HTML5 test website for Acunetix Web Vulnerability Scanner", + "attack_surface": null, + "technologies": [ + 2, + 5 + ], + "ip_addresses": [ + 1 + ], + "directories": [ + 4 + ], + "waf": [] + } +}, +{ + "model": "startScan.subdomain", + "pk": 38, + "fields": { + "scan_history": 1, + "target_domain": 1, + "name": "www.vulnweb.com", + "is_imported_subdomain": false, + "is_important": false, + "http_url": "http://www.vulnweb.com", + "screenshot_path": "/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/screenshots/screens/http.www.vulnweb.com.png", + "http_header_path": null, 
+ "discovered_date": "2024-09-03T21:28:09.730Z", + "cname": null, + "is_cdn": false, + "cdn_name": null, + "http_status": 200, + "content_type": "text/html", + "response_time": 0.352635869, + "webserver": "nginx/1.19.0", + "content_length": 4018, + "page_title": "Acunetix Web Vulnerability Scanner - Test websites", + "attack_surface": null, + "technologies": [ + 2 + ], + "ip_addresses": [ + 1 + ], + "directories": [ + 7 + ], + "waf": [] + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 1, + "fields": { + "name": "dns" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 2, + "fields": { + "name": "caa" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 3, + "fields": { + "name": "spf" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 4, + "fields": { + "name": "txt" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 5, + "fields": { + "name": "tech" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 6, + "fields": { + "name": "nginx" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 7, + "fields": { + "name": "misconfig" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 8, + "fields": { + "name": "http" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 9, + "fields": { + "name": "cookie" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 10, + "fields": { + "name": "generic" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 11, + "fields": { + "name": "apache" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 12, + "fields": { + "name": "php" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 13, + "fields": { + "name": "ns" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 14, + "fields": { + "name": "form" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 15, + "fields": { + "name": "misc" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 16, + "fields": { + "name": "miscellaneous" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 17, + "fields": { + "name": "cve" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 18, + "fields": { + "name": "cve2022" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 19, + "fields": { + "name": "carrental" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 20, + "fields": { + "name": "cms" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 21, + "fields": { + "name": "sqli" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 22, + "fields": { + "name": "authenticated" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 23, + "fields": { + "name": "car_rental_management_system_project" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 24, + "fields": { + "name": "headers" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 25, + "fields": { + "name": "detect" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 26, + "fields": { + "name": "aspnet" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 27, + "fields": { + "name": "phpstorm" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 28, + "fields": { + "name": "jetbrains" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 29, + "fields": { + "name": "idea" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 30, + "fields": { + "name": "exposure" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 31, + "fields": { + "name": "files" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 
32, + "fields": { + "name": "debug" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 33, + "fields": { + "name": "microsoft" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 34, + "fields": { + "name": "iis" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 35, + "fields": { + "name": "waf" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 36, + "fields": { + "name": "xxe" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 37, + "fields": { + "name": "blind" + } +}, +{ + "model": "startScan.vulnerabilityreference", + "pk": 1, + "fields": { + "url": "https://support.dnsimple.com/articles/caa-record/#whats-a-caa-record" + } +}, +{ + "model": "startScan.vulnerabilityreference", + "pk": 2, + "fields": { + "url": "https://www.mimecast.com/content/how-to-create-an-spf-txt-record" + } +}, +{ + "model": "startScan.vulnerabilityreference", + "pk": 3, + "fields": { + "url": "https://www.netspi.com/blog/technical/network-penetration-testing/analyzing-dns-txt-records-to-fingerprint-service-providers/" + } +}, +{ + "model": "startScan.vulnerabilityreference", + "pk": 4, + "fields": { + "url": "https://owasp.org/www-community/HttpOnly" + } +}, +{ + "model": "startScan.vulnerabilityreference", + "pk": 5, + "fields": { + "url": "https://owasp.org/www-community/controls/SecureCookieAttribute" + } +}, +{ + "model": "startScan.vulnerabilityreference", + "pk": 6, + "fields": { + "url": "https://github.com/dirtycoder0124/formcrawler" + } +}, +{ + "model": "startScan.vulnerabilityreference", + "pk": 7, + "fields": { + "url": "https://github.com/k0xx11/bug_report/blob/main/vendors/campcodes.com/car-rental-management-system/SQLi-8.md" + } +}, +{ + "model": "startScan.vulnerabilityreference", + "pk": 8, + "fields": { + "url": "https://nvd.nist.gov/vuln/detail/CVE-2022-32028" + } +}, +{ + "model": "startScan.vulnerabilityreference", + "pk": 9, + "fields": { + "url": "https://github.com/ARPSyndicate/kenzer-templates" + } +}, +{ + "model": "startScan.vulnerabilityreference", + "pk": 10, + "fields": { + "url": "https://developer.mozilla.org/en-US/docs/Web/Security/Mixed_content" + } +}, +{ + "model": "startScan.vulnerabilityreference", + "pk": 11, + "fields": { + "url": "https://portswigger.net/kb/issues/01000400_mixed-content" + } +}, +{ + "model": "startScan.vulnerabilityreference", + "pk": 12, + "fields": { + "url": "https://resources.infosecinstitute.com/topics/vulnerabilities/https-mixed-content-vulnerability/" + } +}, +{ + "model": "startScan.vulnerabilityreference", + "pk": 13, + "fields": { + "url": "https://docs.gitlab.com/ee/user/application_security/dast/checks/319.1.html" + } +}, +{ + "model": "startScan.vulnerabilityreference", + "pk": 14, + "fields": { + "url": "https://portswigger.net/kb/issues/00100800_asp-net-debugging-enabled" + } +}, +{ + "model": "startScan.vulnerabilityreference", + "pk": 15, + "fields": { + "url": "https://github.com/Ekultek/WhatWaf" + } +}, +{ + "model": "startScan.scanactivity", + "pk": 1, + "fields": { + "scan_of": 1, + "title": "Subdomain discovery", + "name": "subdomain_discovery", + "time": "2024-09-03T21:28:31.457Z", + "status": 2, + "error_message": null, + "traceback": null, + "celery_id": "be34c2cd-c7b3-41df-abd6-3e12bb512ea3" + } +}, +{ + "model": "startScan.scanactivity", + "pk": 2, + "fields": { + "scan_of": 1, + "title": "OS Intelligence", + "name": "osint", + "time": "2024-09-03T21:26:58.275Z", + "status": 2, + "error_message": null, + "traceback": null, + "celery_id": "a0d9c581-9f0f-46e9-ad45-2abb20681fe0" + 
} +}, +{ + "model": "startScan.scanactivity", + "pk": 3, + "fields": { + "scan_of": 1, + "title": "Http crawl", + "name": "http_crawl", + "time": "2024-09-03T21:28:31.186Z", + "status": 2, + "error_message": null, + "traceback": null, + "celery_id": null + } +}, +{ + "model": "startScan.scanactivity", + "pk": 4, + "fields": { + "scan_of": 1, + "title": "Port scan", + "name": "port_scan", + "time": "2024-09-03T21:29:11.664Z", + "status": 2, + "error_message": null, + "traceback": null, + "celery_id": "13e72a80-8f84-4ae6-b47f-081f0ecd4b6c" + } +}, +{ + "model": "startScan.scanactivity", + "pk": 5, + "fields": { + "scan_of": 1, + "title": "Fetch URL", + "name": "fetch_url", + "time": "2024-09-03T22:02:45.068Z", + "status": 2, + "error_message": null, + "traceback": null, + "celery_id": "3596c2c2-2dca-4164-b342-b5046c697304" + } +}, +{ + "model": "startScan.scanactivity", + "pk": 6, + "fields": { + "scan_of": 1, + "title": "Directories & files fuzz", + "name": "dir_file_fuzz", + "time": "2024-09-03T22:13:05.287Z", + "status": 2, + "error_message": null, + "traceback": null, + "celery_id": "032f40df-a5f0-4fb6-bb06-63b73a54bd17" + } +}, +{ + "model": "startScan.scanactivity", + "pk": 7, + "fields": { + "scan_of": 1, + "title": "Vulnerability scan", + "name": "vulnerability_scan", + "time": "2024-09-03T22:29:16.424Z", + "status": 2, + "error_message": null, + "traceback": null, + "celery_id": "6673b166-bc55-4440-a217-9932b697cd69" + } +}, +{ + "model": "startScan.scanactivity", + "pk": 8, + "fields": { + "scan_of": 1, + "title": "Screenshot", + "name": "screenshot", + "time": "2024-09-03T22:03:01.734Z", + "status": 2, + "error_message": null, + "traceback": null, + "celery_id": "dc33f486-7ad0-4c73-a355-4c93df4538af" + } +}, +{ + "model": "startScan.scanactivity", + "pk": 9, + "fields": { + "scan_of": 1, + "title": "WAF detection", + "name": "waf_detection", + "time": "2024-09-03T22:03:10.096Z", + "status": 2, + "error_message": null, + "traceback": null, + "celery_id": "c4ccc7f8-b370-4354-9a6a-098ba13fde0f" + } +}, +{ + "model": "startScan.scanactivity", + "pk": 10, + "fields": { + "scan_of": 1, + "title": "Nuclei Scan", + "name": "nuclei_scan", + "time": "2024-09-03T22:29:15.243Z", + "status": 2, + "error_message": null, + "traceback": null, + "celery_id": "b74526bd-8f9a-4738-a98f-876ba776da33" + } +}, +{ + "model": "startScan.scanactivity", + "pk": 11, + "fields": { + "scan_of": 1, + "title": "Nuclei Scan with severity unknown", + "name": "nuclei_individual_severity_module", + "time": "2024-09-03T22:04:05.946Z", + "status": 2, + "error_message": null, + "traceback": null, + "celery_id": "e1351fcc-958c-4fd2-941b-e8c34c3db9d9" + } +}, +{ + "model": "startScan.scanactivity", + "pk": 12, + "fields": { + "scan_of": 1, + "title": "Nuclei Scan with severity info", + "name": "nuclei_individual_severity_module", + "time": "2024-09-03T22:16:51.232Z", + "status": 2, + "error_message": null, + "traceback": null, + "celery_id": "e61a5153-22d3-4890-984c-c45177e93baa" + } +}, +{ + "model": "startScan.scanactivity", + "pk": 13, + "fields": { + "scan_of": 1, + "title": "Nuclei Scan with severity low", + "name": "nuclei_individual_severity_module", + "time": "2024-09-03T22:08:56.115Z", + "status": 2, + "error_message": null, + "traceback": null, + "celery_id": "267cc8e5-24ab-4651-a708-4286b4506ee0" + } +}, +{ + "model": "startScan.scanactivity", + "pk": 14, + "fields": { + "scan_of": 1, + "title": "Nuclei Scan with severity medium", + "name": "nuclei_individual_severity_module", + "time": 
"2024-09-03T22:28:15.118Z", + "status": 2, + "error_message": null, + "traceback": null, + "celery_id": "47b90b12-ecce-49c9-93cf-f1e26fc15db6" + } +}, +{ + "model": "startScan.scanactivity", + "pk": 15, + "fields": { + "scan_of": 1, + "title": "Nuclei Scan with severity high", + "name": "nuclei_individual_severity_module", + "time": "2024-09-03T22:29:10.920Z", + "status": 2, + "error_message": null, + "traceback": null, + "celery_id": "06e12924-022d-4972-8b4b-ceda9b6985f2" + } +}, +{ + "model": "startScan.scanactivity", + "pk": 16, + "fields": { + "scan_of": 1, + "title": "Nuclei Scan with severity critical", + "name": "nuclei_individual_severity_module", + "time": "2024-09-03T22:06:56.220Z", + "status": 2, + "error_message": null, + "traceback": null, + "celery_id": "3f932d02-9beb-4c0b-9cb0-394fc6e73401" + } +}, +{ + "model": "startScan.command", + "pk": 1, + "fields": { + "scan_history": 1, + "activity": null, + "command": "httpx -cl -ct -rt -location -td -websocket -cname -asn -cdn -probe -random-agent -t 1 -json -u testphp.vulnweb.com -silent", + "return_code": 0, + "output": "{\"timestamp\":\"2024-09-03T21:25:56.985783616Z\",\"port\":\"80\",\"url\":\"http://testphp.vulnweb.com\",\"input\":\"testphp.vulnweb.com\",\"title\":\"Home of Acunetix Art\",\"scheme\":\"http\",\"webserver\":\"nginx/1.19.0\",\"content_type\":\"text/html\",\"method\":\"GET\",\"host\":\"44.228.249.3\",\"path\":\"/\",\"time\":\"343.388749ms\",\"a\":[\"44.228.249.3\"],\"tech\":[\"DreamWeaver\",\"Nginx:1.19.0\",\"PHP:5.6.40\",\"Ubuntu\"],\"words\":514,\"lines\":110,\"status_code\":200,\"content_length\":4958,\"failed\":false,\"knowledgebase\":{\"PageType\":\"nonerror\",\"pHash\":0}}\n", + "time": "2024-09-03T21:25:45.871Z" + } +}, +{ + "model": "startScan.command", + "pk": 2, + "fields": { + "scan_history": 1, + "activity": null, + "command": "rm /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/httpx_input.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:25:57.045Z" + } +}, +{ + "model": "startScan.command", + "pk": 3, + "fields": { + "scan_history": null, + "activity": null, + "command": "geoiplookup 44.228.249.3", + "return_code": null, + "output": null, + "time": "2024-09-03T21:25:57.037Z" + } +}, +{ + "model": "startScan.command", + "pk": 4, + "fields": { + "scan_history": 1, + "activity": null, + "command": "httpx -cl -ct -rt -location -td -websocket -cname -asn -cdn -probe -random-agent -t 1 -json -u vulnweb.com -silent", + "return_code": 0, + "output": "{\"timestamp\":\"2024-09-03T21:26:08.122014174Z\",\"port\":\"80\",\"url\":\"http://vulnweb.com\",\"input\":\"vulnweb.com\",\"title\":\"Acunetix Web Vulnerability Scanner - Test websites\",\"scheme\":\"http\",\"webserver\":\"nginx/1.19.0\",\"content_type\":\"text/html\",\"method\":\"GET\",\"host\":\"44.228.249.3\",\"path\":\"/\",\"time\":\"351.001638ms\",\"a\":[\"44.228.249.3\"],\"tech\":[\"Nginx:1.19.0\"],\"words\":482,\"lines\":74,\"status_code\":200,\"content_length\":4018,\"failed\":false,\"knowledgebase\":{\"PageType\":\"nonerror\",\"pHash\":0}}\n", + "time": "2024-09-03T21:25:57.079Z" + } +}, +{ + "model": "startScan.command", + "pk": 5, + "fields": { + "scan_history": 1, + "activity": null, + "command": "rm /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/httpx_input.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:26:08.160Z" + } +}, +{ + "model": "startScan.command", + "pk": 6, + "fields": { + "scan_history": 1, + "activity": 1, + "command": "subfinder 
-d vulnweb.com -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/subdomains_subfinder.txt -timeout 5 -t 30 -silent", + "return_code": 0, + "output": "\n[\u001b[34mINF\u001b[0m] Current subfinder version v2.6.6 (\u001b[92mlatest\u001b[0m)\nblogger.com.vulnweb.com\nodincovo.vulnweb.com\ntestasp.vulnweb.com\nwww.testphp.vulnweb.com\ntestphp.vulnweb.com\n2ftestasp.vulnweb.com\nedu-rost.rutestasp.vulnweb.com\nu003erest.vulnweb.com\ntestaspnet.vulnweb.com\ntetphp.vulnweb.com\nrest.vulnweb.com\ntestaspx.vulnweb.com\n5burl-3dhttp-3a-2f-2fwww.vulnweb.com\n7ctestasp.vulnweb.com\nwww.test.php.vulnweb.com\nedu-rost.ruwww.vulnweb.com\ntesthtml5.vulnweb.com\nwww.virus.vulnweb.com\nu003etestasp.vulnweb.com\ntest.php.vulnweb.com\ntestap.vulnweb.com\nwww.testasp.vulnweb.com\nestphp.vulnweb.com\ntestapsnet.vulnweb.com\nhttptestaspnet.vulnweb.com\n2f-2fwww.vulnweb.com\nrestasp.vulnweb.com\nviruswall.vulnweb.com\nvirus.vulnweb.com\nttestphp.vulnweb.com\nwww.vulnweb.com\ntest.vulnweb.com\ntestaps.vulnweb.com\n2fwww.vulnweb.com\nantivirus1.vulnweb.com\ntestpphp.vulnweb.com", + "time": "2024-09-03T21:26:08.233Z" + } +}, +{ + "model": "startScan.command", + "pk": 7, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t vulnweb.com -d 3 -p 5 -w /login/,login.html -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mvulnweb.com\u001b[0m\n\n===================================================================\nDirectories and files found from: \u001b[1m/login/,login.html\u001b[0m\n===================================================================\n\nhttp://rest.vulnweb.com/basic_authentication/api/\nhttp://rest.vulnweb.com/docs/\nhttp://testasp.vulnweb.com/Search.asp\nhttp://testaspnet.vulnweb.com/login.aspx\nhttp://testhtml5.vulnweb.com/\nhttp://testphp.vulnweb.com/\nhttp://testphp.vulnweb.com/admin/\nhttp://testphp.vulnweb.com/artists.php\nhttp://testphp.vulnweb.com/artists.php?artist\nhttp://testphp.vulnweb.com/artists.php?artist=1\nhttp://testphp.vulnweb.com/artists.php?artist=2\nhttp://testphp.vulnweb.com/categories.php\nhttp://testphp.vulnweb.com/disclaimer.php\nhttp://testphp.vulnweb.com/listproducts.php?cat\nhttp://testphp.vulnweb.com/listproducts.php?cat=1\nhttp://testphp.vulnweb.com/login.php\nhttp://testphp.vulnweb.com/search.php\nhttp://www.vulnweb.com/", + "time": "2024-09-03T21:26:08.250Z" + } +}, +{ + "model": "startScan.command", + "pk": 8, + "fields": { + "scan_history": 1, + "activity": 1, + "command": "ctfr -d vulnweb.com -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/subdomains_ctfr.txt && cat /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/subdomains_ctfr.txt | sed 's/\\*.//g' | tail -n +12 | uniq | sort > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/subdomains_ctfr.txt", + "return_code": 0, + "output": "\n\n____ _____ _____ ____\n/ ___|_ _| ___| _ \\\n| | | | | |_ | |_) |\n| |___ | | | _| | _ <\n\\____| |_| |_| |_| \\_\\\n\nVersion 1.2 - Hey don't miss AXFR!\nMade by Sheila A. Berta (UnaPibaGeek)\n\n\n[!] ---- TARGET: vulnweb.com ---- [!]\n\n\n\n[!] Done. Have a nice day! 
;).", + "time": "2024-09-03T21:26:10.094Z" + } +}, +{ + "model": "startScan.command", + "pk": 9, + "fields": { + "scan_history": 1, + "activity": 1, + "command": "sublist3r -d vulnweb.com -t 30 -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/subdomains_sublister.txt", + "return_code": 0, + "output": "\n\u001b[91m\n____ _ _ _ _ _____\n/ ___| _ _| |__ | (_)___| |_|___ / _ __\n\\___ \\| | | | '_ \\| | / __| __| |_ \\| '__|\n___) | |_| | |_) | | \\__ \\ |_ ___) | |\n|____/ \\__,_|_.__/|_|_|___/\\__|____/|_|\u001b[0m\u001b[93m\n\n# Coded By Ahmed Aboul-Ela - @aboul3la\n\n\u001b[94m[-] Enumerating subdomains now for vulnweb.com\u001b[0m\n\u001b[92m[-] Searching now in Baidu..\u001b[0m\n\u001b[92m[-] Searching now in Yahoo..\u001b[0m\n\u001b[92m[-] Searching now in Google..\u001b[0m\n\u001b[92m[-] Searching now in Bing..\u001b[0m\n\u001b[92m[-] Searching now in Ask..\u001b[0m\n\u001b[92m[-] Searching now in Netcraft..\u001b[0m\n\u001b[92m[-] Searching now in DNSdumpster..\u001b[0m\n\u001b[92m[-] Searching now in Virustotal..\u001b[0m\n\u001b[92m[-] Searching now in ThreatCrowd..\u001b[0m\n\u001b[92m[-] Searching now in SSL Certificates..\u001b[0m\n\u001b[92m[-] Searching now in PassiveDNS..\u001b[0m\n\u001b[91m[!] Error: Virustotal probably now is blocking our requests\u001b[0m", + "time": "2024-09-03T21:26:11.036Z" + } +}, +{ + "model": "startScan.command", + "pk": 10, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t vulnweb.com -d 3 -p 5 -w /admin/,admin.html -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mvulnweb.com\u001b[0m\n\n===================================================================\nDirectories and files found from: \u001b[1m/admin/,admin.html\u001b[0m\n===================================================================\n\nhttp://rest.vulnweb.com/basic_authentication/api/\nhttp://rest.vulnweb.com/docs/\nhttp://rest.vulnweb.com/images/1_basic_authentication.png\nhttp://rest.vulnweb.com/images/2_jwt.png\nhttp://rest.vulnweb.com/images/3_oauth2.png\nhttp://testphp.vulnweb.com/admin/\nhttp://testphp.vulnweb.com/login.php\nhttp://testphp.vulnweb.com/secured/phpinfo.php", + "time": "2024-09-03T21:26:12.068Z" + } +}, +{ + "model": "startScan.command", + "pk": 11, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t vulnweb.com -d 3 -p 5 -w /dashboard/,dashboard.html -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mvulnweb.com\u001b[0m\n\nSorry, no results found.", + "time": "2024-09-03T21:26:15.816Z" + } +}, +{ + "model": "startScan.command", + "pk": 12, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t stackoverflow.com -d 3 -p 2 -w vulnweb.com -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google 
Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mstackoverflow.com\u001b[0m\n\nSorry, no results found.", + "time": "2024-09-03T21:26:16.663Z" + } +}, +{ + "model": "startScan.command", + "pk": 13, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t tiktok.com -d 3 -p 2 -w vulnweb.com -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mtiktok.com\u001b[0m\n\nSorry, no results found.", + "time": "2024-09-03T21:26:17.334Z" + } +}, +{ + "model": "startScan.command", + "pk": 14, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t facebook.com -d 3 -p 2 -w vulnweb.com -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mfacebook.com\u001b[0m\n\n===================================================================\nDirectories and files found from: \u001b[1mvulnweb.com\u001b[0m\n===================================================================\n\nhttps://m.facebook.com/QwertyAloneC/posts/dork-sqli-just-add-inurl-before-themphpcatidcartphpidphpcidindexphpshowtopiccont/142828509609539/\nhttps://m.facebook.com/groups/3201261889909211/posts/3624197390948990/\nhttps://m.facebook.com/groups/3201261889909211/posts/4007010936000965/\nhttps://m.facebook.com/groups/3201261889909211/posts/4007311169304275/\nhttps://m.facebook.com/groups/3201261889909211/posts/4188319924536731/\nhttps://m.facebook.com/groups/3201261889909211/posts/4367877359914319/\nhttps://m.facebook.com/groups/3201261889909211/posts/6293716407330395/\nhttps://m.facebook.com/groups/hackingteam2022/posts/2726773620796174/\nhttps://www.facebook.com/KaliLinuxBrazil/photos/tenha-certeza-de-estar-com-o-tor-instaladoinstala%C3%A7%C3%A3o-tor-rootkaliapt-get-install/972928206082146/\nhttps://www.facebook.com/KaliLinuxBrazil/photos/tenha-certeza-de-estar-com-o-tor-instaladoinstala&C3&A7&C3&A3o-tor-rootkaliapt-get-install/972928206082146/\nhttps://www.facebook.com/groups/3201261889909211/", + "time": "2024-09-03T21:26:18.037Z" + } +}, +{ + "model": "startScan.command", + "pk": 15, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t twitter.com -d 3 -p 2 -w vulnweb.com -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mtwitter.com\u001b[0m\n\n===================================================================\nDirectories and files found from: 
\u001b[1mvulnweb.com\u001b[0m\n===================================================================\n\nhttps://mobile.twitter.com/MrHades2020\nhttps://twitter.com/0xSwayamm\nhttps://twitter.com/7h3r4bb17\nhttps://twitter.com/SadatTamzit\nhttps://twitter.com/deathpoolx1\nhttps://twitter.com/hunterabubakar\nhttps://twitter.com/kg4409\nhttps://twitter.com/ravidutt04?lang\nhttps://twitter.com/ravidutt04?lang=ca\nhttps://twitter.com/therceman/status/1711473903934054427\nhttps://twitter.com/vishack81", + "time": "2024-09-03T21:26:21.869Z" + } +}, +{ + "model": "startScan.command", + "pk": 16, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t youtube.com -d 3 -p 2 -w vulnweb.com -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1myoutube.com\u001b[0m\n\n===================================================================\nDirectories and files found from: \u001b[1mvulnweb.com\u001b[0m\n===================================================================\n\nhttps://m.youtube.com/watch?v\nhttps://m.youtube.com/watch?v=2_lswM1S264\nhttps://m.youtube.com/watch?v=2tJgPyRITGc\nhttps://m.youtube.com/watch?v=6FDKHewTP4A\nhttps://m.youtube.com/watch?v=cEoPdpVUeyU\nhttps://m.youtube.com/watch?v=gHkGuVb9vX8\nhttps://www.youtube.com/watch?v\nhttps://www.youtube.com/watch?v=IbDAHDSlgYg\nhttps://www.youtube.com/watch?v=ZstyFyfS3g4\nhttps://www.youtube.com/watch?v=cEoPdpVUeyU\nhttps://www.youtube.com/watch?v=dabm-7CcHaE\nhttps://www.youtube.com/watch?v=tAxMpoKkvCw", + "time": "2024-09-03T21:26:25.626Z" + } +}, +{ + "model": "startScan.command", + "pk": 17, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t reddit.com -d 3 -p 2 -w vulnweb.com -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mreddit.com\u001b[0m\n\n===================================================================\nDirectories and files found from: \u001b[1mvulnweb.com\u001b[0m\n===================================================================\n\nhttps://www.reddit.com/r/sysadmin/comments/gs031c/how_to_login_to_web_by_submitting_username/", + "time": "2024-09-03T21:26:29.388Z" + } +}, +{ + "model": "startScan.command", + "pk": 18, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t trello.com -d 3 -p 2 -w vulnweb.com -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mtrello.com\u001b[0m\n\nSorry, no results found.", + "time": "2024-09-03T21:26:33.059Z" + } +}, +{ + "model": "startScan.command", + "pk": 19, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t atlassian.net -d 3 -p 2 -w vulnweb.com -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": 
"\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1matlassian.net\u001b[0m\n\nSorry, no results found.", + "time": "2024-09-03T21:26:33.737Z" + } +}, +{ + "model": "startScan.command", + "pk": 20, + "fields": { + "scan_history": 1, + "activity": 1, + "command": "tlsx -san -cn -silent -ro -host vulnweb.com | sed -n '/^\\([a-zA-Z0-9]\\([-a-zA-Z0-9]*[a-zA-Z0-9]\\)\\?\\.\\)\\+vulnweb.com$/p' | uniq | sort > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/subdomains_tlsx.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:26:34.437Z" + } +}, +{ + "model": "startScan.command", + "pk": 21, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t github.com -d 3 -p 2 -w vulnweb.com -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mgithub.com\u001b[0m\n\n===================================================================\nDirectories and files found from: \u001b[1mvulnweb.com\u001b[0m\n===================================================================\n\nhttps://github.com/dradis/dradis-acunetix/blob/master/spec/fixtures/files/testphp.vulnweb.com.export.acunetix.xml\nhttps://github.com/dtag-dev-sec/explo/blob/master/examples/SQLI_LOGIN_testphp.vulnweb.com.yaml\nhttps://github.com/dtag-dev-sec/explo/blob/master/examples/SQLI_testphp.vulnweb.com.yaml\nhttps://github.com/yangbh/Hammer/blob/master/output/testphp.vulnweb.com/http_testphp.vulnweb.com", + "time": "2024-09-03T21:26:34.456Z" + } +}, +{ + "model": "startScan.command", + "pk": 22, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t gitlab.com -d 3 -p 2 -w vulnweb.com -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mgitlab.com\u001b[0m\n\nSorry, no results found.", + "time": "2024-09-03T21:26:38.165Z" + } +}, +{ + "model": "startScan.command", + "pk": 23, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t bitbucket.org -d 3 -p 2 -w vulnweb.com -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mbitbucket.org\u001b[0m\n\n===================================================================\nDirectories and files found from: 
\u001b[1mvulnweb.com\u001b[0m\n===================================================================\n\nhttps://bitbucket.org/snippets/alms/KrG4LL\nhttps://bitbucket.org/snippets/bakueikozo/rex9ar/playstation-classic-uart-login-console\nhttps://bitbucket.org/snippets/especnorthamerica/neb6dq/examples-of-web-controller-rest-api-usage\nhttps://bitbucket.org/snippets/faridani/pRX6r\nhttps://bitbucket.org/snippets/orrp/xeGzXB/interactive-mgg\nhttps://bitbucket.org/snippets/raerose01/5enKR5\nhttps://bitbucket.org/snippets/sglienke/64LG6b/introsort\nhttps://bitbucket.org/snippets/sglienke/6oBqMb\nhttps://bitbucket.org/snippets/suntorytime/rAGXar/wellness-sources-overview\nhttps://bitbucket.org/snippets/wmgodyak/6bXKj", + "time": "2024-09-03T21:26:38.820Z" + } +}, +{ + "model": "startScan.command", + "pk": 24, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t vulnweb.com -d 3 -p 4 -e env,xml,conf,toml,yml,yaml,cnf,inf,rdp,ora,txt,cfg,ini -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mvulnweb.com\u001b[0m\n\nSorry, no results found for \u001b[1menv\u001b[0m.\n\n===================================================================\nExtension: \u001b[1mxml\u001b[0m\n===================================================================\n\nhttp://testphp.vulnweb.com/.idea/workspace.xml\nhttp://testphp.vulnweb.com/crossdomain.xml\n\n\u001b[1m[!]\u001b[0m Oops... Looks like Google has temporarily blocked your IP address.", + "time": "2024-09-03T21:26:42.598Z" + } +}, +{ + "model": "startScan.command", + "pk": 25, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t vulnweb.com -d 3 -p 1 -w Jenkins -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mvulnweb.com\u001b[0m\n\n\u001b[1m[!]\u001b[0m Oops... Looks like Google has temporarily blocked your IP address.", + "time": "2024-09-03T21:26:47.795Z" + } +}, +{ + "model": "startScan.command", + "pk": 26, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t vulnweb.com -d 3 -p 5 -w /wp-content/,/wp-includes/ -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mvulnweb.com\u001b[0m\n\n\u001b[1m[!]\u001b[0m Oops... 
Looks like Google has temporarily blocked your IP address.", + "time": "2024-09-03T21:26:48.862Z" + } +}, +{ + "model": "startScan.command", + "pk": 27, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t vulnweb.com -d 3 -p 5 -w PHP Parse error,PHP Warning,PHP Error -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mvulnweb.com\u001b[0m\n\n\u001b[1m[!]\u001b[0m Oops... Looks like Google has temporarily blocked your IP address.", + "time": "2024-09-03T21:26:49.546Z" + } +}, +{ + "model": "startScan.command", + "pk": 28, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t vulnweb.com -d 3 -p 7 -e doc,docx,odt,pdf,rtf,sxw,psw,ppt,pptx,pps,csv -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mvulnweb.com\u001b[0m\n\n\u001b[1m[!]\u001b[0m Oops... Looks like Google has temporarily blocked your IP address.", + "time": "2024-09-03T21:26:50.666Z" + } +}, +{ + "model": "startScan.command", + "pk": 29, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t vulnweb.com -d 3 -p 1 -e sql,db,dbf,mdb -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mvulnweb.com\u001b[0m\n\n\u001b[1m[!]\u001b[0m Oops... Looks like Google has temporarily blocked your IP address.", + "time": "2024-09-03T21:26:51.510Z" + } +}, +{ + "model": "startScan.command", + "pk": 30, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t vulnweb.com -d 3 -p 1 -e git -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mvulnweb.com\u001b[0m\n\n\u001b[1m[!]\u001b[0m Oops... Looks like Google has temporarily blocked your IP address.", + "time": "2024-09-03T21:26:52.449Z" + } +}, +{ + "model": "startScan.command", + "pk": 31, + "fields": { + "scan_history": 1, + "activity": 1, + "command": "oneforall --target vulnweb.com run && cut -d',' -f6 /home/rengine/tools/.github/OneForAll/results/vulnweb.com.csv | tail -n +2 > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/subdomains_oneforall.txt && rm -rf /home/rengine/tools/.github/OneForAll/results/vulnweb.com.csv", + "return_code": 0, + "output": "\n\nOneForAll is a powerful subdomain integration tool\u001b[01;33m\n___ _ _\n___ ___ ___| _|___ ___ ___| | | \u001b[01;37m{\u001b[1;31mv0.4.5 #dev\u001b[01;37m}\u001b[01;32m\n| . | | -_| _| . 
| _| .'| | | \u001b[01;34m\n|___|_|_|___|_| |___|_| |__,|_|_| \u001b[01;37mgit.io/fjHT1\n\n\u001b[1;31mOneForAll is under development, please update before each use!\u001b[0m\n\n[*] Starting OneForAll @ 2024-09-03 21:27:05\n\n21:27:05,902 [INFOR] utils:532 - Checking dependent environment\n21:27:05,902 [INFOR] utils:544 - Checking network environment\n21:27:06,465 [INFOR] utils:555 - Checking for the latest version\n21:27:06,730 [INFOR] utils:579 - The current version v0.4.5 is already the latest version\n21:27:06,731 [INFOR] oneforall:241 - Start running OneForAll\n21:27:06,732 [INFOR] oneforall:246 - Got 1 domains\n21:27:06,763 [INFOR] wildcard:108 - Detecting vulnweb.com use wildcard dns record or not\n21:27:06,881 [ALERT] wildcard:47 - 370500ba.vulnweb.com resolve to: 370500ba.vulnweb.com. IP: {'44.228.249.3'} TTL: 3600\n21:27:06,907 [ALERT] wildcard:47 - 62a5f2cb.vulnweb.com resolve to: 62a5f2cb.vulnweb.com. IP: {'44.228.249.3'} TTL: 3600\n21:27:06,933 [ALERT] wildcard:47 - ee447f63.vulnweb.com resolve to: ee447f63.vulnweb.com. IP: {'44.228.249.3'} TTL: 3600\n21:27:06,934 [INFOR] utils:700 - Attempting to request http://370500ba.vulnweb.com\n21:27:07,318 [ALERT] utils:708 - Error request http://370500ba.vulnweb.com\n21:27:07,318 [INFOR] utils:700 - Attempting to request http://62a5f2cb.vulnweb.com\n21:27:07,721 [ALERT] utils:708 - Error request http://62a5f2cb.vulnweb.com\n21:27:07,722 [INFOR] utils:700 - Attempting to request http://ee447f63.vulnweb.com\n21:27:08,135 [ALERT] utils:708 - Error request http://ee447f63.vulnweb.com\n21:27:08,135 [ALERT] wildcard:121 - The domain vulnweb.com enables wildcard\n21:27:08,135 [INFOR] collect:44 - Start collecting subdomains of vulnweb.com\n21:27:08,208 [INFOR] module:63 - AXFRCheck module took 0.0 seconds found 0 subdomains\n21:27:08,218 [INFOR] module:63 - NSECCheck module took 0.0 seconds found 0 subdomains\n21:27:08,232 [INFOR] module:63 - QueryMX module took 0.0 seconds found 0 subdomains\n21:27:08,232 [INFOR] module:63 - QuerySOA module took 0.0 seconds found 0 subdomains\n21:27:08,233 [INFOR] module:63 - QueryNS module took 0.0 seconds found 0 subdomains\n21:27:08,262 [INFOR] module:63 - QuerySPF module took 0.0 seconds found 0 subdomains\n21:27:08,267 [INFOR] module:63 - QueryTXT module took 0.1 seconds found 0 subdomains\n21:27:08,418 [ERROR] module:129 - HTTPSConnectionPool(host='riddler.io', port=443): Max retries exceeded with url: /search?q=pld%3Avulnweb.com (Caused by NewConnectionError(': Failed to establish a new connection: [Errno -5] No address associated with hostname'))\n21:27:08,419 [INFOR] module:63 - RiddlerQuery module took 0.2 seconds found 0 subdomains\n21:27:08,448 [INFOR] module:63 - CrtshQuery module took 0.3 seconds found 0 subdomains\n21:27:08,537 [ALERT] utils:273 - GET https://www.dnsgrep.cn/subdomain/vulnweb.com 403 - Forbidden 17723\n21:27:08,538 [INFOR] module:63 - DnsgrepQuery module took 0.3 seconds found 0 subdomains\n21:27:08,555 [INFOR] module:63 - SiteDossierQuery module took 0.4 seconds found 4 subdomains\n21:27:08,556 [INFOR] module:63 - CertSpotterQuery module took 0.4 seconds found 0 subdomains\n21:27:08,564 [INFOR] module:63 - ThreatMinerQuery module took 0.4 seconds found 0 subdomains\n21:27:08,646 [INFOR] module:63 - UrlscanQuery module took 0.4 seconds found 8 subdomains\n21:27:08,648 [ALERT] utils:273 - GET https://www.virustotal.com/ui/domains/vulnweb.com/subdomains?limit=40&cursor= 429 - Too Many Requests 181\n21:27:08,648 [ALERT] utils:282 - {'error': {'code': 'RecaptchaRequiredError', 
'message': 'Please re-send request with a valid reCAPTCHA response in the \"x-recaptcha-response\" header'}}\n21:27:08,649 [INFOR] module:63 - CSPCheck module took 0.5 seconds found 0 subdomains\n21:27:08,650 [INFOR] module:63 - VirusTotalQuery module took 0.4 seconds found 0 subdomains\n21:27:08,726 [ALERT] utils:273 - GET https://fullhunt.io/api/v1/domain/vulnweb.com/subdomains 401 - UNAUTHORIZED 50\n21:27:08,726 [ALERT] utils:282 - {'message': 'Unauthorized access', 'success': False}\n21:27:08,728 [INFOR] module:63 - FullHuntAPIQuery module took 0.5 seconds found 0 subdomains\n21:27:08,894 [INFOR] module:63 - AnubisQuery module took 0.7 seconds found 12 subdomains\n21:27:08,895 [ALERT] utils:273 - GET https://transparencyreport.google.com/transparencyreport/api/v3/httpsreport/ct/certsearch?include_expired=true&include_subdomains=true&domain=vulnweb.com 404 - Not Found 1611\n21:27:08,897 [INFOR] module:63 - GoogleQuery module took 0.7 seconds found 0 subdomains\n21:27:08,953 [INFOR] module:63 - HackerTargetQuery module took 0.8 seconds found 22 subdomains\n21:27:09,137 [INFOR] module:63 - AlienVaultQuery module took 0.9 seconds found 19 subdomains\n21:27:09,276 [ALERT] utils:273 - GET https://searchdns.netcraft.com/?restriction=site+contains&position=limited&host=%2A.vulnweb.com&from=1 403 - Forbidden 17830\n21:27:09,277 [INFOR] module:63 - NetCraftQuery module took 1.1 seconds found 0 subdomains\n21:27:09,399 [INFOR] module:63 - AskSearch module took 1.2 seconds found 0 subdomains\n21:27:09,427 [INFOR] module:63 - DNSDumpsterQuery module took 1.2 seconds found 11 subdomains\n21:27:09,572 [ALERT] utils:273 - GET https://ce.baidu.com/index/getRelatedSites?site_address=vulnweb.com 500 - Internal Server Error 0\n21:27:09,573 [INFOR] module:63 - CeBaiduQuery module took 1.4 seconds found 0 subdomains\n21:27:09,666 [ALERT] utils:273 - POST https://www.dnsscan.cn/dns.html?keywords=vulnweb.com&page=1 404 - Not Found 206\n21:27:09,668 [INFOR] module:63 - QianXunQuery module took 1.5 seconds found 0 subdomains\n21:27:09,897 [INFOR] module:63 - BingSearch module took 1.7 seconds found 0 subdomains\n21:27:09,985 [INFOR] module:63 - ChinazQuery module took 1.8 seconds found 2 subdomains\n21:27:10,151 [INFOR] module:63 - MySSLQuery module took 2.0 seconds found 0 subdomains\n21:27:10,355 [INFOR] module:63 - RapidDNSQuery module took 2.2 seconds found 281 subdomains\n21:27:10,428 [INFOR] module:63 - SogouSearch module took 2.2 seconds found 5 subdomains\n21:27:10,760 [INFOR] module:63 - IP138Query module took 2.6 seconds found 16 subdomains\n21:27:10,878 [INFOR] module:63 - GiteeSearch module took 2.7 seconds found 0 subdomains\n21:27:11,603 [ERROR] module:129 - HTTPSConnectionPool(host='api.sublist3r.com', port=443): Max retries exceeded with url: /search.php?domain=vulnweb.com (Caused by SSLError(SSLError(1, '[SSL: TLSV1_ALERT_INTERNAL_ERROR] tlsv1 alert internal error (_ssl.c:997)')))\n21:27:11,604 [INFOR] module:63 - Sublist3rQuery module took 3.4 seconds found 0 subdomains\n21:27:11,627 [INFOR] module:63 - YahooSearch module took 3.4 seconds found 0 subdomains\n21:27:11,922 [INFOR] module:63 - RobtexQuery module took 3.7 seconds found 15 subdomains\n21:27:12,070 [INFOR] module:63 - BaiduSearch module took 3.9 seconds found 0 subdomains\n21:27:12,271 [INFOR] module:63 - WzSearch module took 4.1 seconds found 0 subdomains\n21:27:12,901 [INFOR] module:63 - SoSearch module took 4.7 seconds found 1 subdomains\n21:27:12,985 [ALERT] utils:273 - GET 
https://www.google.com/sorry/index?continue=https://www.google.com/search%3Fq%3Dsite%253A.vulnweb.com%26start%3D1%26num%3D50%26filter%3D0%26btnG%3DSearch%26gbv%3D1%26hl%3Den&hl=en&q=EgRdFhUyGLD-3bYGIjAXsFmbSQzD5Zn_V7wNiBiIu7JIhrukOC5iwq21AZzzrLaQgQb6Du1W_V-vgWEc488yAXJaAUM 429 - Too Many Requests 3385\n21:27:12,987 [INFOR] module:63 - GoogleSearch module took 4.8 seconds found 0 subdomains\n21:27:14,309 [INFOR] module:63 - YandexSearch module took 6.1 seconds found 0 subdomains\n21:27:18,296 [INFOR] module:63 - CertInfo module took 10.1 seconds found 0 subdomains\n21:27:34,346 [INFOR] module:63 - SitemapCheck module took 26.2 seconds found 0 subdomains\n21:27:34,702 [INFOR] module:63 - RobotsCheck module took 26.5 seconds found 0 subdomains\n21:27:34,710 [INFOR] module:63 - CrossDomainCheck module took 26.5 seconds found 0 subdomains\n21:27:34,906 [INFOR] module:63 - BruteSRV module took 0.2 seconds found 0 subdomains\n21:27:34,910 [INFOR] brute:460 - Start running Brute module\n21:27:34,911 [INFOR] brute:410 - Blasting vulnweb.com\n21:27:34,911 [INFOR] utils:174 - /home/rengine/tools/.github/OneForAll/results/temp does not exist, directory will be created\n21:27:34,911 [INFOR] brute:119 - Querying NS records of vulnweb.com\n21:27:34,936 [INFOR] brute:129 - vulnweb.com's authoritative name server is ['ns1.eurodns.com.', 'ns2.eurodns.com.', 'ns3.eurodns.com.', 'ns4.eurodns.com.']\n21:27:34,936 [INFOR] brute:99 - Querying A record from authoritative name server: ['ns1.eurodns.com.', 'ns2.eurodns.com.', 'ns3.eurodns.com.', 'ns4.eurodns.com.']\n21:27:35,027 [INFOR] brute:114 - Authoritative name server A record result: ['199.167.66.107', '104.37.178.107', '199.167.66.108', '104.37.178.108']\n21:27:35,027 [INFOR] wildcard:159 - Collecting wildcard dns record for vulnweb.com\n21:27:35,028 [INFOR] wildcard:128 - Query e5822a54.vulnweb.com 's wildcard dns record in authoritative name server\n21:27:35,042 [INFOR] wildcard:153 - e5822a54.vulnweb.com results on authoritative name server: e5822a54.vulnweb.com. IP: {'44.228.249.3'} TTL: 3600\n21:27:35,043 [INFOR] wildcard:128 - Query 2d14ad45.vulnweb.com 's wildcard dns record in authoritative name server\n21:27:35,058 [INFOR] wildcard:153 - 2d14ad45.vulnweb.com results on authoritative name server: 2d14ad45.vulnweb.com. 
IP: {'44.228.249.3'} TTL: 3600\n21:27:35,059 [INFOR] brute:347 - Generating dictionary for vulnweb.com\n21:27:35,131 [ALERT] utils:695 - Please check whether sol.vulnweb.com is correct or not\n21:27:35,133 [INFOR] brute:365 - Dictionary size: 95247\n21:27:35,165 [INFOR] brute:441 - Running massdns to brute subdomains\n21:27:39,270 [INFOR] brute:197 - Counting IP cname appear times\n21:27:39,510 [INFOR] brute:238 - Processing result\n21:27:40,095 [ALERT] brute:451 - Brute module takes 5.2 seconds, found 4 subdomains of vulnweb.com\n21:27:40,109 [INFOR] brute:489 - Finished Brute module to brute vulnweb.com\n21:27:40,125 [INFOR] resolve:143 - Start resolving subdomains of vulnweb.com\n21:27:40,144 [INFOR] resolve:166 - Running massdns to resolve subdomains\n21:27:42,189 [INFOR] resolve:104 - Processing resolved results\n21:27:42,192 [INFOR] resolve:172 - Finished resolve subdomains of vulnweb.com\n21:27:42,192 [INFOR] resolve:61 - Saving resolved results\n21:27:42,236 [INFOR] request:258 - Start requesting subdomains of vulnweb.com\n21:27:42,236 [INFOR] request:39 - Port range:[80, 443]\n21:27:42,237 [INFOR] request:56 - Generating request urls\n21:27:42,237 [INFOR] request:217 - Requesting urls in bulk\n\n0it [00:00, ?it/s]\nRequest Progress: 88%|███████████████████████▋ | 7/8 [00:00<00:00, 70.00it/s]\nRequest Progress: 9it [00:13, 1.46s/it]\n21:27:55,384 [INFOR] request:264 - Found that vulnweb.com has 3 alive subdomains\n21:27:55,384 [INFOR] finder:23 - Start Finder module\n\n0it [00:00, ?it/s]21:27:55,387 [INFOR] request:217 - Requesting urls in bulk\n\nRequest Progress: 1it [00:00, 4165.15it/s]\n21:27:55,389 [INFOR] module:63 - Finder module took 0.0 seconds found 1 subdomains\n21:27:55,389 [INFOR] resolve:143 - Start resolving subdomains of vulnweb.com\n21:27:55,409 [INFOR] resolve:166 - Running massdns to resolve subdomains\n21:27:55,436 [INFOR] resolve:104 - Processing resolved results\n\n0it [00:00, ?it/s]21:27:55,436 [INFOR] resolve:172 - Finished resolve subdomains of vulnweb.com\n21:27:55,437 [INFOR] request:258 - Start requesting subdomains of vulnweb.com\n21:27:55,437 [INFOR] request:39 - Port range:[80, 443]\n21:27:55,437 [INFOR] request:56 - Generating request urls\n21:27:55,437 [INFOR] request:217 - Requesting urls in bulk\n\nRequest Progress: 3it [00:13, 4.36s/it]\n21:28:08,520 [INFOR] request:264 - Found that vulnweb.com has 4 alive subdomains\n21:28:08,520 [INFOR] altdns:203 - Start altdns module\n\n0it [00:00, ?it/s]21:28:08,529 [INFOR] altdns:210 - The altdns module generated 0 new subdomains\n21:28:08,529 [INFOR] resolve:143 - Start resolving subdomains of vulnweb.com\n21:28:08,529 [INFOR] request:258 - Start requesting subdomains of vulnweb.com\n21:28:08,530 [INFOR] request:39 - Port range:[80, 443]\n21:28:08,530 [INFOR] request:56 - Generating request urls\n21:28:08,530 [INFOR] request:217 - Requesting urls in bulk\n\nRequest Progress: 1it [00:00, 391.52it/s]\n21:28:08,534 [INFOR] request:264 - Found that vulnweb.com has 4 alive subdomains\n21:28:08,877 [ALERT] export:66 - The subdomain result for vulnweb.com: /home/rengine/tools/.github/OneForAll/results/vulnweb.com.csv\n21:28:08,878 [INFOR] oneforall:255 - Finished OneForAll", + "time": "2024-09-03T21:27:04.860Z" + } +}, +{ + "model": "startScan.command", + "pk": 32, + "fields": { + "scan_history": 1, + "activity": 1, + "command": "netlas search -d domain -i domain domain:\"*.vulnweb.com\" -f json | grep -oE '([a-zA-Z0-9]([-a-zA-Z0-9]*[a-zA-Z0-9])?\\.)+vulnweb.com' > 
/home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/subdomains_netlas.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:28:08.941Z" + } +}, +{ + "model": "startScan.command", + "pk": 33, + "fields": { + "scan_history": 1, + "activity": 1, + "command": "cat /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/subdomains_*.txt > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/#1_subdomain_discovery.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:28:09.587Z" + } +}, +{ + "model": "startScan.command", + "pk": 34, + "fields": { + "scan_history": 1, + "activity": 1, + "command": "sort -u /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/#1_subdomain_discovery.txt -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/#1_subdomain_discovery.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:28:09.592Z" + } +}, +{ + "model": "startScan.command", + "pk": 35, + "fields": { + "scan_history": 1, + "activity": 3, + "command": "httpx -cl -ct -rt -location -td -websocket -cname -asn -cdn -probe -random-agent -t 30 -json -l /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/httpx_input.txt -silent", + "return_code": 0, + "output": "{\"timestamp\":\"2024-09-03T21:28:10.479003156Z\",\"url\":\"http://localhost.vulnweb.com\",\"input\":\"localhost.vulnweb.com\",\"error\":\"connection refused\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.770588207Z\",\"url\":\"http://edu-rost.ruwww.vulnweb.com\",\"input\":\"edu-rost.ruwww.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.770698494Z\",\"url\":\"http://test.vulnweb.com\",\"input\":\"test.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.772639921Z\",\"url\":\"http://tetphp.vulnweb.com\",\"input\":\"tetphp.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.773795126Z\",\"url\":\"http://blogger.com.vulnweb.com\",\"input\":\"blogger.com.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.774028924Z\",\"url\":\"http://testaps.vulnweb.com\",\"input\":\"testaps.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.774314729Z\",\"url\":\"http://test.php.vulnweb.com\",\"input\":\"test.php.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.778213954Z\",\"url\":\"http://u003erest.vulnweb.com\",\"input\":\"u003erest.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.779635989Z\",\"url\":\"http://u003etestasp.vulnweb.com\",\"input\":\"u003etestasp.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.779901386Z\",\"url\":\"http://5burl-3dhttp-3a-2f-2fwww.vulnweb.com\",\"input\":\"5burl-3dhttp-3a-2f-2fwww.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.782169265Z\",\"url\":\"http://testap.vulnweb.com\",\"input\":\"testap.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.783045016Z\",\"url\":\"http://testpphp.vulnweb.com\"
,\"input\":\"testpphp.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.783385584Z\",\"port\":\"80\",\"url\":\"http://testphp.vulnweb.com\",\"input\":\"testphp.vulnweb.com\",\"title\":\"Home of Acunetix Art\",\"scheme\":\"http\",\"webserver\":\"nginx/1.19.0\",\"content_type\":\"text/html\",\"method\":\"GET\",\"host\":\"44.228.249.3\",\"path\":\"/\",\"time\":\"345.110429ms\",\"a\":[\"44.228.249.3\"],\"tech\":[\"DreamWeaver\",\"Nginx:1.19.0\",\"PHP:5.6.40\",\"Ubuntu\"],\"words\":514,\"lines\":110,\"status_code\":200,\"content_length\":4958,\"failed\":false,\"knowledgebase\":{\"PageType\":\"nonerror\",\"pHash\":0}}\n{\"timestamp\":\"2024-09-03T21:28:20.783891311Z\",\"port\":\"80\",\"url\":\"http://testhtml5.vulnweb.com\",\"input\":\"testhtml5.vulnweb.com\",\"title\":\"SecurityTweets - HTML5 test website for Acunetix Web Vulnerability Scanner\",\"scheme\":\"http\",\"webserver\":\"nginx/1.19.0\",\"content_type\":\"text/html\",\"method\":\"GET\",\"host\":\"44.228.249.3\",\"path\":\"/\",\"time\":\"342.381867ms\",\"a\":[\"44.228.249.3\"],\"tech\":[\"AngularJS\",\"Nginx:1.19.0\"],\"words\":1483,\"lines\":164,\"status_code\":200,\"content_length\":6940,\"failed\":false,\"knowledgebase\":{\"PageType\":\"nonerror\",\"pHash\":0}}\n{\"timestamp\":\"2024-09-03T21:28:20.79672208Z\",\"url\":\"http://testapsnet.vulnweb.com\",\"input\":\"testapsnet.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.797297117Z\",\"url\":\"http://antivirus1.vulnweb.com\",\"input\":\"antivirus1.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.797526076Z\",\"url\":\"http://testaspx.vulnweb.com\",\"input\":\"testaspx.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.798569381Z\",\"port\":\"80\",\"url\":\"http://testasp.vulnweb.com\",\"input\":\"testasp.vulnweb.com\",\"title\":\"acuforum forums\",\"scheme\":\"http\",\"webserver\":\"Microsoft-IIS/8.5\",\"content_type\":\"text/html\",\"method\":\"GET\",\"host\":\"44.238.29.244\",\"path\":\"/\",\"time\":\"361.175397ms\",\"a\":[\"44.238.29.244\"],\"tech\":[\"DreamWeaver\",\"IIS:8.5\",\"Microsoft ASP.NET\",\"Windows 
Server\"],\"words\":328,\"lines\":46,\"status_code\":200,\"content_length\":3537,\"failed\":false,\"knowledgebase\":{\"PageType\":\"nonerror\",\"pHash\":0}}\n{\"timestamp\":\"2024-09-03T21:28:20.801057773Z\",\"url\":\"http://httptestaspnet.vulnweb.com\",\"input\":\"httptestaspnet.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.805003305Z\",\"url\":\"http://ttestphp.vulnweb.com\",\"input\":\"ttestphp.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.805102451Z\",\"url\":\"http://2f-2fwww.vulnweb.com\",\"input\":\"2f-2fwww.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.806546145Z\",\"url\":\"http://odincovo.vulnweb.com\",\"input\":\"odincovo.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.808176239Z\",\"url\":\"http://2fwww.vulnweb.com\",\"input\":\"2fwww.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.808798495Z\",\"url\":\"http://7ctestasp.vulnweb.com\",\"input\":\"7ctestasp.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.80931386Z\",\"url\":\"http://estphp.vulnweb.com\",\"input\":\"estphp.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.812391837Z\",\"url\":\"http://restasp.vulnweb.com\",\"input\":\"restasp.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.814600876Z\",\"url\":\"http://2ftestasp.vulnweb.com\",\"input\":\"2ftestasp.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.822120875Z\",\"url\":\"http://virus.vulnweb.com\",\"input\":\"virus.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.83703144Z\",\"url\":\"http://edu-rost.rutestasp.vulnweb.com\",\"input\":\"edu-rost.rutestasp.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.856906486Z\",\"port\":\"80\",\"url\":\"http://rest.vulnweb.com\",\"input\":\"rest.vulnweb.com\",\"title\":\"Acunetix Vulnerable REST API\",\"scheme\":\"http\",\"webserver\":\"Apache/2.4.25 (Debian)\",\"content_type\":\"text/html\",\"method\":\"GET\",\"host\":\"35.81.188.86\",\"path\":\"/\",\"time\":\"422.205408ms\",\"a\":[\"35.81.188.86\"],\"tech\":[\"Apache HTTP Server:2.4.25\",\"Debian\",\"PHP:7.1.26\"],\"words\":1397,\"lines\":138,\"status_code\":200,\"content_length\":3555,\"failed\":false,\"knowledgebase\":{\"PageType\":\"nonerror\",\"pHash\":0}}\n{\"timestamp\":\"2024-09-03T21:28:20.992940938Z\",\"port\":\"80\",\"url\":\"http://testaspnet.vulnweb.com\",\"input\":\"testaspnet.vulnweb.com\",\"title\":\"acublog news\",\"scheme\":\"http\",\"webserver\":\"Microsoft-IIS/8.5\",\"content_type\":\"text/html\",\"method\":\"GET\",\"host\":\"44.238.29.244\",\"path\":\"/\",\"time\":\"535.234499ms\",\"a\":[\"44.238.29.244\"],\"tech\":[\"IIS:8.5\",\"Microsoft ASP.NET:2.0.50727\",\"Microsoft Visual Studio\",\"Windows 
Server\"],\"words\":774,\"lines\":89,\"status_code\":200,\"content_length\":14082,\"failed\":false,\"knowledgebase\":{\"PageType\":\"other\",\"pHash\":0}}\n{\"timestamp\":\"2024-09-03T21:28:21.214887749Z\",\"url\":\"http://www.testphp.vulnweb.com\",\"input\":\"www.testphp.vulnweb.com\",\"error\":\"no address found for host\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:21.224643226Z\",\"url\":\"http://www.testasp.vulnweb.com\",\"input\":\"www.testasp.vulnweb.com\",\"error\":\"no address found for host\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:31.115507511Z\",\"url\":\"http://www.test.php.vulnweb.com\",\"input\":\"www.test.php.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:31.121278393Z\",\"url\":\"http://www.virus.vulnweb.com\",\"input\":\"www.virus.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:31.13080372Z\",\"port\":\"80\",\"url\":\"http://vulnweb.com\",\"input\":\"vulnweb.com\",\"title\":\"Acunetix Web Vulnerability Scanner - Test websites\",\"scheme\":\"http\",\"webserver\":\"nginx/1.19.0\",\"content_type\":\"text/html\",\"method\":\"GET\",\"host\":\"44.228.249.3\",\"path\":\"/\",\"time\":\"352.853777ms\",\"a\":[\"44.228.249.3\"],\"tech\":[\"Nginx:1.19.0\"],\"words\":482,\"lines\":74,\"status_code\":200,\"content_length\":4018,\"failed\":false,\"knowledgebase\":{\"PageType\":\"nonerror\",\"pHash\":0}}\n{\"timestamp\":\"2024-09-03T21:28:31.13762029Z\",\"port\":\"80\",\"url\":\"http://www.vulnweb.com\",\"input\":\"www.vulnweb.com\",\"title\":\"Acunetix Web Vulnerability Scanner - Test websites\",\"scheme\":\"http\",\"webserver\":\"nginx/1.19.0\",\"content_type\":\"text/html\",\"method\":\"GET\",\"host\":\"44.228.249.3\",\"path\":\"/\",\"time\":\"352.635869ms\",\"a\":[\"44.228.249.3\"],\"tech\":[\"Nginx:1.19.0\"],\"words\":482,\"lines\":74,\"status_code\":200,\"content_length\":4018,\"failed\":false,\"knowledgebase\":{\"PageType\":\"nonerror\",\"pHash\":0}}\n{\"timestamp\":\"2024-09-03T21:28:31.142479444Z\",\"url\":\"http://viruswall.vulnweb.com\",\"input\":\"viruswall.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n", + "time": "2024-09-03T21:28:09.749Z" + } +}, +{ + "model": "startScan.command", + "pk": 36, + "fields": { + "scan_history": null, + "activity": null, + "command": "geoiplookup 44.238.29.244", + "return_code": null, + "output": null, + "time": "2024-09-03T21:28:20.894Z" + } +}, +{ + "model": "startScan.command", + "pk": 37, + "fields": { + "scan_history": null, + "activity": null, + "command": "geoiplookup 35.81.188.86", + "return_code": null, + "output": null, + "time": "2024-09-03T21:28:20.945Z" + } +}, +{ + "model": "startScan.command", + "pk": 38, + "fields": { + "scan_history": 1, + "activity": 3, + "command": "rm /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/httpx_input.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:28:31.181Z" + } +}, +{ + "model": "startScan.command", + "pk": 39, + "fields": { + "scan_history": 1, + "activity": 4, + "command": "naabu -json -exclude-cdn -list /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/input_subdomains_port_scan.txt -top-ports 100 -c 30 -rate 150 -timeout 5000 -silent", + "return_code": 0, + "output": "\n__\n___ ___ ___ _/ / __ __\n/ _ \\/ _ \\/ _ \\/ _ \\/ // /\n/_//_/\\_,_/\\_,_/_.__/\\_,_/\n\nprojectdiscovery.io\n\n[INF] 
Current naabu version 2.3.0 (outdated)\n{\"host\":\"rest.vulnweb.com\",\"ip\":\"35.81.188.86\",\"timestamp\":\"2024-09-03T21:28:34.852197905Z\",\"port\":8080,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"testasp.vulnweb.com\",\"ip\":\"44.238.29.244\",\"timestamp\":\"2024-09-03T21:28:38.833012077Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"testaspnet.vulnweb.com\",\"ip\":\"44.238.29.244\",\"timestamp\":\"2024-09-03T21:28:38.833058584Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"rest.vulnweb.com\",\"ip\":\"35.81.188.86\",\"timestamp\":\"2024-09-03T21:28:39.816853087Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"2f-2fwww.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846295137Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"2ftestasp.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846355841Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"5burl-3dhttp-3a-2f-2fwww.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846373614Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"7ctestasp.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846384815Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"antivirus1.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846391618Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"blogger.com.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846396848Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"edu-rost.rutestasp.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846400745Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"edu-rost.ruwww.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846405213Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"estphp.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846411675Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"httptestaspnet.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846415963Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"odincovo.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846419801Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"restasp.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846423748Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"test.php.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846428026Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"test.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846438285Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"testap.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846442483Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"testaps.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.84644639Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"testapsnet.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846450137Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"testaspx.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846456559Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"testhtml5.vulnwe
b.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846461439Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"testphp.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846469183Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"testpphp.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846474944Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"tetphp.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846479302Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"ttestphp.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.84648337Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"u003erest.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846487688Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"u003etestasp.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846492607Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"virus.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.8464998Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"viruswall.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846504209Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"www.test.php.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846508697Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"www.virus.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846512795Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n", + "time": "2024-09-03T21:28:31.494Z" + } +}, +{ + "model": "startScan.command", + "pk": 40, + "fields": { + "scan_history": 1, + "activity": null, + "command": "httpx -cl -ct -rt -location -td -websocket -cname -asn -cdn -probe -random-agent -t 1 -json -u rest.vulnweb.com:8080 -silent", + "return_code": 0, + "output": "{\"timestamp\":\"2024-09-03T21:28:36.659376329Z\",\"port\":\"8080\",\"url\":\"http://rest.vulnweb.com:8080\",\"input\":\"rest.vulnweb.com:8080\",\"scheme\":\"http\",\"webserver\":\"Apache/2.4.25 (Debian)\",\"content_type\":\"application/json\",\"method\":\"GET\",\"host\":\"35.81.188.86\",\"path\":\"/\",\"time\":\"353.948066ms\",\"a\":[\"35.81.188.86\"],\"tech\":[\"Apache HTTP Server:2.4.25\",\"Debian\",\"PHP:7.1.26\"],\"words\":4,\"lines\":1,\"status_code\":200,\"content_length\":36,\"failed\":false,\"knowledgebase\":{\"PageType\":\"error\",\"pHash\":0}}\n", + "time": "2024-09-03T21:28:34.882Z" + } +}, +{ + "model": "startScan.command", + "pk": 41, + "fields": { + "scan_history": 1, + "activity": null, + "command": "rm /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/httpx_input.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:28:36.698Z" + } +}, +{ + "model": "startScan.command", + "pk": 42, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://rest.vulnweb.com\" | gospider --js -d 2 --sitemap --robots -w -r -a -t 30 --no-redirect | grep -Eo 'https?://rest\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_gospider_rest.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:11.700Z" + } +}, +{ + "model": "startScan.command", + "pk": 43, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo 
\"http://rest.vulnweb.com\" | hakrawler -subs -u -t 30 -dr | grep -Eo 'https?://rest\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_hakrawler_rest.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:11.724Z" + } +}, +{ + "model": "startScan.command", + "pk": 44, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://rest.vulnweb.com\" | waybackurls | grep -Eo 'https?://rest\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_waybackurls_rest.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:11.740Z" + } +}, +{ + "model": "startScan.command", + "pk": 45, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://rest.vulnweb.com\" | katana -silent -jc -kf all -d 3 -fs rdn -c 30 -dr | grep -Eo 'https?://rest\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_katana_rest.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:11.756Z" + } +}, +{ + "model": "startScan.command", + "pk": 46, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://rest.vulnweb.com\" | gau --threads 30 | grep -Eo 'https?://rest\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_gau_rest.vulnweb.com.txt", + "return_code": 1, + "output": "", + "time": "2024-09-03T21:29:11.771Z" + } +}, +{ + "model": "startScan.command", + "pk": 47, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testasp.vulnweb.com\" | gospider --js -d 2 --sitemap --robots -w -r -a -t 30 --no-redirect | grep -Eo 'https?://testasp\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_gospider_testasp.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:11.789Z" + } +}, +{ + "model": "startScan.command", + "pk": 48, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testasp.vulnweb.com\" | hakrawler -subs -u -t 30 -dr | grep -Eo 'https?://testasp\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_hakrawler_testasp.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:11.807Z" + } +}, +{ + "model": "startScan.command", + "pk": 49, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testasp.vulnweb.com\" | waybackurls | grep -Eo 'https?://testasp\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_waybackurls_testasp.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:11.822Z" + } +}, +{ + "model": "startScan.command", + "pk": 50, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testasp.vulnweb.com\" | katana -silent -jc -kf all -d 3 -fs rdn -c 30 -dr | grep -Eo 'https?://testasp\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_katana_testasp.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:11.836Z" + } +}, +{ + "model": "startScan.command", + "pk": 51, + "fields": { + "scan_history": 1, + 
"activity": 5, + "command": "echo \"http://testasp.vulnweb.com\" | gau --threads 30 | grep -Eo 'https?://testasp\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_gau_testasp.vulnweb.com.txt", + "return_code": 1, + "output": "", + "time": "2024-09-03T21:29:11.849Z" + } +}, +{ + "model": "startScan.command", + "pk": 52, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testaspnet.vulnweb.com\" | gospider --js -d 2 --sitemap --robots -w -r -a -t 30 --no-redirect | grep -Eo 'https?://testaspnet\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_gospider_testaspnet.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:11.863Z" + } +}, +{ + "model": "startScan.command", + "pk": 53, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testaspnet.vulnweb.com\" | hakrawler -subs -u -t 30 -dr | grep -Eo 'https?://testaspnet\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_hakrawler_testaspnet.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:11.878Z" + } +}, +{ + "model": "startScan.command", + "pk": 54, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testaspnet.vulnweb.com\" | waybackurls | grep -Eo 'https?://testaspnet\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_waybackurls_testaspnet.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:11.892Z" + } +}, +{ + "model": "startScan.command", + "pk": 55, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testaspnet.vulnweb.com\" | katana -silent -jc -kf all -d 3 -fs rdn -c 30 -dr | grep -Eo 'https?://testaspnet\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_katana_testaspnet.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:11.906Z" + } +}, +{ + "model": "startScan.command", + "pk": 56, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testaspnet.vulnweb.com\" | gau --threads 30 | grep -Eo 'https?://testaspnet\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_gau_testaspnet.vulnweb.com.txt", + "return_code": 1, + "output": "", + "time": "2024-09-03T21:29:11.921Z" + } +}, +{ + "model": "startScan.command", + "pk": 57, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testhtml5.vulnweb.com\" | gospider --js -d 2 --sitemap --robots -w -r -a -t 30 --no-redirect | grep -Eo 'https?://testhtml5\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_gospider_testhtml5.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:11.935Z" + } +}, +{ + "model": "startScan.command", + "pk": 58, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testhtml5.vulnweb.com\" | hakrawler -subs -u -t 30 -dr | grep -Eo 'https?://testhtml5\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_hakrawler_testhtml5.vulnweb.com.txt", + "return_code": 0, + 
"output": "", + "time": "2024-09-03T21:29:11.952Z" + } +}, +{ + "model": "startScan.command", + "pk": 59, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testhtml5.vulnweb.com\" | waybackurls | grep -Eo 'https?://testhtml5\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_waybackurls_testhtml5.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:11.966Z" + } +}, +{ + "model": "startScan.command", + "pk": 60, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testhtml5.vulnweb.com\" | katana -silent -jc -kf all -d 3 -fs rdn -c 30 -dr | grep -Eo 'https?://testhtml5\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_katana_testhtml5.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:11.981Z" + } +}, +{ + "model": "startScan.command", + "pk": 61, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testhtml5.vulnweb.com\" | gau --threads 30 | grep -Eo 'https?://testhtml5\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_gau_testhtml5.vulnweb.com.txt", + "return_code": 1, + "output": "", + "time": "2024-09-03T21:29:11.995Z" + } +}, +{ + "model": "startScan.command", + "pk": 62, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testphp.vulnweb.com\" | gospider --js -d 2 --sitemap --robots -w -r -a -t 30 --no-redirect | grep -Eo 'https?://testphp\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_gospider_testphp.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:12.011Z" + } +}, +{ + "model": "startScan.command", + "pk": 63, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testphp.vulnweb.com\" | hakrawler -subs -u -t 30 -dr | grep -Eo 'https?://testphp\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_hakrawler_testphp.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:12.028Z" + } +}, +{ + "model": "startScan.command", + "pk": 64, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testphp.vulnweb.com\" | waybackurls | grep -Eo 'https?://testphp\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_waybackurls_testphp.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:12.042Z" + } +}, +{ + "model": "startScan.command", + "pk": 65, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testphp.vulnweb.com\" | katana -silent -jc -kf all -d 3 -fs rdn -c 30 -dr | grep -Eo 'https?://testphp\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_katana_testphp.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:12.057Z" + } +}, +{ + "model": "startScan.command", + "pk": 66, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testphp.vulnweb.com\" | gau --threads 30 | grep -Eo 'https?://testphp\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > 
/home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_gau_testphp.vulnweb.com.txt", + "return_code": 1, + "output": "", + "time": "2024-09-03T21:29:12.072Z" + } +}, +{ + "model": "startScan.command", + "pk": 67, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://vulnweb.com\" | gospider --js -d 2 --sitemap --robots -w -r -a -t 30 --no-redirect | grep -Eo 'https?://vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_gospider_vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:12.087Z" + } +}, +{ + "model": "startScan.command", + "pk": 68, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://vulnweb.com\" | hakrawler -subs -u -t 30 -dr | grep -Eo 'https?://vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_hakrawler_vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:12.100Z" + } +}, +{ + "model": "startScan.command", + "pk": 69, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://vulnweb.com\" | waybackurls | grep -Eo 'https?://vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_waybackurls_vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:12.113Z" + } +}, +{ + "model": "startScan.command", + "pk": 70, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://vulnweb.com\" | katana -silent -jc -kf all -d 3 -fs rdn -c 30 -dr | grep -Eo 'https?://vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_katana_vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:12.128Z" + } +}, +{ + "model": "startScan.command", + "pk": 71, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://vulnweb.com\" | gau --threads 30 | grep -Eo 'https?://vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_gau_vulnweb.com.txt", + "return_code": 1, + "output": "", + "time": "2024-09-03T21:29:12.143Z" + } +}, +{ + "model": "startScan.command", + "pk": 72, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://www.vulnweb.com\" | gospider --js -d 2 --sitemap --robots -w -r -a -t 30 --no-redirect | grep -Eo 'https?://www\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_gospider_www.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:12.159Z" + } +}, +{ + "model": "startScan.command", + "pk": 73, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://www.vulnweb.com\" | hakrawler -subs -u -t 30 -dr | grep -Eo 'https?://www\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_hakrawler_www.vulnweb.com.txt", + "return_code": 1, + "output": "", + "time": "2024-09-03T21:29:12.176Z" + } +}, +{ + "model": "startScan.command", + "pk": 74, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://www.vulnweb.com\" | waybackurls | grep -Eo 'https?://www\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > 
/home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_waybackurls_www.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:12.191Z" + } +}, +{ + "model": "startScan.command", + "pk": 75, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://www.vulnweb.com\" | katana -silent -jc -kf all -d 3 -fs rdn -c 30 -dr | grep -Eo 'https?://www\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_katana_www.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:12.207Z" + } +}, +{ + "model": "startScan.command", + "pk": 76, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://www.vulnweb.com\" | gau --threads 30 | grep -Eo 'https?://www\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_gau_www.vulnweb.com.txt", + "return_code": 1, + "output": "", + "time": "2024-09-03T21:29:12.222Z" + } +}, +{ + "model": "startScan.command", + "pk": 77, + "fields": { + "scan_history": null, + "activity": null, + "command": "tlsx -san -cn -silent -ro -host vulnweb.com -o /tmp/ip_domain_tlsx.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:33:01.109Z" + } +}, +{ + "model": "startScan.command", + "pk": 78, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "cat /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_* > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/#1_fetch_url.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:35:38.599Z" + } +}, +{ + "model": "startScan.command", + "pk": 79, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "cat /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/input_endpoints_fetch_url.txt >> /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/#1_fetch_url.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:35:38.631Z" + } +}, +{ + "model": "startScan.command", + "pk": 80, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "sort -u /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/#1_fetch_url.txt -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/#1_fetch_url.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:35:38.659Z" + } +}, +{ + "model": "startScan.command", + "pk": 81, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "cat /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/#1_fetch_url.txt | grep -Eiv \"\\.(png|jpg|jpeg|gif|mp4|mpeg|mp3).*\" > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_filtered.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:35:38.696Z" + } +}, +{ + "model": "startScan.command", + "pk": 82, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "mv /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_filtered.txt /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/#1_fetch_url.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:35:38.727Z" + } +}, +{ + "model": "startScan.command", + "pk": 106, + "fields": { + "scan_history": 1, + "activity": 14, + "command": "nuclei -j -irr -l 
/home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_unfurled.txt -c 50 -retries 1 -rl 150 -timeout 5 -silent -t /home/rengine/nuclei-templates -severity medium", + "return_code": 0, + "output": null, + "time": "2024-09-03T22:02:49.873Z" + } +}, +{ + "model": "startScan.command", + "pk": 117, + "fields": { + "scan_history": 1, + "activity": null, + "command": "httpx -cl -ct -rt -location -td -websocket -cname -asn -cdn -probe -random-agent -t 7 -json -l /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/httpx_input.txt -silent", + "return_code": 0, + "output": "{\"timestamp\":\"2024-09-03T22:13:05.027538389Z\",\"port\":\"80\",\"url\":\"http://vulnweb.com\",\"input\":\"http://vulnweb.com\",\"title\":\"Acunetix Web Vulnerability Scanner - Test websites\",\"scheme\":\"http\",\"webserver\":\"nginx/1.19.0\",\"content_type\":\"text/html\",\"method\":\"GET\",\"host\":\"44.228.249.3\",\"path\":\"/\",\"time\":\"341.066989ms\",\"a\":[\"44.228.249.3\"],\"tech\":[\"Nginx:1.19.0\"],\"words\":482,\"lines\":74,\"status_code\":200,\"content_length\":4018,\"failed\":false,\"knowledgebase\":{\"PageType\":\"nonerror\",\"pHash\":0}}\n{\"timestamp\":\"2024-09-03T22:13:05.034995861Z\",\"port\":\"80\",\"url\":\"http://www.vulnweb.com\",\"input\":\"http://www.vulnweb.com\",\"title\":\"Acunetix Web Vulnerability Scanner - Test websites\",\"scheme\":\"http\",\"webserver\":\"nginx/1.19.0\",\"content_type\":\"text/html\",\"method\":\"GET\",\"host\":\"44.228.249.3\",\"path\":\"/\",\"time\":\"347.273586ms\",\"a\":[\"44.228.249.3\"],\"tech\":[\"Nginx:1.19.0\"],\"words\":482,\"lines\":74,\"status_code\":200,\"content_length\":4018,\"failed\":false,\"knowledgebase\":{\"PageType\":\"nonerror\",\"pHash\":0}}\n{\"timestamp\":\"2024-09-03T22:13:05.035316875Z\",\"port\":\"80\",\"url\":\"http://testasp.vulnweb.com\",\"input\":\"http://testasp.vulnweb.com\",\"title\":\"acuforum forums\",\"scheme\":\"http\",\"webserver\":\"Microsoft-IIS/8.5\",\"content_type\":\"text/html\",\"method\":\"GET\",\"host\":\"44.238.29.244\",\"path\":\"/\",\"time\":\"347.420538ms\",\"a\":[\"44.238.29.244\"],\"tech\":[\"DreamWeaver\",\"IIS:8.5\",\"Microsoft ASP.NET\",\"Windows Server\"],\"words\":328,\"lines\":46,\"status_code\":200,\"content_length\":3538,\"failed\":false,\"knowledgebase\":{\"PageType\":\"nonerror\",\"pHash\":0}}\n{\"timestamp\":\"2024-09-03T22:13:05.036463189Z\",\"port\":\"80\",\"url\":\"http://testhtml5.vulnweb.com\",\"input\":\"http://testhtml5.vulnweb.com\",\"title\":\"SecurityTweets - HTML5 test website for Acunetix Web Vulnerability Scanner\",\"scheme\":\"http\",\"webserver\":\"nginx/1.19.0\",\"content_type\":\"text/html\",\"method\":\"GET\",\"host\":\"44.228.249.3\",\"path\":\"/\",\"time\":\"342.066622ms\",\"a\":[\"44.228.249.3\"],\"tech\":[\"AngularJS\",\"Nginx:1.19.0\"],\"words\":1483,\"lines\":164,\"status_code\":200,\"content_length\":6940,\"failed\":false,\"knowledgebase\":{\"PageType\":\"nonerror\",\"pHash\":0}}\n{\"timestamp\":\"2024-09-03T22:13:05.053531252Z\",\"port\":\"80\",\"url\":\"http://rest.vulnweb.com\",\"input\":\"http://rest.vulnweb.com\",\"title\":\"Acunetix Vulnerable REST API\",\"scheme\":\"http\",\"webserver\":\"Apache/2.4.25 (Debian)\",\"content_type\":\"text/html\",\"method\":\"GET\",\"host\":\"35.81.188.86\",\"path\":\"/\",\"time\":\"367.185091ms\",\"a\":[\"35.81.188.86\"],\"tech\":[\"Apache HTTP 
Server:2.4.25\",\"Debian\",\"PHP:7.1.26\"],\"words\":1397,\"lines\":138,\"status_code\":200,\"content_length\":3555,\"failed\":false,\"knowledgebase\":{\"PageType\":\"nonerror\",\"pHash\":0}}\n{\"timestamp\":\"2024-09-03T22:13:05.066012087Z\",\"port\":\"80\",\"url\":\"http://testphp.vulnweb.com\",\"input\":\"http://testphp.vulnweb.com\",\"title\":\"Home of Acunetix Art\",\"scheme\":\"http\",\"webserver\":\"nginx/1.19.0\",\"content_type\":\"text/html\",\"method\":\"GET\",\"host\":\"44.228.249.3\",\"path\":\"/\",\"time\":\"374.757261ms\",\"a\":[\"44.228.249.3\"],\"tech\":[\"DreamWeaver\",\"Nginx:1.19.0\",\"PHP:5.6.40\",\"Ubuntu\"],\"words\":514,\"lines\":110,\"status_code\":200,\"content_length\":4958,\"failed\":false,\"knowledgebase\":{\"PageType\":\"nonerror\",\"pHash\":0}}\n{\"timestamp\":\"2024-09-03T22:13:05.228722074Z\",\"port\":\"80\",\"url\":\"http://testaspnet.vulnweb.com\",\"input\":\"http://testaspnet.vulnweb.com\",\"title\":\"acublog news\",\"scheme\":\"http\",\"webserver\":\"Microsoft-IIS/8.5\",\"content_type\":\"text/html\",\"method\":\"GET\",\"host\":\"44.238.29.244\",\"path\":\"/\",\"time\":\"513.247119ms\",\"a\":[\"44.238.29.244\"],\"tech\":[\"IIS:8.5\",\"Microsoft ASP.NET:2.0.50727\",\"Microsoft Visual Studio\",\"Windows Server\"],\"words\":774,\"lines\":89,\"status_code\":200,\"content_length\":14081,\"failed\":false,\"knowledgebase\":{\"PageType\":\"other\",\"pHash\":0}}\n", + "time": "2024-09-03T22:12:53.901Z" + } +}, +{ + "model": "startScan.command", + "pk": 118, + "fields": { + "scan_history": 1, + "activity": null, + "command": "rm /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/httpx_input.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T22:13:05.280Z" + } +}, +{ + "model": "startScan.command", + "pk": 119, + "fields": { + "scan_history": null, + "activity": null, + "command": "gf -list", + "return_code": 0, + "output": "\naws-keys\nbase64\ncors\ndebug-pages\ndebug_logic\nfirebase\nfw\ngo-functions\nhttp-auth\nidor\nimg-traversal\ninterestingEXT\ninterestingparams\ninterestingsubs\nip\njson-sec\njsvar\nlfi\nmeg-headers\nphp-curl\nphp-errors\nphp-serialized\nphp-sinks\nphp-sources\nrce\nredirect\ns3-buckets\nsec\nservers\nsqli\nssrf\nssti\nstrings\ntakeovers\nupload-fields\nurls\nxss", + "time": "2024-09-03T23:36:10.771Z" + } +}, +{ + "model": "startScan.technology", + "pk": 1, + "fields": { + "name": "DreamWeaver" + } +}, +{ + "model": "startScan.technology", + "pk": 2, + "fields": { + "name": "Nginx:1.19.0" + } +}, +{ + "model": "startScan.technology", + "pk": 3, + "fields": { + "name": "PHP:5.6.40" + } +}, +{ + "model": "startScan.technology", + "pk": 4, + "fields": { + "name": "Ubuntu" + } +}, +{ + "model": "startScan.technology", + "pk": 5, + "fields": { + "name": "AngularJS" + } +}, +{ + "model": "startScan.technology", + "pk": 6, + "fields": { + "name": "IIS:8.5" + } +}, +{ + "model": "startScan.technology", + "pk": 7, + "fields": { + "name": "Microsoft ASP.NET" + } +}, +{ + "model": "startScan.technology", + "pk": 8, + "fields": { + "name": "Windows Server" + } +}, +{ + "model": "startScan.technology", + "pk": 9, + "fields": { + "name": "Apache HTTP Server:2.4.25" + } +}, +{ + "model": "startScan.technology", + "pk": 10, + "fields": { + "name": "Debian" + } +}, +{ + "model": "startScan.technology", + "pk": 11, + "fields": { + "name": "PHP:7.1.26" + } +}, +{ + "model": "startScan.technology", + "pk": 12, + "fields": { + "name": "Microsoft ASP.NET:2.0.50727" + } +}, +{ + "model": "startScan.technology", + "pk": 
13, + "fields": { + "name": "Microsoft Visual Studio" + } +}, +{ + "model": "startScan.technology", + "pk": 14, + "fields": { + "name": "Bootstrap" + } +}, +{ + "model": "startScan.technology", + "pk": 15, + "fields": { + "name": "Basic" + } +}, +{ + "model": "startScan.ipaddress", + "pk": 1, + "fields": { + "address": "44.228.249.3", + "is_cdn": false, + "geo_iso": null, + "version": null, + "is_private": false, + "reverse_pointer": null, + "ports": [ + 2 + ], + "ip_subscan_ids": [] + } +}, +{ + "model": "startScan.ipaddress", + "pk": 2, + "fields": { + "address": "44.238.29.244", + "is_cdn": false, + "geo_iso": null, + "version": null, + "is_private": false, + "reverse_pointer": null, + "ports": [ + 2 + ], + "ip_subscan_ids": [] + } +}, +{ + "model": "startScan.ipaddress", + "pk": 3, + "fields": { + "address": "35.81.188.86", + "is_cdn": false, + "geo_iso": null, + "version": null, + "is_private": false, + "reverse_pointer": null, + "ports": [ + 1, + 2 + ], + "ip_subscan_ids": [] + } +}, +{ + "model": "startScan.port", + "pk": 1, + "fields": { + "number": 8080, + "service_name": "unknown", + "description": "", + "is_uncommon": true + } +}, +{ + "model": "startScan.port", + "pk": 2, + "fields": { + "number": 80, + "service_name": "unknown", + "description": "", + "is_uncommon": false + } +}, +{ + "model": "startScan.directoryfile", + "pk": 1, + "fields": { + "length": 28674, + "lines": 688, + "http_status": 200, + "words": 5389, + "name": "ZG9jcy8=", + "url": "http://rest.vulnweb.com/docs/", + "content_type": "text/html; charset=UTF-8" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 2, + "fields": { + "length": 3555, + "lines": 138, + "http_status": 200, + "words": 1397, + "name": "aW5kZXgucGhw", + "url": "http://rest.vulnweb.com/index.php", + "content_type": "text/html; charset=UTF-8" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 3, + "fields": { + "length": 3555, + "lines": 138, + "http_status": 200, + "words": 1397, + "name": "aW5kZXgucEhw", + "url": "http://rest.vulnweb.com/index.pHp", + "content_type": "text/html; charset=UTF-8" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 4, + "fields": { + "length": 926, + "lines": 16, + "http_status": 200, + "words": 29, + "name": "X3Z0aV9jbmYv", + "url": "http://testasp.vulnweb.com/_vti_cnf/", + "content_type": "text/html" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 5, + "fields": { + "length": 3194, + "lines": 55, + "http_status": 200, + "words": 429, + "name": "bG9naW4uYXNw", + "url": "http://testasp.vulnweb.com/login.asp", + "content_type": "text/html" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 6, + "fields": { + "length": 13, + "lines": 1, + "http_status": 200, + "words": 2, + "name": "cm9ib3RzLnR4dA==", + "url": "http://testasp.vulnweb.com/robots.txt", + "content_type": "text/plain" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 7, + "fields": { + "length": 894, + "lines": 4, + "http_status": 200, + "words": 2, + "name": "ZmF2aWNvbi5pY28=", + "url": "http://testaspnet.vulnweb.com/favicon.ico", + "content_type": "image/x-icon" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 8, + "fields": { + "length": 13, + "lines": 1, + "http_status": 200, + "words": 2, + "name": "cm9ib3RzLnR4dA==", + "url": "http://testaspnet.vulnweb.com/robots.txt", + "content_type": "text/plain" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 9, + "fields": { + "length": 3, + "lines": 1, + "http_status": 200, + "words": 1, + "name": "dGVzdC50eHQ=", + "url": 
"http://testaspnet.vulnweb.com/test.txt", + "content_type": "text/plain" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 10, + "fields": { + "length": 4472, + "lines": 101, + "http_status": 200, + "words": 899, + "name": "Y29tbWVudA==", + "url": "http://testhtml5.vulnweb.com/comment", + "content_type": "text/html; charset=utf-8" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 11, + "fields": { + "length": 493, + "lines": 15, + "http_status": 200, + "words": 87, + "name": "ZXhhbXBsZXM=", + "url": "http://testhtml5.vulnweb.com/examples", + "content_type": "text/html; charset=utf-8" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 12, + "fields": { + "length": 894, + "lines": 81, + "http_status": 200, + "words": 4, + "name": "ZmF2aWNvbi5pY28=", + "url": "http://testhtml5.vulnweb.com/favicon.ico", + "content_type": "image/x-icon" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 13, + "fields": { + "length": 4462, + "lines": 101, + "http_status": 200, + "words": 898, + "name": "cmVwb3J0", + "url": "http://testhtml5.vulnweb.com/report", + "content_type": "text/html; charset=utf-8" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 14, + "fields": { + "length": 386, + "lines": 14, + "http_status": 200, + "words": 54, + "name": "c2FtcGxlcy8=", + "url": "http://testhtml5.vulnweb.com/samples/", + "content_type": "text/html; charset=utf-8" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 15, + "fields": { + "length": 386, + "lines": 14, + "http_status": 200, + "words": 54, + "name": "c2FtcGxlcw==", + "url": "http://testhtml5.vulnweb.com/samples", + "content_type": "text/html; charset=utf-8" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 16, + "fields": { + "length": 6, + "lines": 1, + "http_status": 200, + "words": 1, + "name": "LmlkZWEvLm5hbWU=", + "url": "http://testphp.vulnweb.com/.idea/.name", + "content_type": "application/octet-stream" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 17, + "fields": { + "length": 951, + "lines": 14, + "http_status": 200, + "words": 427, + "name": "LmlkZWEv", + "url": "http://testphp.vulnweb.com/.idea/", + "content_type": "text/html" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 18, + "fields": { + "length": 171, + "lines": 6, + "http_status": 200, + "words": 10, + "name": "LmlkZWEvZW5jb2RpbmdzLnhtbA==", + "url": "http://testphp.vulnweb.com/.idea/encodings.xml", + "content_type": "text/xml" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 19, + "fields": { + "length": 275, + "lines": 10, + "http_status": 200, + "words": 26, + "name": "LmlkZWEvbW9kdWxlcy54bWw=", + "url": "http://testphp.vulnweb.com/.idea/modules.xml", + "content_type": "text/xml" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 20, + "fields": { + "length": 143, + "lines": 5, + "http_status": 200, + "words": 13, + "name": "LmlkZWEvc2NvcGVzL3Njb3BlX3NldHRpbmdzLnhtbA==", + "url": "http://testphp.vulnweb.com/.idea/scopes/scope_settings.xml", + "content_type": "text/xml" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 21, + "fields": { + "length": 266, + "lines": 9, + "http_status": 200, + "words": 18, + "name": "LmlkZWEvbWlzYy54bWw=", + "url": "http://testphp.vulnweb.com/.idea/misc.xml", + "content_type": "text/xml" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 22, + "fields": { + "length": 173, + "lines": 8, + "http_status": 200, + "words": 16, + "name": "LmlkZWEvdmNzLnhtbA==", + "url": "http://testphp.vulnweb.com/.idea/vcs.xml", + "content_type": "text/xml" + } +}, +{ + "model": 
"startScan.directoryfile", + "pk": 23, + "fields": { + "length": 12473, + "lines": 217, + "http_status": 200, + "words": 1702, + "name": "LmlkZWEvd29ya3NwYWNlLnhtbA==", + "url": "http://testphp.vulnweb.com/.idea/workspace.xml", + "content_type": "text/xml" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 24, + "fields": { + "length": 400, + "lines": 9, + "http_status": 200, + "words": 122, + "name": "X21tU2VydmVyU2NyaXB0cy8=", + "url": "http://testphp.vulnweb.com/_mmServerScripts/", + "content_type": "text/html" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 25, + "fields": { + "length": 93, + "lines": 1, + "http_status": 200, + "words": 4, + "name": "X21tU2VydmVyU2NyaXB0cy9NTUhUVFBEQi5waHA=", + "url": "http://testphp.vulnweb.com/_mmServerScripts/MMHTTPDB.php", + "content_type": "text/html; charset=UTF-8" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 26, + "fields": { + "length": 262, + "lines": 8, + "http_status": 200, + "words": 66, + "name": "YWRtaW4v", + "url": "http://testphp.vulnweb.com/admin/", + "content_type": "text/html" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 27, + "fields": { + "length": 224, + "lines": 5, + "http_status": 200, + "words": 8, + "name": "Y3Jvc3Nkb21haW4ueG1s", + "url": "http://testphp.vulnweb.com/crossdomain.xml", + "content_type": "text/xml" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 28, + "fields": { + "length": 595, + "lines": 11, + "http_status": 200, + "words": 262, + "name": "Q1ZTLw==", + "url": "http://testphp.vulnweb.com/CVS/", + "content_type": "text/html" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 29, + "fields": { + "length": 1, + "lines": 1, + "http_status": 200, + "words": 2, + "name": "Q1ZTL0VudHJpZXM=", + "url": "http://testphp.vulnweb.com/CVS/Entries", + "content_type": "application/octet-stream" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 30, + "fields": { + "length": 1, + "lines": 1, + "http_status": 200, + "words": 2, + "name": "Q1ZTL1Jvb3Q=", + "url": "http://testphp.vulnweb.com/CVS/Root", + "content_type": "application/octet-stream" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 31, + "fields": { + "length": 894, + "lines": 4, + "http_status": 200, + "words": 2, + "name": "ZmF2aWNvbi5pY28=", + "url": "http://testphp.vulnweb.com/favicon.ico", + "content_type": "image/x-icon" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 32, + "fields": { + "length": 377, + "lines": 9, + "http_status": 200, + "words": 128, + "name": "aW1hZ2VzLw==", + "url": "http://testphp.vulnweb.com/images/", + "content_type": "text/html" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 33, + "fields": { + "length": 3265, + "lines": 91, + "http_status": 200, + "words": 350, + "name": "aW5kZXguYmFr", + "url": "http://testphp.vulnweb.com/index.bak", + "content_type": "application/octet-stream" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 34, + "fields": { + "length": 4958, + "lines": 110, + "http_status": 200, + "words": 514, + "name": "aW5kZXgucGhw", + "url": "http://testphp.vulnweb.com/index.php", + "content_type": "text/html; charset=UTF-8" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 35, + "fields": { + "length": 2586, + "lines": 2, + "http_status": 200, + "words": 9, + "name": "aW5kZXguemlw", + "url": "http://testphp.vulnweb.com/index.zip", + "content_type": "application/zip" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 36, + "fields": { + "length": 5523, + "lines": 120, + "http_status": 200, + "words": 557, + "name": 
"bG9naW4ucGhw", + "url": "http://testphp.vulnweb.com/login.php", + "content_type": "text/html; charset=UTF-8" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 37, + "fields": { + "length": 268, + "lines": 8, + "http_status": 200, + "words": 60, + "name": "dmVuZG9yLw==", + "url": "http://testphp.vulnweb.com/vendor/", + "content_type": "text/html" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 38, + "fields": { + "length": 4018, + "lines": 74, + "http_status": 200, + "words": 482, + "name": "aW5kZXguaHRtbA==", + "url": "http://vulnweb.com/index.html", + "content_type": "text/html" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 39, + "fields": { + "length": 4018, + "lines": 74, + "http_status": 200, + "words": 482, + "name": "aW5kZXguaHRtbA==", + "url": "http://www.vulnweb.com/index.html", + "content_type": "text/html" + } +}, +{ + "model": "startScan.directoryscan", + "pk": 1, + "fields": { + "command_line": "ffuf -w /home/rengine/wordlists/dicc.txt -p 0.05 -t 30 -timeout 5 -ac -mc 200,204 -u http://rest.vulnweb.com/FUZZ -json", + "scanned_date": "2024-09-03T22:02:45.119Z", + "directory_files": [ + 1, + 2, + 3 + ], + "dir_subscan_ids": [] + } +}, +{ + "model": "startScan.directoryscan", + "pk": 2, + "fields": { + "command_line": "ffuf -w /home/rengine/wordlists/dicc.txt -p 0.05 -t 30 -timeout 5 -ac -mc 200,204 -u http://testasp.vulnweb.com/FUZZ -json", + "scanned_date": "2024-09-03T22:05:24.142Z", + "directory_files": [ + 4, + 5, + 6 + ], + "dir_subscan_ids": [] + } +}, +{ + "model": "startScan.directoryscan", + "pk": 3, + "fields": { + "command_line": "ffuf -w /home/rengine/wordlists/dicc.txt -p 0.05 -t 30 -timeout 5 -ac -mc 200,204 -u http://testaspnet.vulnweb.com/FUZZ -json", + "scanned_date": "2024-09-03T22:06:42.524Z", + "directory_files": [ + 7, + 8, + 9 + ], + "dir_subscan_ids": [] + } +}, +{ + "model": "startScan.directoryscan", + "pk": 4, + "fields": { + "command_line": "ffuf -w /home/rengine/wordlists/dicc.txt -p 0.05 -t 30 -timeout 5 -ac -mc 200,204 -u http://testhtml5.vulnweb.com/FUZZ -json", + "scanned_date": "2024-09-03T22:07:57.153Z", + "directory_files": [ + 10, + 11, + 12, + 13, + 14, + 15 + ], + "dir_subscan_ids": [] + } +}, +{ + "model": "startScan.directoryscan", + "pk": 5, + "fields": { + "command_line": "ffuf -w /home/rengine/wordlists/dicc.txt -p 0.05 -t 30 -timeout 5 -ac -mc 200,204 -u http://testphp.vulnweb.com/FUZZ -json", + "scanned_date": "2024-09-03T22:09:11.505Z", + "directory_files": [ + 16, + 17, + 18, + 19, + 20, + 21, + 22, + 23, + 24, + 25, + 26, + 27, + 28, + 29, + 30, + 31, + 32, + 33, + 34, + 35, + 36, + 37 + ], + "dir_subscan_ids": [] + } +}, +{ + "model": "startScan.directoryscan", + "pk": 6, + "fields": { + "command_line": "ffuf -w /home/rengine/wordlists/dicc.txt -p 0.05 -t 30 -timeout 5 -ac -mc 200,204 -u http://vulnweb.com/FUZZ -json", + "scanned_date": "2024-09-03T22:10:26.882Z", + "directory_files": [ + 38 + ], + "dir_subscan_ids": [] + } +}, +{ + "model": "startScan.directoryscan", + "pk": 7, + "fields": { + "command_line": "ffuf -w /home/rengine/wordlists/dicc.txt -p 0.05 -t 30 -timeout 5 -ac -mc 200,204 -u http://www.vulnweb.com/FUZZ -json", + "scanned_date": "2024-09-03T22:11:40.431Z", + "directory_files": [ + 39 + ], + "dir_subscan_ids": [] + } +}, +{ + "model": "startScan.dork", + "pk": 1, + "fields": { + "type": "login_pages", + "url": "http://rest.vulnweb.com/basic_authentication/api/" + } +}, +{ + "model": "startScan.dork", + "pk": 2, + "fields": { + "type": "login_pages", + "url": 
"http://rest.vulnweb.com/docs/" + } +}, +{ + "model": "startScan.dork", + "pk": 3, + "fields": { + "type": "login_pages", + "url": "http://testasp.vulnweb.com/Search.asp" + } +}, +{ + "model": "startScan.dork", + "pk": 4, + "fields": { + "type": "login_pages", + "url": "http://testaspnet.vulnweb.com/login.aspx" + } +}, +{ + "model": "startScan.dork", + "pk": 5, + "fields": { + "type": "login_pages", + "url": "http://testhtml5.vulnweb.com/" + } +}, +{ + "model": "startScan.dork", + "pk": 6, + "fields": { + "type": "login_pages", + "url": "http://testphp.vulnweb.com/" + } +}, +{ + "model": "startScan.dork", + "pk": 7, + "fields": { + "type": "login_pages", + "url": "http://testphp.vulnweb.com/admin/" + } +}, +{ + "model": "startScan.dork", + "pk": 8, + "fields": { + "type": "login_pages", + "url": "http://testphp.vulnweb.com/artists.php" + } +}, +{ + "model": "startScan.dork", + "pk": 9, + "fields": { + "type": "login_pages", + "url": "http://testphp.vulnweb.com/artists.php?artist" + } +}, +{ + "model": "startScan.dork", + "pk": 10, + "fields": { + "type": "login_pages", + "url": "http://testphp.vulnweb.com/artists.php?artist=1" + } +}, +{ + "model": "startScan.dork", + "pk": 11, + "fields": { + "type": "login_pages", + "url": "http://testphp.vulnweb.com/artists.php?artist=2" + } +}, +{ + "model": "startScan.dork", + "pk": 12, + "fields": { + "type": "login_pages", + "url": "http://testphp.vulnweb.com/categories.php" + } +}, +{ + "model": "startScan.dork", + "pk": 13, + "fields": { + "type": "login_pages", + "url": "http://testphp.vulnweb.com/disclaimer.php" + } +}, +{ + "model": "startScan.dork", + "pk": 14, + "fields": { + "type": "login_pages", + "url": "http://testphp.vulnweb.com/listproducts.php?cat" + } +}, +{ + "model": "startScan.dork", + "pk": 15, + "fields": { + "type": "login_pages", + "url": "http://testphp.vulnweb.com/listproducts.php?cat=1" + } +}, +{ + "model": "startScan.dork", + "pk": 16, + "fields": { + "type": "login_pages", + "url": "http://testphp.vulnweb.com/login.php" + } +}, +{ + "model": "startScan.dork", + "pk": 17, + "fields": { + "type": "login_pages", + "url": "http://testphp.vulnweb.com/search.php" + } +}, +{ + "model": "startScan.dork", + "pk": 18, + "fields": { + "type": "login_pages", + "url": "http://www.vulnweb.com/" + } +}, +{ + "model": "startScan.dork", + "pk": 19, + "fields": { + "type": "admin_panels", + "url": "http://rest.vulnweb.com/basic_authentication/api/" + } +}, +{ + "model": "startScan.dork", + "pk": 20, + "fields": { + "type": "admin_panels", + "url": "http://rest.vulnweb.com/docs/" + } +}, +{ + "model": "startScan.dork", + "pk": 21, + "fields": { + "type": "admin_panels", + "url": "http://rest.vulnweb.com/images/1_basic_authentication.png" + } +}, +{ + "model": "startScan.dork", + "pk": 22, + "fields": { + "type": "admin_panels", + "url": "http://rest.vulnweb.com/images/2_jwt.png" + } +}, +{ + "model": "startScan.dork", + "pk": 23, + "fields": { + "type": "admin_panels", + "url": "http://rest.vulnweb.com/images/3_oauth2.png" + } +}, +{ + "model": "startScan.dork", + "pk": 24, + "fields": { + "type": "admin_panels", + "url": "http://testphp.vulnweb.com/admin/" + } +}, +{ + "model": "startScan.dork", + "pk": 25, + "fields": { + "type": "admin_panels", + "url": "http://testphp.vulnweb.com/login.php" + } +}, +{ + "model": "startScan.dork", + "pk": 26, + "fields": { + "type": "admin_panels", + "url": "http://testphp.vulnweb.com/secured/phpinfo.php" + } +}, +{ + "model": "startScan.dork", + "pk": 27, + "fields": { + "type": "social_media", + 
"url": "https://m.facebook.com/QwertyAloneC/posts/dork-sqli-just-add-inurl-before-themphpcatidcartphpidphpcidindexphpshowtopiccont/142828509609539/" + } +}, +{ + "model": "startScan.dork", + "pk": 28, + "fields": { + "type": "social_media", + "url": "https://m.facebook.com/groups/3201261889909211/posts/3624197390948990/" + } +}, +{ + "model": "startScan.dork", + "pk": 29, + "fields": { + "type": "social_media", + "url": "https://m.facebook.com/groups/3201261889909211/posts/4007010936000965/" + } +}, +{ + "model": "startScan.dork", + "pk": 30, + "fields": { + "type": "social_media", + "url": "https://m.facebook.com/groups/3201261889909211/posts/4007311169304275/" + } +}, +{ + "model": "startScan.dork", + "pk": 31, + "fields": { + "type": "social_media", + "url": "https://m.facebook.com/groups/3201261889909211/posts/4188319924536731/" + } +}, +{ + "model": "startScan.dork", + "pk": 32, + "fields": { + "type": "social_media", + "url": "https://m.facebook.com/groups/3201261889909211/posts/4367877359914319/" + } +}, +{ + "model": "startScan.dork", + "pk": 33, + "fields": { + "type": "social_media", + "url": "https://m.facebook.com/groups/3201261889909211/posts/6293716407330395/" + } +}, +{ + "model": "startScan.dork", + "pk": 34, + "fields": { + "type": "social_media", + "url": "https://m.facebook.com/groups/hackingteam2022/posts/2726773620796174/" + } +}, +{ + "model": "startScan.dork", + "pk": 35, + "fields": { + "type": "social_media", + "url": "https://www.facebook.com/KaliLinuxBrazil/photos/tenha-certeza-de-estar-com-o-tor-instaladoinstala%C3%A7%C3%A3o-tor-rootkaliapt-get-install/972928206082146/" + } +}, +{ + "model": "startScan.dork", + "pk": 36, + "fields": { + "type": "social_media", + "url": "https://www.facebook.com/KaliLinuxBrazil/photos/tenha-certeza-de-estar-com-o-tor-instaladoinstala&C3&A7&C3&A3o-tor-rootkaliapt-get-install/972928206082146/" + } +}, +{ + "model": "startScan.dork", + "pk": 37, + "fields": { + "type": "social_media", + "url": "https://www.facebook.com/groups/3201261889909211/" + } +}, +{ + "model": "startScan.dork", + "pk": 38, + "fields": { + "type": "social_media", + "url": "https://mobile.twitter.com/MrHades2020" + } +}, +{ + "model": "startScan.dork", + "pk": 39, + "fields": { + "type": "social_media", + "url": "https://twitter.com/0xSwayamm" + } +}, +{ + "model": "startScan.dork", + "pk": 40, + "fields": { + "type": "social_media", + "url": "https://twitter.com/7h3r4bb17" + } +}, +{ + "model": "startScan.dork", + "pk": 41, + "fields": { + "type": "social_media", + "url": "https://twitter.com/SadatTamzit" + } +}, +{ + "model": "startScan.dork", + "pk": 42, + "fields": { + "type": "social_media", + "url": "https://twitter.com/deathpoolx1" + } +}, +{ + "model": "startScan.dork", + "pk": 43, + "fields": { + "type": "social_media", + "url": "https://twitter.com/hunterabubakar" + } +}, +{ + "model": "startScan.dork", + "pk": 44, + "fields": { + "type": "social_media", + "url": "https://twitter.com/kg4409" + } +}, +{ + "model": "startScan.dork", + "pk": 45, + "fields": { + "type": "social_media", + "url": "https://twitter.com/ravidutt04?lang" + } +}, +{ + "model": "startScan.dork", + "pk": 46, + "fields": { + "type": "social_media", + "url": "https://twitter.com/ravidutt04?lang=ca" + } +}, +{ + "model": "startScan.dork", + "pk": 47, + "fields": { + "type": "social_media", + "url": "https://twitter.com/therceman/status/1711473903934054427" + } +}, +{ + "model": "startScan.dork", + "pk": 48, + "fields": { + "type": "social_media", + "url": 
"https://twitter.com/vishack81" + } +}, +{ + "model": "startScan.dork", + "pk": 49, + "fields": { + "type": "social_media", + "url": "https://m.youtube.com/watch?v" + } +}, +{ + "model": "startScan.dork", + "pk": 50, + "fields": { + "type": "social_media", + "url": "https://m.youtube.com/watch?v=2_lswM1S264" + } +}, +{ + "model": "startScan.dork", + "pk": 51, + "fields": { + "type": "social_media", + "url": "https://m.youtube.com/watch?v=2tJgPyRITGc" + } +}, +{ + "model": "startScan.dork", + "pk": 52, + "fields": { + "type": "social_media", + "url": "https://m.youtube.com/watch?v=6FDKHewTP4A" + } +}, +{ + "model": "startScan.dork", + "pk": 53, + "fields": { + "type": "social_media", + "url": "https://m.youtube.com/watch?v=cEoPdpVUeyU" + } +}, +{ + "model": "startScan.dork", + "pk": 54, + "fields": { + "type": "social_media", + "url": "https://m.youtube.com/watch?v=gHkGuVb9vX8" + } +}, +{ + "model": "startScan.dork", + "pk": 55, + "fields": { + "type": "social_media", + "url": "https://www.youtube.com/watch?v" + } +}, +{ + "model": "startScan.dork", + "pk": 56, + "fields": { + "type": "social_media", + "url": "https://www.youtube.com/watch?v=IbDAHDSlgYg" + } +}, +{ + "model": "startScan.dork", + "pk": 57, + "fields": { + "type": "social_media", + "url": "https://www.youtube.com/watch?v=ZstyFyfS3g4" + } +}, +{ + "model": "startScan.dork", + "pk": 58, + "fields": { + "type": "social_media", + "url": "https://www.youtube.com/watch?v=cEoPdpVUeyU" + } +}, +{ + "model": "startScan.dork", + "pk": 59, + "fields": { + "type": "social_media", + "url": "https://www.youtube.com/watch?v=dabm-7CcHaE" + } +}, +{ + "model": "startScan.dork", + "pk": 60, + "fields": { + "type": "social_media", + "url": "https://www.youtube.com/watch?v=tAxMpoKkvCw" + } +}, +{ + "model": "startScan.dork", + "pk": 61, + "fields": { + "type": "social_media", + "url": "https://www.reddit.com/r/sysadmin/comments/gs031c/how_to_login_to_web_by_submitting_username/" + } +}, +{ + "model": "startScan.dork", + "pk": 62, + "fields": { + "type": "code_sharing", + "url": "https://github.com/dradis/dradis-acunetix/blob/master/spec/fixtures/files/testphp.vulnweb.com.export.acunetix.xml" + } +}, +{ + "model": "startScan.dork", + "pk": 63, + "fields": { + "type": "code_sharing", + "url": "https://github.com/dtag-dev-sec/explo/blob/master/examples/SQLI_LOGIN_testphp.vulnweb.com.yaml" + } +}, +{ + "model": "startScan.dork", + "pk": 64, + "fields": { + "type": "code_sharing", + "url": "https://github.com/dtag-dev-sec/explo/blob/master/examples/SQLI_testphp.vulnweb.com.yaml" + } +}, +{ + "model": "startScan.dork", + "pk": 65, + "fields": { + "type": "code_sharing", + "url": "https://github.com/yangbh/Hammer/blob/master/output/testphp.vulnweb.com/http_testphp.vulnweb.com" + } +}, +{ + "model": "startScan.dork", + "pk": 66, + "fields": { + "type": "code_sharing", + "url": "https://bitbucket.org/snippets/alms/KrG4LL" + } +}, +{ + "model": "startScan.dork", + "pk": 67, + "fields": { + "type": "code_sharing", + "url": "https://bitbucket.org/snippets/bakueikozo/rex9ar/playstation-classic-uart-login-console" + } +}, +{ + "model": "startScan.dork", + "pk": 68, + "fields": { + "type": "code_sharing", + "url": "https://bitbucket.org/snippets/especnorthamerica/neb6dq/examples-of-web-controller-rest-api-usage" + } +}, +{ + "model": "startScan.dork", + "pk": 69, + "fields": { + "type": "code_sharing", + "url": "https://bitbucket.org/snippets/faridani/pRX6r" + } +}, +{ + "model": "startScan.dork", + "pk": 70, + "fields": { + "type": "code_sharing", + "url": 
"https://bitbucket.org/snippets/orrp/xeGzXB/interactive-mgg" + } +}, +{ + "model": "startScan.dork", + "pk": 71, + "fields": { + "type": "code_sharing", + "url": "https://bitbucket.org/snippets/raerose01/5enKR5" + } +}, +{ + "model": "startScan.dork", + "pk": 72, + "fields": { + "type": "code_sharing", + "url": "https://bitbucket.org/snippets/sglienke/64LG6b/introsort" + } +}, +{ + "model": "startScan.dork", + "pk": 73, + "fields": { + "type": "code_sharing", + "url": "https://bitbucket.org/snippets/sglienke/6oBqMb" + } +}, +{ + "model": "startScan.dork", + "pk": 74, + "fields": { + "type": "code_sharing", + "url": "https://bitbucket.org/snippets/suntorytime/rAGXar/wellness-sources-overview" + } +}, +{ + "model": "startScan.dork", + "pk": 75, + "fields": { + "type": "code_sharing", + "url": "https://bitbucket.org/snippets/wmgodyak/6bXKj" + } +}, +{ + "model": "startScan.dork", + "pk": 76, + "fields": { + "type": "config_files", + "url": "http://testphp.vulnweb.com/.idea/workspace.xml" + } +}, +{ + "model": "startScan.dork", + "pk": 77, + "fields": { + "type": "config_files", + "url": "http://testphp.vulnweb.com/crossdomain.xml" + } +} +] diff --git a/web/startScan/tests.py b/web/startScan/tests.py deleted file mode 100644 index 7ce503c2..00000000 --- a/web/startScan/tests.py +++ /dev/null @@ -1,3 +0,0 @@ -from django.test import TestCase - -# Create your tests here. diff --git a/web/startScan/tests/__init__.py b/web/startScan/tests/__init__.py new file mode 100644 index 00000000..cfbb5ef8 --- /dev/null +++ b/web/startScan/tests/__init__.py @@ -0,0 +1,2 @@ +from utils.test_base import * +from .test_start_scan import * diff --git a/web/startScan/tests/test_start_scan.py b/web/startScan/tests/test_start_scan.py new file mode 100644 index 00000000..92c42544 --- /dev/null +++ b/web/startScan/tests/test_start_scan.py @@ -0,0 +1,220 @@ +""" +This file contains the test cases for the startScan views and models. 
+""" +import json +from unittest.mock import patch +from django.urls import reverse +from django.utils import timezone +from django.test import override_settings +from utils.test_base import BaseTestCase +from utils.test_utils import MockTemplate +from startScan.models import ScanHistory, Subdomain, EndPoint, Vulnerability, ScanActivity + +__all__ = [ + 'TestStartScanViews', + 'TestStartScanModels', +] + +class TestStartScanViews(BaseTestCase): + """Test cases for startScan views.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + @override_settings(CELERY_TASK_ALWAYS_EAGER=True) + def test_start_scan_view(self): + """Test the start scan view.""" + data = { + 'domain_name': self.data_generator.domain.name, + 'scan_mode': self.data_generator.engine_type.id, + 'importSubdomainTextArea': "www.example.com\nmail.example.com", + 'outOfScopeSubdomainTextarea': "www.example.com\nmail.example.com", + 'filterPath': "www.example.com", + } + response = self.client.post(reverse('start_scan', kwargs={ + 'slug': self.data_generator.project.slug, + 'domain_id': self.data_generator.domain.id + }), data) + self.assertEqual(response.status_code, 302) + self.assertEqual(response.url, f"/scan/{self.data_generator.project.slug}/history/scan") + + scan = ScanHistory.objects.latest('id') + self.assertEqual(scan.domain, self.data_generator.domain) + self.assertEqual(scan.scan_type.id, self.data_generator.engine_type.id) + + def test_scan_history_view(self): + """Test the scan history view.""" + response = self.client.get(reverse('scan_history', kwargs={ + 'slug': self.data_generator.project.slug, + })) + self.assertEqual(response.status_code, 200) + self.assertIn('scan_history', response.context) + + def test_detail_scan_view(self): + """Test the detail scan view.""" + response = self.client.get(reverse('detail_scan', kwargs={ + 'slug': self.data_generator.project.slug, + 'id': self.data_generator.scan_history.id + })) + self.assertEqual(response.status_code, 200) + #self.assertIn('scan_history', response.context) + + @patch('startScan.views.delete_scan') + def test_delete_scan_view(self, mock_delete_scan): + """Test the delete scan view.""" + mock_delete_scan.return_value = True + response = self.client.post(reverse('delete_scan', kwargs={ + 'id': self.data_generator.scan_history.id + })) + self.assertEqual(response.status_code, 200) + self.assertEqual(json.loads(response.content), {'status': 'true'}) + + @patch('startScan.views.delete_scan') + @MockTemplate.mock_template('base/_items/top_bar.html') + def test_delete_scan_view_failure(self, mock_delete_scan): + """Test the delete scan view when deletion fails.""" + mock_delete_scan.return_value = False + response = self.client.post(reverse('delete_scan', kwargs={ + 'id': 999 + })) + self.assertEqual(response.status_code, 404) + + def test_stop_scan_view(self): + """Test the stop scan view.""" + response = self.client.post(reverse('stop_scan', kwargs={ + 'id': self.data_generator.scan_history.id + })) + self.assertEqual(response.status_code, 200) + self.assertIn('status', json.loads(response.content)) + + def test_export_subdomains_view(self): + """Test the export subdomains view.""" + response = self.client.get(reverse('export_subdomains', kwargs={ + 'scan_id': self.data_generator.scan_history.id + })) + self.assertEqual(response.status_code, 200) + self.assertEqual(response['Content-Type'], 'text/plain') + + def test_export_empty_subdomains_view(self): + """Test the export subdomains view 
when there are no subdomains.""" + Subdomain.objects.all().delete() + + response = self.client.get(reverse('export_subdomains', kwargs={ + 'scan_id': self.data_generator.scan_history.id + })) + self.assertEqual(response.status_code, 200) + self.assertEqual(response['Content-Type'], 'text/plain') + self.assertEqual(response.content.decode(), '') + + def test_export_endpoints_view(self): + """Test the export endpoints view.""" + response = self.client.get(reverse('export_endpoints', kwargs={ + 'scan_id': self.data_generator.scan_history.id + })) + self.assertEqual(response.status_code, 200) + self.assertEqual(response['Content-Type'], 'text/plain') + + def test_export_empty_endpoints_view(self): + """Test the export endpoints view when there are no endpoints.""" + # Delete all endpoints + EndPoint.objects.all().delete() + + response = self.client.get(reverse('export_endpoints', kwargs={ + 'scan_id': self.data_generator.scan_history.id + })) + self.assertEqual(response.status_code, 200) + self.assertEqual(response['Content-Type'], 'text/plain') + self.assertEqual(response.content.decode(), '') + +class TestStartScanModels(BaseTestCase): + """Test cases for startScan models.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + def test_scan_history_model(self): + """Test the ScanHistory model.""" + self.assertIsInstance(self.data_generator.scan_history, ScanHistory) + self.assertEqual(str(self.data_generator.scan_history), self.data_generator.domain.name) + + def test_scan_history_model_with_missing_fields(self): + """Test the ScanHistory model with missing fields.""" + minimal_scan_history = ScanHistory.objects.create( + domain=self.data_generator.domain, + scan_type=self.data_generator.engine_type, + start_scan_date=timezone.now(), + ) + self.assertIsInstance(minimal_scan_history, ScanHistory) + self.assertEqual(str(minimal_scan_history), f"{self.data_generator.domain.name}") + self.assertIsNone(minimal_scan_history.initiated_by) + self.assertIsNone(minimal_scan_history.tasks) + + def test_subdomain_model(self): + """Test the Subdomain model.""" + self.assertIsInstance(self.data_generator.subdomain, Subdomain) + self.assertEqual(str(self.data_generator.subdomain), self.data_generator.subdomain.name) + + def test_subdomain_model_with_missing_fields(self): + """Test the Subdomain model with missing fields.""" + minimal_subdomain = Subdomain.objects.create( + name='test.example.com', + target_domain=self.data_generator.domain + ) + self.assertIsInstance(minimal_subdomain, Subdomain) + self.assertEqual(str(minimal_subdomain), 'test.example.com') + self.assertIsNone(minimal_subdomain.http_url) + self.assertIsNone(minimal_subdomain.discovered_date) + + def test_endpoint_model(self): + """Test the EndPoint model.""" + self.assertIsInstance(self.data_generator.endpoint, EndPoint) + self.assertEqual(str(self.data_generator.endpoint), self.data_generator.endpoint.http_url) + + def test_endpoint_model_with_missing_fields(self): + """Test the EndPoint model with missing fields.""" + minimal_endpoint = EndPoint.objects.create( + target_domain=self.data_generator.domain, + http_url='http://test.example.com' + ) + self.assertIsInstance(minimal_endpoint, EndPoint) + self.assertEqual(str(minimal_endpoint), 'http://test.example.com') + self.assertIsNone(minimal_endpoint.response_time) + self.assertIsNone(minimal_endpoint.discovered_date) + + def test_vulnerability_model(self): + """Test the Vulnerability model.""" + 
self.assertIsInstance(self.data_generator.vulnerabilities[0], Vulnerability) + self.assertEqual(str(self.data_generator.vulnerabilities[0].name), self.data_generator.vulnerabilities[0].name) + + def test_vulnerability_model_with_missing_fields(self): + """Test the Vulnerability model with missing fields.""" + minimal_vulnerability = Vulnerability.objects.create( + name='Test Vulnerability', + target_domain=self.data_generator.domain, + severity=1 + ) + self.assertIsInstance(minimal_vulnerability, Vulnerability) + self.assertEqual(str(minimal_vulnerability.name), 'Test Vulnerability') + self.assertIsNone(minimal_vulnerability.source) + self.assertIsNone(minimal_vulnerability.description) + + def test_scan_activity_model(self): + """Test the ScanActivity model.""" + self.assertIsInstance(self.data_generator.scan_activity, ScanActivity) + self.assertEqual(str(self.data_generator.scan_activity), "Test Type") + + def test_scan_activity_model_with_missing_fields(self): + """Test the ScanActivity model with missing fields.""" + minimal_scan_activity = ScanActivity.objects.create( + scan_of=self.data_generator.scan_history, + name="Test Type", + time=timezone.now(), + status=1 + ) + self.assertIsInstance(minimal_scan_activity, ScanActivity) + self.assertEqual(minimal_scan_activity.name, "Test Type") + self.assertIsNone(minimal_scan_activity.error_message) diff --git a/web/startScan/views.py b/web/startScan/views.py index 79b9ea49..c1785abf 100644 --- a/web/startScan/views.py +++ b/web/startScan/views.py @@ -40,7 +40,7 @@ def detail_scan(request, id, slug): # Get scan objects scan = get_object_or_404(ScanHistory, id=id) - domain_id = scan.domain.id + domain_id = safe_int_cast( scan.domain.id) scan_engines = EngineType.objects.order_by('engine_name').all() recent_scans = ScanHistory.objects.filter(domain__id=domain_id) last_scans = ( @@ -268,7 +268,7 @@ def start_scan_ui(request, slug, domain_id): filterPath = '' # Get engine type - engine_id = request.POST['scan_mode'] + engine_id = safe_int_cast(request.POST['scan_mode']) # Create ScanHistory object scan_history_id = create_scan_object( @@ -322,7 +322,7 @@ def start_multiple_scan(request, slug): if request.POST.get('scan_mode', 0): # if scan mode is available, then start the scan # get engine type - engine_id = request.POST['scan_mode'] + engine_id = safe_int_cast( request.POST['scan_mode']) list_of_domains = request.POST['list_of_domain_id'] grouped_scans = [] @@ -682,7 +682,7 @@ def visualise(request, id): def start_organization_scan(request, id, slug): organization = get_object_or_404(Organization, id=id) if request.method == "POST": - engine_id = request.POST['scan_mode'] + engine_id = safe_int_cast( request.POST['scan_mode']) # Start Celery task for each organization's domains for domain in organization.get_domains(): diff --git a/web/targetApp/fixtures/targetApp.json b/web/targetApp/fixtures/targetApp.json new file mode 100644 index 00000000..0a23be7c --- /dev/null +++ b/web/targetApp/fixtures/targetApp.json @@ -0,0 +1,520 @@ +[ +{ + "model": "targetApp.historicalip", + "pk": 1, + "fields": { + "ip": "44.228.249.3", + "location": "Boardman - United States", + "owner": "AMAZON-02", + "last_seen": "AMAZON-02" + } +}, +{ + "model": "targetApp.historicalip", + "pk": 2, + "fields": { + "ip": "18.192.172.30", + "location": "Frankfurt am Main - Germany", + "owner": "AMAZON-02", + "last_seen": "AMAZON-02" + } +}, +{ + "model": "targetApp.historicalip", + "pk": 3, + "fields": { + "ip": "176.28.50.165", + "location": "Strasbourg - France", + "owner": 
"Host Europe GmbH", + "last_seen": "Host Europe GmbH" + } +}, +{ + "model": "targetApp.historicalip", + "pk": 4, + "fields": { + "ip": "50.116.82.164", + "location": "United States", + "owner": "UNIFIEDLAYER-AS-1", + "last_seen": "UNIFIEDLAYER-AS-1" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 1, + "fields": { + "name": "2xax.com" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 2, + "fields": { + "name": "accunetix.com" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 3, + "fields": { + "name": "acunetix.asia" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 4, + "fields": { + "name": "acunetix.at" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 5, + "fields": { + "name": "acunetix.biz" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 6, + "fields": { + "name": "acunetix.co" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 7, + "fields": { + "name": "acunetix.com" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 8, + "fields": { + "name": "acunetix.in" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 9, + "fields": { + "name": "acunetix.info" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 10, + "fields": { + "name": "acunetix.jp" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 11, + "fields": { + "name": "acunetix.net" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 12, + "fields": { + "name": "acunetix.org" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 13, + "fields": { + "name": "acunetix.tw" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 14, + "fields": { + "name": "bxss.me" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 15, + "fields": { + "name": "free-security-audit.com" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 16, + "fields": { + "name": "free-security-scan.com" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 17, + "fields": { + "name": "mbsa-reporter.com" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 18, + "fields": { + "name": "mbsareporter.com" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 19, + "fields": { + "name": "networkdefender.com" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 20, + "fields": { + "name": "networkdefender.net" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 21, + "fields": { + "name": "security-analyzer-reporter.com" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 22, + "fields": { + "name": "sitesecurityaudit.com" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 23, + "fields": { + "name": "sitesecuritymonitor.com" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 24, + "fields": { + "name": "sitesecuritymonitor.net" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 25, + "fields": { + "name": "sql-injection-audit.com" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 26, + "fields": { + "name": "sql-injection-scan.com" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 27, + "fields": { + "name": "sqlinjectionscanner.com" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 28, + "fields": { + "name": "vulnweb.com" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 29, + "fields": { + "name": "web-security-audit.com" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 30, + "fields": { + "name": "web-site-defender.com" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 31, + "fields": { + "name": "web-sitedefender.com" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 32, + "fields": 
{ + "name": "website-audit.org" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 33, + "fields": { + "name": "website-defender.com" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 34, + "fields": { + "name": "websitedefender.asia" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 35, + "fields": { + "name": "websitedefender.at" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 36, + "fields": { + "name": "websitedefender.co" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 37, + "fields": { + "name": "websitedefender.com" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 38, + "fields": { + "name": "websitesecuritycenter.com" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 39, + "fields": { + "name": "websitesecuritymonitor.com" + } +}, +{ + "model": "targetApp.relateddomain", + "pk": 40, + "fields": { + "name": "webvulnerabilityscanner.com" + } +}, +{ + "model": "targetApp.registrar", + "pk": 1, + "fields": { + "name": "Eurodns S.A.", + "phone": "+352.27220150", + "email": "legalservices@eurodns.com", + "url": null + } +}, +{ + "model": "targetApp.domainregistration", + "pk": 1, + "fields": { + "name": "Acunetix Acunetix", + "organization": "Acunetix Ltd", + "address": "3rd Floor,, J&C Building,, Road Town", + "city": "Tortola", + "state": null, + "zip_code": "VG1110", + "country": "VG", + "email": "administrator@acunetix.com", + "phone": "+1.23456789", + "fax": null, + "id_str": null + } +}, +{ + "model": "targetApp.domainregistration", + "pk": 2, + "fields": { + "name": null, + "organization": null, + "address": null, + "city": null, + "state": null, + "zip_code": null, + "country": null, + "email": null, + "phone": null, + "fax": null, + "id_str": null + } +}, +{ + "model": "targetApp.whoisstatus", + "pk": 1, + "fields": { + "name": "clienttransferprohibited" + } +}, +{ + "model": "targetApp.nameserver", + "pk": 1, + "fields": { + "name": "ns1.eurodns.com" + } +}, +{ + "model": "targetApp.nameserver", + "pk": 2, + "fields": { + "name": "ns2.eurodns.com" + } +}, +{ + "model": "targetApp.nameserver", + "pk": 3, + "fields": { + "name": "ns3.eurodns.com" + } +}, +{ + "model": "targetApp.nameserver", + "pk": 4, + "fields": { + "name": "ns4.eurodns.com" + } +}, +{ + "model": "targetApp.dnsrecord", + "pk": 1, + "fields": { + "name": "44.228.249.3", + "type": "a" + } +}, +{ + "model": "targetApp.dnsrecord", + "pk": 2, + "fields": { + "name": "v=spf1 -all", + "type": "txt" + } +}, +{ + "model": "targetApp.dnsrecord", + "pk": 3, + "fields": { + "name": "google-site-verification=4lqorv-lti-d4gpxtbeqwmfnwff7uaazqc9gzvhukbw", + "type": "txt" + } +}, +{ + "model": "targetApp.domaininfo", + "pk": 1, + "fields": { + "dnssec": false, + "created": "2010-06-14T00:00:00Z", + "updated": "2023-05-26T10:04:20Z", + "expires": "2025-06-13T00:00:00Z", + "geolocation_iso": "VG", + "registrar": 1, + "registrant": 1, + "admin": 2, + "tech": 2, + "whois_server": "whois.eurodns.com", + "status": [ + 1 + ], + "name_servers": [ + 1, + 2, + 3, + 4 + ], + "dns_records": [ + 1, + 2, + 3 + ], + "related_domains": [ + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + 9, + 10, + 11, + 12, + 13, + 14, + 15, + 16, + 17, + 18, + 19, + 20, + 21, + 22, + 23, + 24, + 25, + 26, + 27, + 28, + 29, + 30, + 31, + 32, + 33, + 34, + 35, + 36, + 37, + 38, + 39, + 40 + ], + "related_tlds": [], + "similar_domains": [], + "historical_ips": [ + 1, + 2, + 3, + 4 + ] + } +}, +{ + "model": "targetApp.domain", + "pk": 1, + "fields": { + "name": "vulnweb.com", + "h1_team_handle": "", + 
"ip_address_cidr": null, + "description": "", + "insert_date": "2024-09-03T21:23:39.098Z", + "start_scan_date": "2024-09-04T19:29:56.563Z", + "request_headers": null, + "domain_info": 1, + "project": 1 + } +} +] diff --git a/web/targetApp/forms.py b/web/targetApp/forms.py index 0d11c2e7..9163b773 100644 --- a/web/targetApp/forms.py +++ b/web/targetApp/forms.py @@ -43,12 +43,28 @@ class AddTargetForm(forms.Form): } )) -class AddOrganizationForm(forms.Form): +class AddOrganizationForm(forms.ModelForm): def __init__(self, *args, **kwargs): project = kwargs.pop('project') super(AddOrganizationForm, self).__init__(*args, **kwargs) - self.fields['domains'].choices = [(domain.id, domain.name) for domain in Domain.objects.filter(project__slug=project) if not domain.get_organization()] - + self.fields['domains'] = forms.ModelMultipleChoiceField( + queryset=Domain.objects.filter(project__slug=project, domains__isnull=True), + widget=forms.SelectMultiple( + attrs={ + "class": "form-control select2-multiple", + "data-toggle": "select2", + "data-width": "100%", + "data-placeholder": "Choose Targets", + "id": "domains", + } + ), + required=True + ) + + class Meta: + model = Organization + fields = ['name', 'description', 'domains'] + name = forms.CharField( required=True, widget=forms.TextInput( @@ -68,26 +84,10 @@ def __init__(self, *args, **kwargs): } )) - domains = forms.ChoiceField( - required=True, - widget=forms.Select( - attrs={ - "class": "form-control select2-multiple", - "multiple": "multiple", - "data-toggle": "select2", - "data-width": "100%", - "multiple": "multiple", - "data-placeholder": "Choose Targets", - "id": "domains", - } - ) - ) - - def clean_name(self): - data = self.cleaned_data['name'] - if Organization.objects.filter(name=data).count() > 0: - raise forms.ValidationError(f"{data} Organization already exists") - return data + def clean_domains(self): + if domains := self.cleaned_data.get('domains'): + return [int(domain.id) for domain in domains] + return [] class UpdateTargetForm(forms.ModelForm): diff --git a/web/targetApp/tests.py b/web/targetApp/tests.py deleted file mode 100644 index 7ce503c2..00000000 --- a/web/targetApp/tests.py +++ /dev/null @@ -1,3 +0,0 @@ -from django.test import TestCase - -# Create your tests here. diff --git a/web/targetApp/tests/__init__.py b/web/targetApp/tests/__init__.py new file mode 100644 index 00000000..e30f0ac2 --- /dev/null +++ b/web/targetApp/tests/__init__.py @@ -0,0 +1,2 @@ +from utils.test_base import * +from .test_target_app import * diff --git a/web/targetApp/tests/test_target_app.py b/web/targetApp/tests/test_target_app.py new file mode 100644 index 00000000..8af2c5b2 --- /dev/null +++ b/web/targetApp/tests/test_target_app.py @@ -0,0 +1,413 @@ +""" +TestTargetAppViews contains unit tests for the views of the targetApp application. +It verifies the functionality related to targets and organizations, ensuring that views +return the correct status codes, templates, and handle various scenarios appropriately. + +Methods: + setUp: Initializes test objects for projects, domains, and organizations before each test. + test_index_view: Tests the index view for correct status code and template usage. + test_add_target_view: Tests the addition of a new target to ensure it is created successfully. + test_add_ip_view: Tests the addition of a new IP target to ensure it is created successfully. + test_add_target_with_invalid_ip: Tests the addition of a target with an invalid IP address. 
+ test_add_target_with_file: Tests the addition of targets from a file to ensure they are created successfully. + test_add_target_with_empty_file: Tests the handling of an empty file upload. + test_list_target_view: Tests the list target view for correct status code and template usage. + test_delete_target_view: Tests the deletion of a target to ensure it is removed successfully. + test_update_target_view: Tests the update of a target to ensure it is updated successfully. + test_update_organization_view_with_invalid_data: Tests updating an organization with invalid data. + test_delete_non_existent_target: Tests the deletion of a non-existent target. + test_add_organization_view: Tests the addition of a new organization to ensure it is created successfully. + test_list_organization_view: Tests the list organization view for correct status code and template usage. + test_delete_organization_view: Tests the deletion of an organization to ensure it is removed successfully. + test_update_organization_view: Tests the update of an organization to ensure it is updated successfully. + test_update_organization_with_invalid_data: Tests updating an organization with invalid data. + test_add_organization_with_duplicate_name: Tests adding an organization with a duplicate name. + test_delete_non_existent_organization: Tests the deletion of a non-existent organization. +""" + +import os +from django.urls import reverse +from django.contrib.messages import get_messages +from utils.test_base import BaseTestCase +from targetApp.models import Domain, Organization + +__all__ = [ + 'TestTargetAppViews', +] + +class TestTargetAppViews(BaseTestCase): + """ + Test class for the views of the targetApp. + """ + + def setUp(self): + """ + Initial setup for the tests. + Creates test objects for projects, domains, and organizations. + """ + super().setUp() + self.data_generator.create_project_full() + + def test_index_view(self): + """ + Tests the index view to ensure it returns the correct status code and template. + """ + response = self.client.get(reverse('targetIndex')) + self.assertEqual(response.status_code, 200) + self.assertTemplateUsed(response, 'target/index.html') + + def test_add_target_view(self): + """ + Tests the add target view to ensure a new target is created successfully. + """ + Domain.objects.all().delete() + response = self.client.post( + reverse('add_target', kwargs={ + 'slug': self.data_generator.project.slug + }), { + 'addTargets': 'example.com', + 'targetDescription': 'Test Description', + 'targetH1TeamHandle': 'Test Handle', + 'targetOrganization': 'Test Organization', + 'add-multiple-targets': 'submit', + } + ) + self.assertEqual(response.status_code, 302) + self.assertTrue(Domain.objects.filter(name='example.com').exists()) + + def test_add_ip_view(self): + """ + Tests the add target view to ensure a new target is created successfully. 
+ """ + Domain.objects.all().delete() + response = self.client.post( + reverse('add_target', kwargs={ + 'slug': self.data_generator.project.slug + }), { + 'ip_address': '192.168.1.0%2F24', + 'resolved_ip_domains': ['example.local','other-example.local'], + 'targetDescription': 'Test Description', + 'targetH1TeamHandle': 'Test Handle', + 'targetOrganization': 'Test Organization', + 'add-ip-target': 'submit', + } + ) + self.assertEqual(response.status_code, 302) + self.assertTrue(Domain.objects.filter(name='example.local').exists()) + self.assertTrue(Domain.objects.filter(name='other-example.local').exists()) + + def test_add_target_with_invalid_ip(self): + """ + Test adding a target with an invalid IP address. + """ + response = self.client.post( + reverse('add_target', kwargs={'slug': self.data_generator.project.slug}), + { + 'ip_address': '999.999.999.999', # Invalid IP address + 'resolved_ip_domains': ['999.999.999.999'], + 'targetDescription': 'Test Description', + 'targetH1TeamHandle': 'Test Handle', + 'targetOrganization': 'Test Organization', + 'add-ip-target': 'submit', + } + ) + + self.assertEqual(response.status_code, 302) + messages_list = list(get_messages(response.wsgi_request)) + self.assertIn( + "IP 999.999.999.999 is not a valid IP address / domain. Skipping.", + [str(message) for message in messages_list] + ) + + + def test_add_target_with_file(self): + """ + Test the add target with file view to ensure a new target is created successfully. + """ + Domain.objects.all().delete() + # Create a temporary file for the test + with open('domains.txt', 'w', encoding='utf-8') as f: + f.write("example.local\nother-example.local\n") + + with open('domains.txt', 'rb') as file: + response = self.client.post( + reverse('add_target', kwargs={'slug': self.data_generator.project.slug}), + { + 'txtFile': file, + 'import-txt-target': 'Upload', + }, + format='multipart' + ) + + # Check that the response is correct + self.assertEqual(response.status_code, 302) + self.assertTrue(Domain.objects.filter(name='example.local').exists()) + self.assertTrue(Domain.objects.filter(name='other-example.local').exists()) + + # Clean up the temporary file + os.remove('domains.txt') + + def test_add_target_with_empty_file(self): + """ + Test uploading an empty file to ensure the system handles it correctly. + """ + # Create an empty file for the test + with open('empty_file.txt', 'w', encoding='utf-8') as f: + pass # Create an empty file + + with open('empty_file.txt', 'rb') as file: + response = self.client.post( + reverse('add_target', kwargs={'slug': self.data_generator.project.slug}), + { + 'txtFile': file, + 'import-txt-target': 'Upload', + }, + format='multipart' + ) + + # Check that the response is correct + self.assertEqual(response.status_code, 302) + + # Check the returned message + messages_list = list(get_messages(response.wsgi_request)) + self.assertIn( + "The uploaded file is empty. Please upload a valid file.", + [str(message) for message in messages_list] + ) + + # Check that no new target was created + self.assertFalse(Domain.objects.filter(name='example.local').exists()) + + # Clean up the empty file + os.remove('empty_file.txt') + + def test_list_target_view(self): + """ + Tests the list target view to ensure it returns the correct status code and template. 
+ """ + response = self.client.get( + reverse('list_target', kwargs={ + 'slug': self.data_generator.project.slug + }) + ) + self.assertEqual(response.status_code, 200) + self.assertTemplateUsed(response, 'target/list.html') + + def test_delete_target_view(self): + """ + Tests the delete target view to ensure a target is deleted successfully. + """ + response = self.client.post( + reverse('delete_target', kwargs={ + 'id': self.data_generator.domain.id + }) + ) + self.assertEqual(response.status_code, 200) + self.assertFalse(Domain.objects.filter(id=self.data_generator.domain.id).exists()) + + def test_update_target_view(self): + """ + Tests the update target view to ensure a target is updated successfully. + """ + response = self.client.post( + reverse('update_target', kwargs={ + 'slug': self.data_generator.project.slug, + 'id': self.data_generator.domain.id + }), + { + 'description': 'Updated description', + 'h1_team_handle': 'Updated Handle' + } + ) + self.assertEqual(response.status_code, 302) + self.data_generator.domain.refresh_from_db() + updated_domain = Domain.objects.get(id=self.data_generator.domain.id) + self.assertEqual(updated_domain.description, 'Updated description') + self.assertEqual(updated_domain.h1_team_handle, 'Updated Handle') + + def test_update_organization_view_with_invalid_data(self): + """ + Test updating an organization with invalid data to ensure validation works. + """ + # Prepare invalid data (e.g., empty name) + invalid_data = { + 'name': '', # Invalid: name cannot be empty + 'description': 'Updated Org Description', + } + + response = self.client.post( + reverse('update_organization', kwargs={ + 'slug': self.data_generator.project.slug, + 'id': self.data_generator.organization.id + }), + invalid_data + ) + + # Check that the response is still 200 (indicating the form was not valid) + self.assertEqual(response.status_code, 200) + + # Check for the presence of an error message in the response context + self.assertContains(response, "This field is required.") + + # Verify that the organization data has not changed + self.data_generator.organization.refresh_from_db() + self.assertEqual(self.data_generator.organization.name, 'Test Organization') + self.assertEqual(self.data_generator.organization.description, 'Test Description') + + def test_delete_non_existent_target(self): + """ + Test attempting to delete a target that does not exist. + """ + # Attempt to delete a target with a non-existent ID + non_existent_id = self.data_generator.domain.id + 999 # Ensure this ID does not exist + + response = self.client.post( + reverse('delete_target', kwargs={'id': non_existent_id}), + follow=True # Follow the redirect after deletion + ) + + # Check that the response is still 200 (indicating the request was processed) + self.assertEqual(response.status_code, 200) + + messages_list = list(get_messages(response.wsgi_request)) + self.assertIn( + "Domain not found.", + [str(message) for message in messages_list] + ) + + # Verify that the existing target is still present + self.assertTrue(Domain.objects.filter(id=self.data_generator.domain.id).exists()) + + def test_add_organization_view(self): + """ + Tests the add organization view to ensure a new organization is created successfully. 
+ """ + Organization.objects.all().delete() + response = self.client.post( + reverse('add_organization', kwargs={ + 'slug': self.data_generator.project.slug + }), { + 'name': 'New Organization', + 'description': 'New Org Description', + 'domains': [self.data_generator.domain.id], + } + ) + self.assertEqual(response.status_code, 302) + self.assertTrue(Organization.objects.filter(name='New Organization').exists()) + + def test_list_organization_view(self): + """ + Tests the list organization view to ensure it returns the correct status code and template. + """ + response = self.client.get( + reverse('list_organization', kwargs={ + 'slug': self.data_generator.project.slug + }) + ) + self.assertEqual(response.status_code, 200) + self.assertTemplateUsed(response, 'organization/list.html') + + def test_delete_organization_view(self): + """ + Tests the delete organization view to ensure an organization is deleted successfully. + """ + response = self.client.post( + reverse('delete_organization', kwargs={ + 'id': self.data_generator.organization.id + }) + ) + self.assertEqual(response.status_code, 200) + self.assertFalse(Organization.objects.filter(id=self.data_generator.organization.id).exists()) + + def test_update_organization_view(self): + """ + Tests the update organization view to ensure an organization is updated successfully. + """ + response = self.client.post( + reverse('update_organization', kwargs={ + 'slug': self.data_generator.project.slug, + 'id': self.data_generator.organization.id + }), + { + 'name': 'Updated Organization', + 'description': 'Updated Org Description', + 'domains': [self.data_generator.domain.id], + } + ) + self.assertEqual(response.status_code, 302) + self.data_generator.organization.refresh_from_db() + self.assertEqual(self.data_generator.organization.name, 'Updated Organization') + self.assertEqual(self.data_generator.organization.description, 'Updated Org Description') + + def test_update_organization_with_invalid_data(self): + """ + Test updating an organization with invalid data to ensure validation works. + """ + response = self.client.post( + reverse('update_organization', kwargs={ + 'slug': self.data_generator.project.slug, + 'id': self.data_generator.organization.id + }), + { + 'name': '', # Invalid: name cannot be empty + 'description': 'Updated Org Description', + 'domains': [] # Assuming domains are required + } + ) + + # Check that the response is still 200 (indicating the form was not valid) + self.assertEqual(response.status_code, 200) + + # Check for the presence of an error message in the response context + self.assertContains(response, "This field is required.") + + # Verify that the organization data has not changed + self.data_generator.organization.refresh_from_db() + self.assertEqual(self.data_generator.organization.name, 'Test Organization') + self.assertEqual(self.data_generator.organization.description, 'Test Description') + + def test_add_organization_with_duplicate_name(self): + """ + Test adding an organization with a name that already exists. 
+ """ + response = self.client.post( + reverse('add_organization', kwargs={'slug': self.data_generator.project.slug}), + { + 'name': 'Test Organization', # Duplicate name + 'description': 'New Org Description', + 'domains': [] + } + ) + + # Check that the response is still 200 (indicating the form was not valid) + self.assertEqual(response.status_code, 200) + + # Check for the presence of an error message in the response context + self.assertContains(response, "Organization with this Name already exists.") + + # Verify that no new organization was created + self.assertEqual(Organization.objects.count(), 1) + + def test_delete_non_existent_organization(self): + """ + Test attempting to delete an organization that does not exist. + """ + # Attempt to delete an organization with a non-existent ID + non_existent_id = self.data_generator.organization.id + 999 + + response = self.client.post( + reverse('delete_organization', kwargs={'id': non_existent_id}), + follow=True # Follow the redirect after deletion + ) + + # Check that the response is 200 + self.assertEqual(response.status_code, 200) + + messages_list = list(get_messages(response.wsgi_request)) + self.assertIn( + "Organization not found.", + [str(message) for message in messages_list] + ) + + # Verify that the existing organization is still present + self.assertTrue(Organization.objects.filter(id=self.data_generator.organization.id).exists()) diff --git a/web/targetApp/views.py b/web/targetApp/views.py index 09067991..573e73d0 100644 --- a/web/targetApp/views.py +++ b/web/targetApp/views.py @@ -1,32 +1,74 @@ import csv import io -import ipaddress import logging -import validators - from datetime import timedelta from urllib.parse import urlparse +import validators + from django import http from django.conf import settings from django.contrib import messages from django.db.models import Count +from django.http import Http404 from django.shortcuts import get_object_or_404, render from django.urls import reverse from django.utils import timezone from django.utils.safestring import mark_safe from rolepermissions.decorators import has_permission_decorator +from reNgine.definitions import ( + PERM_MODIFY_TARGETS, + FOUR_OH_FOUR_URL, +) + +from reNgine.common_func import ( + get_ip_info, + get_ips_from_cidr_range, +) +from reNgine.tasks import ( + run_command, + sanitize_url, +) +from startScan.models import ( + EndPoint, + IpAddress, + Port, + Vulnerability, + VulnerabilityTags, + Email, + Employee, + CveId, + CweId, + CountryISO, + Subdomain, + ScanHistory, + EngineType, +) +from targetApp.models import ( + Domain, + Organization, + Project, +) +from targetApp.forms import ( + AddTargetForm, + UpdateTargetForm, + AddOrganizationForm, + UpdateOrganizationForm, +) -from reNgine.common_func import * -from reNgine.tasks import run_command, sanitize_url -from scanEngine.models import * -from startScan.models import * -from targetApp.forms import * -from targetApp.models import * logger = logging.getLogger(__name__) def index(request): + """ + index renders the index page for the target application. It returns the HTML template for the target index view, allowing users to access the main interface for managing targets. + + Args: + request (HttpRequest): The HTTP request object containing metadata about the request. + + Returns: + HttpResponse: The rendered HTML response for the target index page. 
+ """ # TODO bring default target page return render(request, 'target/index.html') @@ -49,7 +91,8 @@ def add_target(request, slug): # Multiple targets if multiple_targets: bulk_targets = [t.rstrip() for t in request.POST['addTargets'].split('\n') if t] - logger.info(f'Adding multiple targets: {bulk_targets}') + sanitized_targets = [target if isinstance(target, str) and validators.domain(target) else 'Invalid target' for target in bulk_targets] + logger.info('Adding multiple targets: %s', sanitized_targets) description = request.POST.get('targetDescription', '') h1_team_handle = request.POST.get('targetH1TeamHandle') organization_name = request.POST.get('targetOrganization') @@ -70,7 +113,8 @@ def add_target(request, slug): is_url = bool(validators.url(target)) # Set ip_domain / http_url based on type of input - logger.info(f'{target} | Domain? {is_domain} | IP? {is_ip} | CIDR range? {is_range} | URL? {is_url}') + sanitized_target = target if isinstance(target, str) and validators.domain(target) else 'Invalid target' + logger.info('%s | Domain? %s | IP? %s | CIDR range? %s | URL? %s', sanitized_target, is_domain, is_ip, is_range, is_url) if is_domain: domains.append(target) @@ -106,7 +150,13 @@ def add_target(request, slug): msg) continue - logger.info(f'IPs: {ips} | Domains: {domains} | URLs: {http_urls} | Ports: {ports}') + # Sanitize the lists for logging + sanitized_ips = [ip if validators.ipv4(ip) or validators.ipv6(ip) else 'Invalid IP' for ip in ips] + sanitized_domains = [domain if isinstance(domain, str) and validators.domain(domain) else 'Invalid Domain' for domain in domains] + sanitized_http_urls = [url if validators.url(url) else 'Invalid URL' for url in http_urls] + sanitized_ports = [port if isinstance(port, int) else 'Invalid Port' for port in ports] + logger.info('IPs: %s | Domains: %s | URLs: %s | Ports: %s', + sanitized_ips, sanitized_domains, sanitized_http_urls, sanitized_ports) for domain_name in domains: if not Domain.objects.filter(name=domain_name).exists(): @@ -120,11 +170,12 @@ def add_target(request, slug): domain.save() added_target_count += 1 if created: - logger.info(f'Added new domain {domain.name}') + logger.info('Added new domain %s', domain.name) if organization_name: organization = None - if Organization.objects.filter(name=organization_name).exists(): + organization_query = Organization.objects.filter(name=organization_name) + if organization_query.exists(): organization = organization_query[0] else: organization = Organization.objects.create( @@ -140,7 +191,7 @@ def add_target(request, slug): target_domain=domain, http_url=http_url) if created: - logger.info(f'Added new endpoint {endpoint.http_url}') + logger.info('Added new endpoint %s', endpoint.http_url) for ip_address in ips: ip_data = get_ip_info(ip_address) @@ -150,12 +201,12 @@ def add_target(request, slug): ip.version = ip_data.version ip.save() if created: - logger.warning(f'Added new IP {ip}') + logger.warning('Added new IP %s', ip) for port in ports: port, created = Port.objects.get_or_create(number=port_number) if created: - logger.warning(f'Added new port {port.number}.') + logger.warning('Added new port %s', port.number) # Import from txt / csv elif 'import-txt-target' in request.POST or 'import-csv-target' in request.POST: @@ -168,6 +219,13 @@ def add_target(request, slug): 'Files uploaded are not .txt or .csv files.') return http.HttpResponseRedirect(reverse('add_target', kwargs={'slug': slug})) + if (txt_file and txt_file.size == 0) or (csv_file and csv_file.size == 0): + 
messages.add_message( + request, + messages.ERROR, + 'The uploaded file is empty. Please upload a valid file.') + return http.HttpResponseRedirect(reverse('add_target', kwargs={'slug': slug})) + if txt_file: is_txt = txt_file.content_type == 'text/plain' or txt_file.name.split('.')[-1] == 'txt' if not is_txt: @@ -236,6 +294,10 @@ def add_target(request, slug): for ip in resolved_ips: is_domain = bool(validators.domain(ip)) is_ip = bool(validators.ipv4(ip)) or bool(validators.ipv6(ip)) + if not is_ip and not is_domain: + messages.add_message(request, messages.ERROR, f'IP {ip} is not a valid IP address / domain. Skipping.') + logger.warning('Invalid IP address/domain provided. Skipping.') + continue description = request.POST.get('targetDescription', '') h1_team_handle = request.POST.get('targetH1TeamHandle') if not Domain.objects.filter(name=ip).exists(): @@ -249,7 +311,7 @@ def add_target(request, slug): domain.save() added_target_count += 1 if created: - logger.info(f'Added new domain {domain.name}') + logger.info('Added new domain %s', domain.name) if is_ip: ip_data = get_ip_info(ip) ip, created = IpAddress.objects.get_or_create(address=ip) @@ -258,9 +320,9 @@ def add_target(request, slug): ip.version = ip_data.version ip.save() if created: - logger.info(f'Added new IP {ip}') + logger.info('Added new IP %s', ip) - except Exception as e: + except (Http404, ValueError) as e: logger.exception(e) messages.add_message( request, @@ -302,17 +364,35 @@ def list_target(request, slug): @has_permission_decorator(PERM_MODIFY_TARGETS, redirect_url=FOUR_OH_FOUR_URL) def delete_target(request, id): - obj = get_object_or_404(Domain, id=id) if request.method == "POST": - run_command(f'rm -rf {settings.RENGINE_RESULTS}/{obj.name}') - run_command(f'rm -rf {settings.RENGINE_RESULTS}/{obj.name}*') # for backward compatibility - obj.delete() - responseData = {'status': 'true'} - messages.add_message( - request, - messages.INFO, - 'Domain successfully deleted!') + try: + target = get_object_or_404(Domain, id=id) + run_command(f'rm -rf {settings.RENGINE_RESULTS}/{target.name}') + run_command(f'rm -rf {settings.RENGINE_RESULTS}/{target.name}*') # for backward compatibility + target.delete() + responseData = {'status': 'true'} + messages.add_message( + request, + messages.INFO, + 'Domain successfully deleted!' 
+ ) + except Http404: + if isinstance(id, int): # Ensure id is an integer + logger.error('Domain not found: %d', id) + else: + logger.error('Domain not found: Invalid ID provided') + messages.add_message( + request, + messages.ERROR, + 'Domain not found.') + responseData = {'status': 'false'} else: + valid_methods = ['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'OPTIONS', 'HEAD'] + if request.method in valid_methods: + logger.error('Invalid request method: %s', request.method) + else: + logger.error('Invalid request method: Unknown method provided') + responseData = {'status': 'false'} messages.add_message( request, @@ -506,23 +586,22 @@ def target_summary(request, slug, id): @has_permission_decorator(PERM_MODIFY_TARGETS, redirect_url=FOUR_OH_FOUR_URL) def add_organization(request, slug): form = AddOrganizationForm(request.POST or None, project=slug) - if request.method == "POST": - if form.is_valid(): - data = form.cleaned_data - project = Project.objects.get(slug=slug) - organization = Organization.objects.create( - name=data['name'], - description=data['description'], - project=project, - insert_date=timezone.now()) - for domain_id in request.POST.getlist("domains"): - domain = Domain.objects.get(id=domain_id) - organization.domains.add(domain) - messages.add_message( - request, - messages.INFO, - f'Organization {data["name"]} added successfully') - return http.HttpResponseRedirect(reverse('list_organization', kwargs={'slug': slug})) + if request.method == "POST" and form.is_valid(): + data = form.cleaned_data + project = Project.objects.get(slug=slug) + organization = Organization.objects.create( + name=data['name'], + description=data['description'], + project=project, + insert_date=timezone.now()) + for domain_id in request.POST.getlist("domains"): + domain = Domain.objects.get(id=domain_id) + organization.domains.add(domain) + messages.add_message( + request, + messages.INFO, + f'Organization {data["name"]} added successfully') + return http.HttpResponseRedirect(reverse('list_organization', kwargs={'slug': slug})) context = { "organization_active": "active", "form": form @@ -541,13 +620,20 @@ def list_organization(request, slug): @has_permission_decorator(PERM_MODIFY_TARGETS, redirect_url=FOUR_OH_FOUR_URL) def delete_organization(request, id): if request.method == "POST": - obj = get_object_or_404(Organization, id=id) - obj.delete() - responseData = {'status': 'true'} - messages.add_message( - request, - messages.INFO, - 'Organization successfully deleted!') + try: + organization = get_object_or_404(Organization, id=id) + organization.delete() + messages.add_message( + request, + messages.INFO, + 'Organization successfully deleted!') + responseData = {'status': 'true'} + except Http404: + messages.add_message( + request, + messages.ERROR, + 'Organization not found.') + responseData = {'status': 'false'} else: responseData = {'status': 'false'} messages.add_message( @@ -561,8 +647,8 @@ def delete_organization(request, id): def update_organization(request, slug, id): organization = get_object_or_404(Organization, id=id) form = UpdateOrganizationForm() + domain_list = [] if request.method == "POST": - print(request.POST.getlist("domains")) form = UpdateOrganizationForm(request.POST, instance=organization) if form.is_valid(): data = form.cleaned_data @@ -596,21 +682,3 @@ def update_organization(request, slug, id): "form": form } return render(request, 'organization/update.html', context) - -def get_ip_info(ip_address): - is_ipv4 = bool(validators.ipv4(ip_address)) - is_ipv6 = 
bool(validators.ipv6(ip_address)) - ip_data = None - if is_ipv4: - ip_data = ipaddress.IPv4Address(ip_address) - elif is_ipv6: - ip_data = ipaddress.IPv6Address(ip_address) - else: - return None - return ip_data - -def get_ips_from_cidr_range(target): - try: - return [str(ip) for ip in ipaddress.IPv4Network(target)] - except Exception as e: - logger.error(f'{target} is not a valid CIDR range. Skipping.') diff --git a/web/tests/test_nmap.py b/web/tests/test_nmap.py index 67364312..4091efc7 100644 --- a/web/tests/test_nmap.py +++ b/web/tests/test_nmap.py @@ -1,14 +1,14 @@ import logging import os import unittest +import pathlib os.environ['RENGINE_SECRET_KEY'] = 'secret' os.environ['CELERY_ALWAYS_EAGER'] = 'True' from celery.utils.log import get_task_logger from reNgine.settings import CELERY_DEBUG -from reNgine.tasks import parse_nmap_results, parse_nmap_vuln_output, parse_nmap_vulscan_output -import pathlib +from reNgine.tasks import parse_nmap_results logger = get_task_logger(__name__) DOMAIN_NAME = os.environ['DOMAIN_NAME'] @@ -34,7 +34,7 @@ def setUp(self): def test_nmap_parse(self): for xml_file in self.all_xml: vulns = parse_nmap_results(self.nmap_vuln_single_xml) - self.assertGreater(self.vulns, 0) + self.assertGreater(len(vulns), 0) # Fixed to use len(vulns) def test_nmap_vuln_single(self): pass diff --git a/web/tests/test_scan.py b/web/tests/test_scan.py index c3399164..a36fa355 100644 --- a/web/tests/test_scan.py +++ b/web/tests/test_scan.py @@ -2,20 +2,29 @@ import logging import os import unittest +import yaml +from dotenv import load_dotenv -os.environ['RENGINE_SECRET_KEY'] = 'secret' -os.environ['CELERY_ALWAYS_EAGER'] = 'True' +# Load environment variables from a .env file +load_dotenv() + +os.environ.setdefault('RENGINE_SECRET_KEY', os.getenv('RENGINE_SECRET_KEY', 'secret')) +os.environ.setdefault('CELERY_ALWAYS_EAGER', os.getenv('CELERY_ALWAYS_EAGER', 'True')) -import yaml -from celery.utils.log import get_task_logger from reNgine.settings import CELERY_DEBUG +from celery.utils.log import get_task_logger +from scanEngine.models import EngineType +from django.utils import timezone from reNgine.tasks import (dir_file_fuzz, fetch_url, http_crawl, initiate_scan, osint, port_scan, subdomain_discovery, vulnerability_scan) -from startScan.models import * +from startScan.models import Endpoint, Domain, ScanHistory, Subdomain logger = get_task_logger(__name__) -DOMAIN_NAME = os.environ['DOMAIN_NAME'] +# To pass the DOMAIN_NAME variable when running tests, you can use: +# DOMAIN_NAME=example.com python3 manage.py test +# Or set a default value if the environment variable is not defined +DOMAIN_NAME = os.environ.get('DOMAIN_NAME', 'example.com') # if not CELERY_DEBUG: # logging.disable(logging.CRITICAL) @@ -42,7 +51,7 @@ def setUp(self): scan_type=self.engine, start_scan_date=timezone.now()) self.scan.save() - self.endpoint, _ = EndPoint.objects.get_or_create( + self.endpoint, _ = Endpoint.objects.get_or_create( scan_history=self.scan, target_domain=self.domain, http_url=self.url) diff --git a/web/utils/test_base.py b/web/utils/test_base.py new file mode 100644 index 00000000..2b0b7a91 --- /dev/null +++ b/web/utils/test_base.py @@ -0,0 +1,57 @@ +import logging + +from utils.test_utils import TestDataGenerator, TestValidation +from django.test import TestCase, Client +from django.contrib.auth import get_user_model +from dashboard.views import on_user_logged_in + +__all__ = [ + 'BaseTestCase', +] + +class BaseTestCase(TestCase): + """ + Base test case for all API tests. 
+ Sets up common fixtures and mocks the user login process. + """ + + fixtures = [ + "dashboard.json", + "targetApp.json", + "scanEngine.json", + "startScan.json", + "recon_note.json", + "fixtures/auth.json", + "fixtures/django_celery_beat.json", + ] + + def setUp(self): + self.client = Client() + user = get_user_model() + self.user = user.objects.get(username="rengine") + + # Save original on_user_logged_in function + self.original_on_user_logged_in = on_user_logged_in + + # Replace on_user_logged_in with a mock function + def mock_on_user_logged_in(sender, request, **kwargs): + pass + + on_user_logged_in.__code__ = mock_on_user_logged_in.__code__ + + # Login + self.client.force_login(self.user) + + # Ensure the session is saved after login + self.client.session.save() + + # Create test data + self.data_generator = TestDataGenerator() + self.test_validation = TestValidation() + + # Disable logging for tests + logging.disable(logging.CRITICAL) + + def tearDown(self): + # Restore original on_user_logged_in function + on_user_logged_in.__code__ = self.original_on_user_logged_in.__code__ diff --git a/web/utils/test_utils.py b/web/utils/test_utils.py new file mode 100644 index 00000000..bfae9218 --- /dev/null +++ b/web/utils/test_utils.py @@ -0,0 +1,529 @@ +""" +This file contains the test cases +""" + +import logging +import json + +from django.utils import timezone +from django.test import override_settings +from django.template.loader import get_template +from django.template import Template + +from dashboard.models import Project, SearchHistory +from recon_note.models import TodoNote +from scanEngine.models import ( + EngineType, + Hackerone, + InstalledExternalTool, + InterestingLookupModel, + Proxy, + VulnerabilityReportSetting, + Wordlist, +) + +from startScan.models import ( + Command, + DirectoryFile, + DirectoryScan, + Dork, + Email, + EndPoint, + Employee, + IpAddress, + ScanActivity, + ScanHistory, + SubScan, + Subdomain, + Technology, + Vulnerability, + Port, + CountryISO, + MetaFinderDocument, +) + +from targetApp.models import ( + DNSRecord, + Domain, + DomainInfo, + DomainRegistration, + HistoricalIP, + NameServer, + Organization, + Registrar, + RelatedDomain, + WhoisStatus, +) +__all__ = [ + 'TestDataGenerator' +] + +class TestDataGenerator: + """ + Base test case for all API tests. + Sets up common fixtures and mocks the user login process. 
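The BaseTestCase above silences dashboard.views.on_user_logged_in by overwriting its __code__ object, because the signal dispatcher already holds a reference to the connected function, so patching the module attribute alone would have no effect on the registered receiver. A more conventional alternative is to disconnect the receiver for the duration of the test. The sketch below assumes the handler is connected to django.contrib.auth.signals.user_logged_in (the signal Client.force_login() fires); that wiring is not shown in this patch.

from django.contrib.auth.signals import user_logged_in
from django.test import TestCase

from dashboard.views import on_user_logged_in


class SignalQuietTestCase(TestCase):
    """Illustrative alternative: temporarily disconnect the login handler."""

    def setUp(self):
        super().setUp()
        # disconnect() returns True only if the receiver was actually connected.
        self._handler_was_connected = user_logged_in.disconnect(on_user_logged_in)

    def tearDown(self):
        if self._handler_was_connected:
            user_logged_in.connect(on_user_logged_in)
        super().tearDown()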
+ """ + + + subscans = [] + vulnerabilities = [] + + # Disable logging for tests + logging.disable(logging.CRITICAL) + + + def create_project_base(self): + """Create a basic project setup with essential objects.""" + self.create_project() + self.create_domain() + self.create_scan_history() + self.create_subdomain() + self.create_endpoint() + self.create_port() + self.create_ip_address() + + def create_project_full(self): + """Create a full project setup with all related objects.""" + self.create_project_base() + self.create_vulnerability() + self.create_directory_scan() + self.create_directory_file() + self.create_subscan() + self.create_interesting_lookup_model() + self.create_search_history() + self.create_todo_note() + self.create_engine_type() + self.create_organization() + self.create_employee() + self.create_email() + self.create_dork() + self.create_whois_status() + self.create_name_server() + self.create_dns_record() + self.create_related_domain() + self.create_historical_ip() + self.create_technology() + self.create_country_iso() + self.create_domain_registration() + self.create_domain_info() + self.create_metafinder_document() + self.create_scan_activity() + self.create_command() + self.create_installed_external_tool() + self.create_wordlist() + self.create_proxy() + self.create_hackerone() + self.create_report_setting() + self.create_external_tool() + + def create_project(self): + """Create and return a test project.""" + self.project = Project.objects.create( + name="Test Project", + insert_date=timezone.now(), + slug="test-project" + ) + return self.project + + def create_domain(self): + """Create and return a test domain.""" + self.domain = Domain.objects.create( + name="example.com", + project=self.project, + insert_date=timezone.now() + ) + return self.domain + + def create_scan_history(self): + """Create and return a test scan history.""" + self.scan_history = ScanHistory.objects.create( + domain=self.domain, + start_scan_date=timezone.now(), + scan_type_id=1, + scan_status=2, + tasks=[ + 'fetch_url', + 'subdomain_discovery', + 'port_scan', + 'vulnerability_scan', + 'osint', + 'dir_file_fuzz', + 'screenshot', + 'waf_detection', + 'nuclei_scan', + 'endpoint_scan' + ] + ) + return self.scan_history + + def create_subdomain(self, name="admin.example.com"): + """Create and return a test subdomain.""" + self.subdomain = Subdomain.objects.create( + name=name, + target_domain=self.domain, + scan_history=self.scan_history, + ) + return self.subdomain + + def create_endpoint(self, name="endpoint"): + """Create and return a test endpoint.""" + self.endpoint = EndPoint.objects.create( + target_domain=self.domain, + subdomain=self.subdomain, + scan_history=self.scan_history, + discovered_date=timezone.now(), + http_url=f"https://admin.example.com/{name}", + ) + return self.endpoint + + def create_vulnerability(self): + """Create and return a test vulnerability.""" + self.vulnerabilities.append( + Vulnerability.objects.create( + name="Common Vulnerability", + severity=1, + discovered_date=timezone.now(), + target_domain=self.domain, + subdomain=self.subdomain, + scan_history=self.scan_history, + endpoint=self.endpoint, + ) + ) + return self.vulnerabilities + + def create_directory_scan(self): + """Create and return a test directory scan.""" + self.directory_scan = DirectoryScan.objects.create( + command_line="Test Command", + scanned_date=timezone.now() + ) + return self.directory_scan + + def create_directory_file(self): + """Create and return a test directory file.""" + 
self.directory_file = DirectoryFile.objects.create( + name="test.txt", + url="https://example.com/test.txt" + ) + return self.directory_file + + def create_subscan(self): + """Create and return a test subscan.""" + self.subscans.append( + SubScan.objects.create( + start_scan_date=timezone.now(), + scan_history=self.scan_history, + subdomain=self.subdomain, + status=1, + ) + ) + return self.subscans + + def create_installed_external_tool(self): + """Create and return a test installed external tool.""" + self.installed_external_tool = InstalledExternalTool.objects.create( + name="OneForAll", + github_url="https://github.com/shmilylty/OneForAll", + update_command="git pull", + install_command="git clone https://github.com/shmilylty/OneForAll", + github_clone_path="/home/rengine/tools/.github/OneForAll" + ) + return self.installed_external_tool + + def create_todo_note(self): + """Create and return a test todo note.""" + self.todo_note = TodoNote.objects.create( + title="Test Note", + description="Test Description", + project=self.project, + subdomain=self.subdomain, + scan_history=self.scan_history, + ) + return self.todo_note + + def create_search_history(self): + """Create and return a test search history.""" + self.search_history = SearchHistory.objects.create(query="Test Query") + return self.search_history + + def create_interesting_lookup_model(self): + """Create and return a test interesting lookup model.""" + self.interesting_lookup_model = InterestingLookupModel.objects.create( + keywords="admin", + custom_type=True, + title_lookup=True, + url_lookup=True, + condition_200_http_lookup=False, + ) + return self.interesting_lookup_model + + def create_engine_type(self): + """Create and return a test engine type.""" + self.engine_type = EngineType.objects.create( + engine_name="Test Engine", + yaml_configuration="http_crawl: {}", + default_engine=True, + ) + return self.engine_type + + def create_organization(self): + """Create and return a test organization.""" + self.organization = Organization.objects.create( + name="Test Organization", + description="Test Description", + insert_date=timezone.now(), + project=self.project, + ) + self.organization.domains.add(self.domain) + return self.organization + + def create_employee(self): + """Create and return a test employee.""" + self.employee = Employee.objects.create(name="Test Employee") + self.scan_history.employees.add(self.employee) + return self.employee + + def create_email(self): + """Create and return a test email.""" + self.email = Email.objects.create( + address="test@example.com", + password="password" + ) + self.scan_history.emails.add(self.email) + return self.email + + def create_dork(self): + """Create and return a test dork.""" + self.dork = Dork.objects.create(type="Test Dork", url="https://example.com") + self.scan_history.dorks.add(self.dork) + return self.dork + + def create_domain_info(self): + """Create and return a test domain info.""" + self.domain_info = DomainInfo.objects.create( + created=timezone.now(), + updated=timezone.now(), + expires=timezone.now(), + geolocation_iso="US", + registrant=self.domain_registration, + admin=self.domain_registration, + tech=self.domain_registration, + ) + self.domain_info.name_servers.add(self.name_server) + self.domain_info.dns_records.add(self.dns_record) + self.domain_info.related_domains.add(self.related_domain) + self.domain_info.related_tlds.add(self.related_domain) + self.domain_info.similar_domains.add(self.related_domain) + 
self.domain_info.historical_ips.add(self.historical_ip) + return self.domain_info + + def create_whois_status(self): + """Create and return a test WHOIS status.""" + self.whois_status = WhoisStatus.objects.create( + name="clienttransferprohibited", + ) + return self.whois_status + + def create_name_server(self): + """Create and return a test name server.""" + self.name_server = NameServer.objects.create( + name="Test Name Server", + ) + return self.name_server + + def create_dns_record(self): + """Create and return a test DNS record.""" + self.dns_record = DNSRecord.objects.create( + name="Test DNS Record", + type="a", + ) + return self.dns_record + + def create_related_domain(self): + """Create and return a test related domain.""" + self.related_domain = RelatedDomain.objects.create( + name="test.com", + ) + return self.related_domain + + def create_domain_registration(self): + """Create and return a test domain registration.""" + self.domain_registration = DomainRegistration.objects.create( + name="Test Domain Registration" + ) + return self.domain_registration + + def create_registrar(self): + """Create and return a test registrar.""" + self.registrar = Registrar.objects.create( + name="Test Registrar", + ) + return self.registrar + + def create_historical_ip(self): + """Create and return a test historical IP.""" + self.historical_ip = HistoricalIP.objects.create(ip="127.0.0.1") + return self.historical_ip + + def create_technology(self): + """Create and return a test technology.""" + self.technology = Technology.objects.create(name="Test Technology") + self.subdomain.technologies.add(self.technology) + return self.technology + + def create_country_iso(self): + """Create and return a test country ISO.""" + self.country_iso = CountryISO.objects.create(iso="US") + return self.country_iso + + def create_ip_address(self): + """Create and return a test IP address.""" + self.ip_address = IpAddress.objects.create(address="1.1.1.1") + self.ip_address.ports.add(self.port) + self.subdomain.ip_addresses.add(self.ip_address) + return self.ip_address + + def create_port(self): + """Create and return a test port.""" + self.port = Port.objects.create( + number=80, service_name="http", description="open", is_uncommon=True + ) + return self.port + + def create_metafinder_document(self): + """Create and return a test MetaFinder document.""" + self.metafinder_document = MetaFinderDocument.objects.create( + title="Test MetaFinder Document", + url="https://example.com", + author="Test Author", + doc_name="test.pdf", + creation_date=timezone.now(), + modified_date=timezone.now(), + scan_history=self.scan_history, + target_domain=self.domain, + subdomain=self.subdomain, + ) + return self.metafinder_document + + def create_scan_activity(self): + """Create and return a test scan activity.""" + self.scan_activity = ScanActivity.objects.create( + name="Test Activity", + title="Test Type", + time=timezone.now(), + scan_of=self.scan_history, + status=1 + ) + return self.scan_activity + + def create_command(self): + """Create and return a test command.""" + self.command = Command.objects.create( + command="test command", + time=timezone.now(), + scan_history=self.scan_history, + activity=self.scan_activity + ) + return self.command + + def create_wordlist(self): + """ + Create a test wordlist. + """ + self.wordlist = Wordlist.objects.create(name='Test Wordlist', short_name='test', count=100) + return self.wordlist + + def create_proxy(self): + """ + Create a test proxy. 
+ """ + self.proxy = Proxy.objects.create(use_proxy=True, proxies='127.0.0.1') + return self.proxy + + def create_hackerone(self): + """ + Create a test hackerone. + """ + self.hackerone = Hackerone.objects.create(username='test', api_key='testkey') + return self.hackerone + + def create_report_setting(self): + """ + Create a test report setting. + """ + self.report_setting = VulnerabilityReportSetting.objects.create( + primary_color='#000000', + secondary_color='#FFFFFF' + ) + return self.report_setting + + def create_external_tool(self): + """ + Create a test external tool. + """ + self.external_tool = InstalledExternalTool.objects.create( + name='Test Tool', + github_url='https://github.com/test/tool') + return self.external_tool + +class TestValidation: + + def is_json(self, value): + try: + json.loads(value) + return True + except ValueError: + return False + +class MockTemplate: + """ + mock_template is a decorator designed to mock a specific Django template during unit tests. + It temporarily overrides the template settings to return a mock template when the specified + template name is requested, allowing for controlled testing of views that rely on that template. + Args: + template_name (str): The name of the template to be mocked. + + Returns: + function: A decorator that wraps the test function, applying the mock template settings. + + Examples: + @mock_template('my_template.html') + def test_my_view(self): + ... + """ + @staticmethod + def mock_template(template_name): + """ + Decorator to mock a specific Django template during unit tests. + """ + def decorator(test_func): + """ + Decorator function to wrap the test function and apply the mock template settings. + """ + def wrapper(*args, **kwargs): + with override_settings(TEMPLATES=[{ + 'BACKEND': 'django.template.backends.django.DjangoTemplates', + 'DIRS': [], + 'APP_DIRS': True, + 'OPTIONS': { + 'context_processors': [ + 'django.template.context_processors.debug', + 'django.template.context_processors.request', + 'django.contrib.auth.context_processors.auth', + 'django.contrib.messages.context_processors.messages', + ], + }, + }]): + original_get_template = get_template + def mock_get_template(name): + return Template('') if name == template_name else original_get_template(name) + + get_template.patched = mock_get_template + try: + return test_func(*args, **kwargs) + finally: + del get_template.patched + + return wrapper + + return decorator From 09b837ad6a0f6406aa9e9d53aef88969e28b57d1 Mon Sep 17 00:00:00 2001 From: Psyray Date: Tue, 17 Sep 2024 00:46:04 +0200 Subject: [PATCH 260/262] feat(todo): enhance todo functionality and error handling (#198) * feat(todo): enhance todo functionality and error handling - Refactored the JavaScript code for creating and displaying todo items to use a template-based approach. - Improved error handling and user feedback for adding, deleting, and updating todo items. - Updated API endpoints and views to include better validation and error messages. - Enhanced the UI with new icons and required fields for better user experience. - Added a hidden template for todo items to streamline the creation of new items dynamically. * feat(todo): enhance todo functionality with async operations and input validation - Refactored JavaScript code to use const instead of var for variable declarations. - Updated event handlers to use async functions for better handling of asynchronous operations. 
- Improved input validation in the HTML form for adding todos, including setting minimum and maximum lengths and allowed character patterns.
- Enhanced error handling and user feedback for adding, deleting, and updating todo items.
- Added detailed docstrings to Python view functions for better code documentation and readability.
- Introduced logging for JSON decode errors in the Python views.

* fix(todo): streamline button click handling and improve UI feedback

- Simplified the button click handling logic in todo.js by removing redundant code and improving readability.
- Enhanced the user interface feedback for important task toggling and deletion confirmation.
- Added autocomplete="on" to the task input field in index.html for better user experience.
- Removed redundant class addition in the todo item creation process.

* refactor(todo): modularize event listeners and improve search functionality

- Refactored todo.js to modularize event listeners into separate functions.
- Enhanced search functionality to save and load search terms from local storage.
- Updated the UI to include a clear search button and improved button styling for task actions.
- Adjusted HTML and CSS to support the new search and action button features.

* refactor(todo.js): remove redundant variable assignments for PerfectScrollbar instances

- Removed redundant variable assignments for PerfectScrollbar instances in the populateTodofunction.
- Added a missing semicolon in the importantBtnListener function.

* fix(todo): correct function names and enhance task filtering

- Corrected the function name from populateTodofunction to populateTodo.
- Added a new listener function addTaskPopupListener to handle task popups.
- Renamed addActionsBtnListener to actionsBtnListener and added a call to searchFunction within it.
- Introduced a new property is_important for tasks and updated the template to conditionally display the important badge.
- Enhanced the search functionality to apply the current filter after searching.
- Added a new function applyCurrentFilter to filter tasks based on their status (Todo, Done, Important).

* fix(note): reset var name for note saving

* fix: update data keys in todo.js

- Updated key names in todo.js to ensure consistency with backend expectations.

* fix: update CSS styles

- Adjusted CSS styles in todolist.css for better alignment and spacing.

* chore: standardize "to-do" terminology across templates and scripts

- Updated various HTML templates and JavaScript files to standardize the terminology from "Todo" to "to-do".

* fix: update content type in tests and error messages and correct HTML placeholder

- Updated test cases to include content_type='application/json' in POST requests.
- Modified error messages in AddReconNote view to remove checks for scan_history_id and subdomain_id.
- Corrected the placeholder text in the HTML input field for adding a to-do.

* refactor(todo): simplify badge update logic and remove unused checkbox elements

- Simplified the logic for updating badge notifications in the todo list by consolidating repeated code into a single function.
- Removed unused checkbox elements from the todo item template and related HTML.

* feat: add close button to modal

- Added a "Close" button to the modal footer in the note index template.
- Implemented functionality to hide the modal when the "Close" button is clicked.
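For context, the reworked AddReconNote endpoint reads a JSON body rather than form data, and scan_history_id / subdomain_id are now optional. Below is a minimal, illustrative sketch (not part of the diff) of a client call, following the Django test client pattern from test_recon_note.py and the /api/add/recon_note/ path that todo.js posts to; the helper name and example values are assumptions.

    # Illustrative sketch only: assumes Django's test client and the JSON keys sent by todo.js.
    import json

    def add_recon_note(client, project_slug, scan_history_id=None, subdomain_id=None):
        """Post a recon note as JSON, mirroring the pattern in test_recon_note.py."""
        payload = {
            "title": "Example note",                   # required by the view
            "description": "Created via the JSON API",
            "project": project_slug,                   # required by the view
        }
        # Optional associations; the required-field checks for these were removed.
        if scan_history_id is not None:
            payload["scan_history_id"] = scan_history_id
        if subdomain_id is not None:
            payload["subdomain_id"] = subdomain_id
        return client.post(
            "/api/add/recon_note/",
            json.dumps(payload),
            content_type="application/json",
        )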
--- web/api/views.py | 12 +- web/recon_note/static/note/js/todo.js | 561 +++++++++--------- web/recon_note/templates/note/index.html | 121 ++-- web/recon_note/tests/test_recon_note.py | 12 +- web/recon_note/views.py | 156 +++-- .../static/startScan/js/detail_scan.js | 4 +- .../templates/startScan/detail_scan.html | 18 +- .../templates/startScan/subdomains.html | 2 +- web/static/assets/js/vendor.min.js | 1 - web/static/custom/custom.css | 8 +- web/static/custom/todo.js | 10 +- web/static/plugins/snackbar/snackbar.min.js | 1 - web/static/plugins/todo/todolist.css | 16 +- web/targetApp/templates/target/summary.html | 18 +- .../base/_items/recon_note_modal.html | 8 +- web/templates/base/_items/top_nav.html | 2 +- 16 files changed, 546 insertions(+), 404 deletions(-) diff --git a/web/api/views.py b/web/api/views.py index d753cb0a..3eea62e9 100644 --- a/web/api/views.py +++ b/web/api/views.py @@ -648,10 +648,6 @@ def post(self, request): description = data.get('description') project = data.get('project') - if subdomain_id is None: - return Response({"status": False, "error": "Subdomain ID is required."}, status=400) - if scan_history_id is None: - return Response({"status": False, "error": "Scan history ID is required."}, status=400) if not title: return Response({"status": False, "error": "Title is required."}, status=400) if not project: @@ -680,12 +676,10 @@ def post(self, request): note.project = project note.save() - response = {'status': True} + return Response({"status": True, "error": False, "id": note.id}, status=200) except Exception as e: - response = {'status': False, 'message': str(e)} - - return Response(response) - + logger.error(e) + return Response({"status": False, "error": "An error occurred."}, status=400) class ToggleSubdomainImportantStatus(APIView): def post(self, request): diff --git a/web/recon_note/static/note/js/todo.js b/web/recon_note/static/note/js/todo.js index 7d8b8d50..174df53c 100644 --- a/web/recon_note/static/note/js/todo.js +++ b/web/recon_note/static/note/js/todo.js @@ -1,34 +1,52 @@ - -function populateTodofunction(project=null){ - $('.input-search').on('keyup', function() { - var rex = new RegExp($(this).val(), 'i'); - $('.todo-box .todo-item').hide(); - $('.todo-box .todo-item').filter(function() { - return rex.test($(this).text()); - }).show(); - }); - - const taskViewScroll = new PerfectScrollbar('.task-text', { +const populateTodo = function(project=null){ + new PerfectScrollbar('.task-text', { wheelSpeed:.5, swipeEasing:!0, minScrollbarLength:40, maxScrollbarLength:300, suppressScrollX : true }); + new PerfectScrollbar('.todo-box-scroll', { + suppressScrollX : true + }); - new dynamicBadgeNotification('allList'); - new dynamicBadgeNotification('completedList'); - new dynamicBadgeNotification('importantList'); + new PerfectScrollbar('.todoList-sidebar-scroll', { + suppressScrollX : true + }); + + addTaskPopupListener(project); + addTaskBtnListener(project); + actionsBtnListener(); + checkBtnListener(); + importantBtnListener(); + todoItemListener(); + deleteBtnListener(); + + // Load search term from local storage if it exists + const savedSearchTerm = localStorage.getItem('searchTerm'); + if (savedSearchTerm) { + $('.input-search').val(savedSearchTerm); // Set the input value + searchFunction(); + } + updateBadgeCounts(); +} + +const actionsBtnListener = function(){ + const $btns = $('.list-actions').click((event) => { + const selectedId = event.currentTarget.id; + const $el = $('.' 
+ selectedId); + $('#ct > div').hide(); + $el.fadeIn(); + $btns.removeClass('active'); + $(event.currentTarget).addClass('active'); + + // Apply search and filter when changing menu + searchFunction(); + }); +} - $('.mail-menu').on('click', function(event) { - $('.tab-title').addClass('mail-menu-show'); - $('.mail-overlay').addClass('mail-overlay-show'); - }) - $('.mail-overlay').on('click', function(event) { - $('.tab-title').removeClass('mail-menu-show'); - $('.mail-overlay').removeClass('mail-overlay-show'); - }) - $('#addTask').on('click', function(event) { +const addTaskBtnListener = function(project) { + $('#addTask').on('click', function (event) { event.preventDefault(); $('#task').val(''); @@ -38,272 +56,213 @@ function populateTodofunction(project=null){ $('.edit-tsk').hide(); $('#addTaskModal').modal('show'); const ps = new PerfectScrollbar('.todo-box-scroll', { - suppressScrollX : true + suppressScrollX: true }); - populateScanHistory(project=project); - - }); - const ps = new PerfectScrollbar('.todo-box-scroll', { - suppressScrollX : true - }); + populateScanHistory(project); - const todoListScroll = new PerfectScrollbar('.todoList-sidebar-scroll', { - suppressScrollX : true }); +} - var $btns = $('.list-actions').click(function() { - if (this.id == 'all-list') { - var $el = $('.' + this.id).fadeIn(); - $('#ct > div').not($el).hide(); - } else { - var $el = $('.' + this.id).fadeIn(); - $('#ct > div').not($el).hide(); - } - $btns.removeClass('active'); - $(this).addClass('active'); - }) - - checkCheckbox(); - importantDropdown(); - todoItem(); - deleteDropdown(); - - $(".add-tsk").click(function(){ - - var $_task = document.getElementById('task').value; - - var $_taskDescriptionText = document.getElementById('taskdescription').value; - - var $_taskScanHistory = $("#scanHistoryIDropdown option:selected").text(); - - var $_taskSubdomain = $("#subdomainDropdown option:selected").text(); +const addTaskPopupListener = function(project) { + $(".add-tsk").click(async function () { + try { + const $_task = document.getElementById('task').value; + const $_taskDescriptionText = document.getElementById('taskdescription').value; + const $_taskScanHistory = $("#scanHistoryIDropdown option:selected").text(); + const $_taskSubdomain = $("#subdomainDropdown option:selected").text(); + let $_targetText = ''; + + if ($_taskScanHistory != 'Choose Scan History...') { + $_targetText = $_taskScanHistory; + } - var $_targetText = ''; + if ($_taskSubdomain != 'Choose Subdomain...') { + $_targetText += ' Subdomain : ' + $_taskSubdomain; + } - if ($_taskScanHistory != 'Choose Scan History...') { - $_targetText = $_taskScanHistory; - } + let data = { + 'title': $_task, + 'description': $_taskDescriptionText + }; - if ($_taskSubdomain != 'Choose Subdomain...') { - $_targetText += ' Subdomain : ' + $_taskSubdomain; - } + if ($("#scanHistoryIDropdown").val() && $("#scanHistoryIDropdown").val() != 'Choose Scan History...') { + data['scan_history_id'] = parseInt($("#scanHistoryIDropdown").val()); + } - $html = '
'+ - '
'+ - '
'+ - ''+ - '
'+ - - '
'+ - '
'+htmlEncode($_task)+'
'+ - '

'+$_targetText+'

'+ - "

"+htmlEncode($_taskDescriptionText)+"

"+ - ''+ - - '
'+ - ''+ - '
'+ - - ''+ - ''; - - - $("#ct").prepend($html); - $('#addTaskModal').modal('hide'); - checkCheckbox(); - todoItem(); - importantDropdown(); - deleteDropdown(); - new dynamicBadgeNotification('allList'); - $(".list-actions#all-list").trigger('click'); - - data = { - 'title': $_task, - 'description': $_taskDescriptionText - } + if ($("#subdomainDropdown").val() != 'Choose Subdomain...') { + data['subdomain_id'] = parseInt($("#subdomainDropdown").val()); + } - if ($("#scanHistoryIDropdown").val() && $("#scanHistoryIDropdown").val() != 'Choose Scan History...') { - data['scan_history'] = parseInt($("#scanHistoryIDropdown").val()); - } + if (project) { + data['project'] = project; + } - if ($("#subdomainDropdown").val() != 'Choose Subdomain...') { - data['subdomain'] = parseInt($("#subdomainDropdown").val()); - } + let response = await fetch('/api/add/recon_note/', { + method: 'post', + headers: { + "X-CSRFToken": getCookie("csrftoken"), + 'Content-Type': 'application/json' + }, + body: JSON.stringify(data) + }); - if (project) { - data['project'] = project; + const responseData = await response.json(); + swal.queue([{ + title: response.status === 200 ? 'Note added successfully!' : + response.status === 400 ? 'Oops! Unable to add todo!\r\n' + responseData.error : + response.status === 404 ? 'Oops! Note not found!\r\n' + responseData.error : + 'Oops! An error occurred!\r\n' + responseData.error, + icon: response.status === 200 ? 'success' : 'error' + }]); + + if (response.status === 200) { + const newNote = { + id: responseData.id, + title: htmlEncode($_task), + description: htmlEncode($_taskDescriptionText), + domain_name: htmlEncode($_targetText), + subdomain_name: htmlEncode($_taskSubdomain), + is_done: false, + is_important: false + }; + + let todoHTML = $('#todo-template').html(); + + todoHTML = todoHTML + .replace(/{task_id}/g, newNote.id) + .replace(/{title}/g, newNote.title) + .replace(/{target_text}/g, newNote.domain_name ? `Domain: ${newNote.domain_name}` : '') + .replace(/{description}/g, newNote.description) + .replace(/{is_done}/g, newNote.is_done ? 'todo-task-done' : '') + .replace(/{checked}/g, newNote.is_done ? 'checked' : '') + .replace(/{is_important}/g, newNote.is_important ? 'todo-task-important' : ''); + + const $newTodo = $('
').append(todoHTML); + + $("#ct").prepend($newTodo); + $('#addTaskModal').modal('hide'); + checkBtnListener(); + todoItemListener(); + importantBtnListener(); + deleteBtnListener(); + new dynamicBadgeNotification('allList'); + $(".list-actions#all-list").trigger('click'); + } + } catch (error) { + console.error('Error adding todo:', error); + swal('Oops! Something went wrong!', error.message, 'error'); } - - fetch('/api/add/recon_note/', { - method: 'post', - headers: { - "X-CSRFToken": getCookie("csrftoken"), - 'Content-Type': 'application/json' - }, - body: JSON.stringify(data) - }).then(res => res.json()) - .then(res => console.log(res)); }); - - $('.tab-title .nav-pills a.nav-link').on('click', function(event) { - $(this).parents('.mail-box-container').find('.tab-title').removeClass('mail-menu-show') - $(this).parents('.mail-box-container').find('.mail-overlay').removeClass('mail-overlay-show') - }) - } -function dynamicBadgeNotification( setTodoCategoryCount ) { - var todoCategoryCount = setTodoCategoryCount; - - // Get Parents Div(s) - var get_ParentsDiv = $('.todo-item'); - var get_TodoAllListParentsDiv = $('.todo-item.all-list'); - var get_TodoCompletedListParentsDiv = $('.todo-item.todo-task-done'); - var get_TodoImportantListParentsDiv = $('.todo-item.todo-task-important'); +const dynamicBadgeNotification = function(setTodoCategoryCount) { + const todoCategoryCount = setTodoCategoryCount; - // Get Parents Div(s) Counts - var get_TodoListElementsCount = get_TodoAllListParentsDiv.length; - var get_CompletedTaskElementsCount = get_TodoCompletedListParentsDiv.length; - var get_ImportantTaskElementsCount = get_TodoImportantListParentsDiv.length; + // Compter les éléments en se basant uniquement sur les classes CSS + const get_TodoAllListParentsDiv = $('.todo-item').not('.todo-item-template'); + const get_TodoCompletedListParentsDiv = $('.todo-item.todo-task-done').not('.todo-item-template'); + const get_TodoImportantListParentsDiv = $('.todo-item.todo-task-important').not('.todo-item-template'); - // Get Badge Div(s) - var getBadgeTodoAllListDiv = $('#all-list .todo-badge'); - var getBadgeCompletedTaskListDiv = $('#todo-task-done .todo-badge'); - var getBadgeImportantTaskListDiv = $('#todo-task-important .todo-badge'); + // Obtenir les comptes + const get_TodoListElementsCount = get_TodoAllListParentsDiv.length; + const get_CompletedTaskElementsCount = get_TodoCompletedListParentsDiv.length; + const get_ImportantTaskElementsCount = get_TodoImportantListParentsDiv.length; + // Obtenir les éléments de badge + const getBadgeTodoAllListDiv = $('#all-list .todo-badge'); + const getBadgeCompletedTaskListDiv = $('#todo-task-done .todo-badge'); + const getBadgeImportantTaskListDiv = $('#todo-task-important .todo-badge'); - if (todoCategoryCount === 'allList') { - if (get_TodoListElementsCount === 0) { - getBadgeTodoAllListDiv.text(''); - return; - } - if (get_TodoListElementsCount > 9) { - getBadgeTodoAllListDiv.css({ - padding: '2px 0px', - height: '25px', - width: '25px' - }); - } else if (get_TodoListElementsCount <= 9) { - getBadgeTodoAllListDiv.removeAttr('style'); + // Fonction pour mettre à jour un badge + const updateBadge = function(badgeElement, count) { + if (count === 0) { + badgeElement.text(''); + } else { + badgeElement.text(count); + if (count > 9) { + badgeElement.css({ + padding: '2px 0px', + height: '25px', + width: '25px' + }); + } else { + badgeElement.removeAttr('style'); + } } - getBadgeTodoAllListDiv.text(get_TodoListElementsCount); + }; + + // Mettre à jour les badges 
en fonction de la catégorie + if (todoCategoryCount === 'allList' || todoCategoryCount === undefined) { + updateBadge(getBadgeTodoAllListDiv, get_TodoListElementsCount); } - else if (todoCategoryCount === 'completedList') { - if (get_CompletedTaskElementsCount === 0) { - getBadgeCompletedTaskListDiv.text(''); - return; - } - if (get_CompletedTaskElementsCount > 9) { - getBadgeCompletedTaskListDiv.css({ - padding: '2px 0px', - height: '25px', - width: '25px' - }); - } else if (get_CompletedTaskElementsCount <= 9) { - getBadgeCompletedTaskListDiv.removeAttr('style'); - } - getBadgeCompletedTaskListDiv.text(get_CompletedTaskElementsCount); + if (todoCategoryCount === 'completedList' || todoCategoryCount === undefined) { + updateBadge(getBadgeCompletedTaskListDiv, get_CompletedTaskElementsCount); } - else if (todoCategoryCount === 'importantList') { - if (get_ImportantTaskElementsCount === 0) { - getBadgeImportantTaskListDiv.text(''); - return; - } - if (get_ImportantTaskElementsCount > 9) { - getBadgeImportantTaskListDiv.css({ - padding: '2px 0px', - height: '25px', - width: '25px' - }); - } else if (get_ImportantTaskElementsCount <= 9) { - getBadgeImportantTaskListDiv.removeAttr('style'); - } - getBadgeImportantTaskListDiv.text(get_ImportantTaskElementsCount); + if (todoCategoryCount === 'importantList' || todoCategoryCount === undefined) { + updateBadge(getBadgeImportantTaskListDiv, get_ImportantTaskElementsCount); } } -function deleteDropdown() { - $('.action-dropdown .dropdown-menu .delete.dropdown-item').click(function() { - var id = this.id.split('_')[1]; - var main_this = this; - swal.queue([{ +const deleteBtnListener = function() { + $('.actions-btn .delete-btn').click(async function() { + const id = this.id.split('_')[1]; + const main_this = this; + await swal.queue([{ title: 'Are you sure you want to delete this Recon Note?', text: "You won't be able to revert this!", - type: 'warning', + icon: 'warning', showCancelButton: true, confirmButtonText: 'Delete', padding: '2em', showLoaderOnConfirm: true, - preConfirm: function() { - return fetch('../delete_note', { + preConfirm: async function() { + const response = await fetch('/recon_note/delete_note', { method: 'POST', credentials: "same-origin", headers: { "X-CSRFToken": getCookie("csrftoken") }, - body: JSON.stringify({ - 'id': parseInt(id), - }) - }) - .then(function (response) { - if(!$(main_this).parents('.todo-item').hasClass('todo-task-trash')) { - var getTodoParent = $(main_this).parents('.todo-item'); - var getTodoClass = getTodoParent.attr('class'); - - var getFirstClass = getTodoClass.split(' ')[1]; - var getSecondClass = getTodoClass.split(' ')[2]; - var getThirdClass = getTodoClass.split(' ')[3]; - - if (getFirstClass === 'all-list') { - getTodoParent.removeClass(getFirstClass); - } - if (getSecondClass === 'todo-task-done' || getSecondClass === 'todo-task-important') { - getTodoParent.removeClass(getSecondClass); - } - if (getThirdClass === 'todo-task-done' || getThirdClass === 'todo-task-important') { - getTodoParent.removeClass(getThirdClass); - } - $(main_this).parents('.todo-item').addClass('todo-task-trash'); - } else if($(main_this).parents('.todo-item').hasClass('todo-task-trash')) { - $(main_this).parents('.todo-item').removeClass('todo-task-trash'); - } - new dynamicBadgeNotification('allList'); - new dynamicBadgeNotification('completedList'); - new dynamicBadgeNotification('importantList'); - }) - .catch(function() { + body: JSON.stringify({ 'id': parseInt(id) }) + }).catch(error => { + swal('Network error', 
'An error occurred while deleting the note.', 'error'); + throw error; + }); + + if (!response.ok) { + const errorMessages = { + 400: 'Oops! Unable to delete todo!', + 404: 'Oops! Note not found!', + 200: 'Note deleted successfully!' + }; swal.insertQueueStep({ - type: 'error', - title: 'Oops! Unable to delete todo!' - }) - }) + icon: response.status === 200 ? 'success' : 'error', + title: errorMessages[response.status] || 'An unknown error occurred.' + }); + return; + } + + const responseData = await response.json(); + swal.insertQueueStep({ + icon: response.status === 200 ? 'success' : 'error', + title: response.status === 200 ? 'Note deleted successfully!' : 'Oops! An error occurred!\r\n' + responseData.error + }); + + if (response.status === 200) { + $(main_this).parents('.todo-item').remove(); + updateBadgeCounts(); + } } }]); }); } -function checkCheckbox() { - $('.inbox-chkbox').click(function() { - if ($(this).is(":checked")) { - $(this).parents('.todo-item').addClass('todo-task-done'); - } - else if ($(this).is(":not(:checked)")) { - $(this).parents('.todo-item').removeClass('todo-task-done'); - } +const checkBtnListener = function() { + $('.actions-btn .done-btn').click(async function() { + const todoItem = $(this).parents('.todo-item'); + todoItem.toggleClass('todo-task-done'); // Toggle the done class + new dynamicBadgeNotification('completedList'); - fetch('../flip_todo_status', { + await fetch('/recon_note/flip_todo_status', { method: 'post', headers: { "X-CSRFToken": getCookie("csrftoken") @@ -311,28 +270,27 @@ function checkCheckbox() { body: JSON.stringify({ 'id': parseInt(this.id.split('_')[1]), }) - }).then(res => res.json()) - .then(res => console.log(res)); + }).then(res => res.json()); }); } -function importantDropdown() { - $('.important').click(function() { +const importantBtnListener = function() { + $('.actions-btn .important-btn').click(async function() { badge_id = this.id.split('_')[1]; if(!$(this).parents('.todo-item').hasClass('todo-task-important')){ $(this).parents('.todo-item').addClass('todo-task-important'); - var is_important_badge = document.createElement("div"); + const is_important_badge = document.createElement("div"); is_important_badge.classList.add("priority-dropdown"); is_important_badge.classList.add("custom-dropdown-icon"); is_important_badge.id = 'important-badge-' + this.id.split('_')[1]; badge = ` - ` + `; is_important_badge.innerHTML = badge; @@ -340,11 +298,10 @@ function importantDropdown() { } else if($(this).parents('.todo-item').hasClass('todo-task-important')){ $(this).parents('.todo-item').removeClass('todo-task-important'); - $(".list-actions#all-list").trigger('click'); - $("#important-badge-"+badge_id).empty(); + $("#important-badge-"+badge_id).remove(); } new dynamicBadgeNotification('importantList'); - fetch('../flip_important_status', { + await fetch('/recon_note/flip_important_status', { method: 'post', headers: { "X-CSRFToken": getCookie("csrftoken") @@ -352,20 +309,16 @@ function importantDropdown() { body: JSON.stringify({ 'id': parseInt(this.id.split('_')[1]), }) - }).then(res => res.json()) - .then(res => console.log(res)); + }).then(res => res.json()); }); } -function todoItem() { +const todoItemListener = function() { $('.todo-item .todo-content').on('click', function(event) { event.preventDefault(); - - var $_taskTitle = $(this).find('.todo-heading').text(); - - var $_taskTarget = $(this).find('.target').text(); - - var $todoDescription = $(this).find('.todo-text').text(); + const $_taskTitle = 
$(this).find('.todo-heading').text(); + const $_taskTarget = $(this).find('.target').text(); + const $todoDescription = $(this).find('.todo-text').text(); $('.task-heading').text($_taskTitle); $('.task-text').html(`${$_taskTarget}
` + htmlEncode($todoDescription)); @@ -374,15 +327,83 @@ function todoItem() { }); } -function populateScanHistory(project) { +const populateScanHistory = function(project) { scan_history_select = document.getElementById('scanHistoryIDropdown'); $.getJSON(`/api/listScanHistory/?format=json&project=${project}`, function(data) { for (var history in data){ history_object = data[history]; - var option = document.createElement('option'); + const option = document.createElement('option'); option.value = history_object['id']; option.innerHTML = history_object['domain']['name'] + ' - Scanned ' + moment.utc(history_object['start_scan_date']).fromNow(); scan_history_select.appendChild(option); } }); } + +// Function to update badge counts +const updateBadgeCounts = function() { + new dynamicBadgeNotification('allList'); + new dynamicBadgeNotification('completedList'); + new dynamicBadgeNotification('importantList'); +}; + +// Updated search function +const searchFunction = function() { + const searchTerm = $('.input-search').val(); + const rex = new RegExp(searchTerm, 'i'); // Create a regex from the input + $('.todo-box .todo-item').hide(); // Hide all items + $('.todo-box .todo-item').filter(function() { + return rex.test($(this).text()); // Show items that match the regex + }).show(); + + // Apply the current filter after search + applyCurrentFilter(); + + // Update badge counts after filtering + updateBadgeCounts(); +}; + +// Function to apply the current filter (To-do, Done, Important) +const applyCurrentFilter = function() { + const currentFilter = $('.list-actions.active').attr('id'); + if (currentFilter === 'todo-task-done') { + $('.todo-box .todo-item:visible').not('.todo-task-done').hide(); + } else if (currentFilter === 'todo-task-important') { + $('.todo-box .todo-item:visible').not('.todo-task-important').hide(); + } +}; + +$(document).ready(function() { + // Show or hide the clear button based on input + const updateClearButtonVisibility = function() { + const searchTerm = $('.input-search').val(); + $('#clear-search').toggle(searchTerm.length > 0); // Show the clear button if there's text + }; + + // Initial check to show the clear button if there's a saved search term + const savedSearchTerm = localStorage.getItem('searchTerm'); + if (savedSearchTerm) { + $('.input-search').val(savedSearchTerm); // Set the input value + updateClearButtonVisibility(); // Update visibility based on the saved term + } + + // Show or hide the clear button on input + $('.input-search').on('input', function() { + updateClearButtonVisibility(); + }); + + // Clear the search input when the clear button is clicked + $('#clear-search').on('click', function() { + $('.input-search').val(''); // Clear the input + $(this).hide(); // Hide the clear button + localStorage.removeItem('searchTerm'); // Remove the search term from local storage + searchFunction(); // Call the search function to refresh the list + }); + + // Attach search function to input + $('.input-search').on('keyup', function() { + const searchTerm = $(this).val(); + localStorage.setItem('searchTerm', searchTerm); // Save the search term to local storage + searchFunction(); // Call the search function + }); +}); diff --git a/web/recon_note/templates/note/index.html b/web/recon_note/templates/note/index.html index 4885aa28..e95e70bd 100644 --- a/web/recon_note/templates/note/index.html +++ b/web/recon_note/templates/note/index.html @@ -3,7 +3,7 @@ {% load humanize %} {% block title %} -Recon Todo +Recon to-do {% endblock title %} {% block 
custom_js_css_link %} @@ -20,20 +20,29 @@
-
                           
- New Todo +
Recon to-do
+ New to-do
@@ -42,8 +51,12 @@
         &nbs
@@ -67,15 +80,16 @@
+ + + + {% endblock main_content %} {% block page_level_script %} @@ -140,6 +190,7 @@
Add Todo
var todo_item = document.createElement("div"); todo_item.classList.add("todo-item"); todo_item.classList.add("all-list"); + todo_item.classList.add("todo-item"); var target_text = ''; if (note_obj['domain_name']) { target_text += 'Domain: ' + note_obj['domain_name'] + ', Scanned ' + moment.utc(note_obj['scan_started_time']).fromNow(); @@ -150,51 +201,47 @@
Add Todo
var is_important_badge = ''; if (note_obj['is_important']) { is_important_badge = `
- +
`; } var badges = is_important_badge; var html = `
-
- -
${htmlEncode(note_obj['title'])}

${target_text}

${htmlEncode(note_obj['description'])}

${badges} -
- -
+
+ + + +
`; todo_item.innerHTML = html; document.getElementById('ct').appendChild(todo_item); if (note_obj['is_done']) { todo_item.classList.add("todo-task-done"); - document.getElementById('checkbox_'+note_obj['id']).checked = true; } if (note_obj['is_important']) { todo_item.classList.add("todo-task-important"); } }; - populateTodofunction(project='{{current_project.slug}}'); + populateTodo(project='{{current_project.slug}}'); $('.bs-tooltip').tooltip(); + $('.modal-footer .btn-secondary').on('click', function() { + $('#addTaskModal').modal('hide'); + }); }); {% endblock page_level_script %} diff --git a/web/recon_note/tests/test_recon_note.py b/web/recon_note/tests/test_recon_note.py index 4684388a..ba598597 100644 --- a/web/recon_note/tests/test_recon_note.py +++ b/web/recon_note/tests/test_recon_note.py @@ -40,7 +40,7 @@ def test_add_recon_note_success(self): "description": "This is a new recon note", "project": self.data_generator.project.slug, } - response = self.client.post(api_url, data) + response = self.client.post(api_url, data, content_type='application/json') self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertTrue(response.json()["status"]) @@ -51,11 +51,11 @@ def test_add_recon_note_missing_data(self): "title": "Incomplete Note", "slug": self.data_generator.project.slug, } - response = self.client.post(api_url, data) + response = self.client.post(api_url, data, content_type='application/json') self.assertIn(response.status_code, [status.HTTP_400_BAD_REQUEST]) self.assertFalse(response.json()["status"]) self.assertIn("error", response.json()) - self.assertEqual(response.json()["error"], "Subdomain ID is required.") + self.assertEqual(response.json()["error"], "Project is required.") def test_list_recon_notes(self): """Test listing all recon notes.""" @@ -67,7 +67,7 @@ def test_delete_recon_note_success(self): """Test deleting a recon note successfully.""" api_url = reverse("delete_note") data = {"id": self.todo_note.id} - response = self.client.post(api_url, data) + response = self.client.post(api_url, data, content_type='application/json') self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertTrue(response.json()["status"]) self.assertFalse(TodoNote.objects.filter(id=self.todo_note.id).exists()) @@ -76,6 +76,6 @@ def test_delete_recon_note_not_found(self): """Test deleting a recon note that does not exist.""" api_url = reverse("delete_note") data = {"id": 99999} # Non-existent ID - response = self.client.post(api_url, data) + response = self.client.post(api_url, data, content_type='application/json') self.assertIn(response.status_code, [status.HTTP_404_NOT_FOUND]) - self.assertFalse(response.json()["status"]) \ No newline at end of file + self.assertFalse(response.json()["status"]) diff --git a/web/recon_note/views.py b/web/recon_note/views.py index 53bf4f75..19f0082f 100644 --- a/web/recon_note/views.py +++ b/web/recon_note/views.py @@ -1,4 +1,11 @@ +""" +Views for the recon_note app. + +This module contains the views for the recon_note app, which handles +the management of todo notesand related operations. +""" import json +import logging from django.http import JsonResponse from django.shortcuts import render @@ -6,67 +13,132 @@ from recon_note.models import TodoNote def list_note(request, slug): - if not slug: - return JsonResponse({'status': False, 'error': 'Slug is required.'}, status=400) + """ + list_note renders the list view for recon notes associated with a specific project. 
+ It prepares the context for the template and returns the rendered HTML response. + + Args: + request (HttpRequest): The HTTP request object containing metadata about the request. + slug (str): The slug of the project for which the recon notes are being listed. + Returns: + HttpResponse: The rendered HTML response for the note list view. + """ context = {'recon_note_active': 'active'} return render(request, 'note/index.html', context) def flip_todo_status(request): - if request.method == "POST": + """ + flip_todo_status toggles the completion status of a todo note based on the provided request data. + It processes a POST request, validates the input, and updates the note's status, + returning a JSON response indicating the result. + + Args: + request (HttpRequest): The HTTP request object containing the note ID and the request method. + + Returns: + JsonResponse: A JSON response indicating the success or failure of the operation, + along with the updated completion status if successful. + + Raises: + JsonDecodeError: If the request body contains invalid JSON. + Http404: If the specified todo note does not exist. + """ + if request.method != "POST": + return JsonResponse({'status': False, 'error': 'Invalid request method.'}, status=400) + + try: body_unicode = request.body.decode('utf-8') body = json.loads(body_unicode) + except json.JSONDecodeError as e: + logging.error('JSON decode error: %s', e) + return JsonResponse({'status': False, 'error': 'Invalid JSON.'}, status=400) - # Check if the ID is present in the request body - note_id = body.get('id') - if note_id is None: - return JsonResponse({'status': False, 'error': 'ID is required.'}, status=400) - - # Check if the note exists before attempting to update its status - try: - note = TodoNote.objects.get(id=note_id) - except TodoNote.DoesNotExist: - return JsonResponse({'status': False, 'error': 'Note not found.'}, status=404) + note_id = body.get('id') + if note_id is None: + return JsonResponse({'status': False, 'error': 'ID is required.'}, status=400) - # Toggle the done status of the note - note.is_done = not note.is_done - note.save() + try: + note = TodoNote.objects.get(id=note_id) + except TodoNote.DoesNotExist: + return JsonResponse({'status': False, 'error': 'Note not found.'}, status=404) - return JsonResponse({'status': True}) + note.is_done = not note.is_done + note.save() + return JsonResponse({'status': True, 'error': False, 'is_done': note.is_done}, status=200) def flip_important_status(request): - if request.method == "POST": + """ + flip_important_status toggles the importance status of a todo note based on the provided request data. + It processes a POST request, validates the input, and updates the note's status, + returning a JSON response indicating the result. + + Args: + request (HttpRequest): The HTTP request object containing the note ID and the request method. + + Returns: + JsonResponse: A JSON response indicating the success or failure of the operation, + along with the updated importance status if successful. + + Raises: + JsonDecodeError: If the request body contains invalid JSON. + Http404: If the specified todo note does not exist. 
+ """ + if request.method != "POST": + return JsonResponse({'status': False, 'error': 'Invalid request method.'}, status=400) + + try: body_unicode = request.body.decode('utf-8') body = json.loads(body_unicode) + except json.JSONDecodeError as e: + logging.error('JSON decode error: %s', e) + return JsonResponse({'status': False, 'error': 'Invalid JSON.'}, status=400) - # Check if the ID is present in the request body - note_id = body.get('id') - if note_id is None: - return JsonResponse({'status': False, 'error': 'ID is required.'}, status=400) + note_id = body.get('id') + if note_id is None: + return JsonResponse({'status': False, 'error': 'ID is required.'}, status=400) - # Check if the note exists before attempting to update its status - try: - note = TodoNote.objects.get(id=note_id) - except TodoNote.DoesNotExist: - return JsonResponse({'status': False, 'error': 'Note not found.'}, status=404) + try: + note = TodoNote.objects.get(id=note_id) + except TodoNote.DoesNotExist: + return JsonResponse({'status': False, 'error': 'Note not found.'}, status=404) - # Toggle the important status of the note - note.is_important = not note.is_important - note.save() - - return JsonResponse({'status': True}) + note.is_important = not note.is_important + note.save() + return JsonResponse({'status': True, 'error': False, 'is_important': note.is_important}, status=200) def delete_note(request): - if request.method == "POST": - # Check if the ID is present in the request body - note_id = request.POST.get('id') - if note_id is None: - return JsonResponse({'status': False, 'error': 'ID is required.'}, status=400) + """ + delete_note handles the deletion of a todo note based on the provided request data. + It processes a POST request, validates the input, and removes the specified note, + returning a JSON response indicating the result. + + Args: + request (HttpRequest): The HTTP request object containing the note ID and the request method. + + Returns: + JsonResponse: A JSON response indicating the success or failure of the deletion operation. + + Raises: + JsonDecodeError: If the request body contains invalid JSON. + Http404: If the specified todo note does not exist. 
+ """ + if request.method != "POST": + return JsonResponse({'status': False, 'error': 'Invalid request method.'}, status=400) + + try: + body_unicode = request.body.decode('utf-8') + body = json.loads(body_unicode) + except json.JSONDecodeError as e: + logging.error('JSON decode error: %s', e) + return JsonResponse({'status': False, 'error': 'Invalid JSON.'}, status=400) - # Check if the note exists before attempting to delete it - if not TodoNote.objects.filter(id=note_id).exists(): - return JsonResponse({'status': False, 'error': 'Note not found.'}, status=404) + note_id = body.get('id') + if note_id is None: + return JsonResponse({'status': False, 'error': 'ID is required.'}, status=400) - TodoNote.objects.filter(id=note_id).delete() + if not TodoNote.objects.filter(id=note_id).exists(): + return JsonResponse({'status': False, 'error': 'Note not found.'}, status=404) - return JsonResponse({'status': True}) + TodoNote.objects.filter(id=note_id).delete() + return JsonResponse({'status': True, 'error': False, 'deleted': True}, status=200) diff --git a/web/startScan/static/startScan/js/detail_scan.js b/web/startScan/static/startScan/js/detail_scan.js index 3b28fe6e..95b2b3d2 100644 --- a/web/startScan/static/startScan/js/detail_scan.js +++ b/web/startScan/static/startScan/js/detail_scan.js @@ -1124,7 +1124,7 @@ $(".add-scan-history-todo").click(function(){ .then(function (response) { if (response.status) { Snackbar.show({ - text: 'Todo Added.', + text: 'To-do Added.', pos: 'top-right', duration: 1500, }); @@ -1179,7 +1179,7 @@ function add_note_for_subdomain_handler(subdomain_id){ if (response.status) { Snackbar.show({ - text: 'Todo Added.', + text: 'To-do Added.', pos: 'top-right', duration: 1500, }); diff --git a/web/startScan/templates/startScan/detail_scan.html b/web/startScan/templates/startScan/detail_scan.html index d32a11a0..9d38c616 100644 --- a/web/startScan/templates/startScan/detail_scan.html +++ b/web/startScan/templates/startScan/detail_scan.html @@ -815,8 +815,8 @@

- Recon Note/Todo - + Recon Note/To-do +

@@ -831,13 +831,13 @@

-

Add Recon Todo

+

Add Recon to-do

- - + +
@@ -849,15 +849,15 @@

Add Recon Todo

- - + +

@@ -2375,7 +2375,7 @@

- +