diff --git a/.env b/.env
deleted file mode 100644
index fca522eee..000000000
--- a/.env
+++ /dev/null
@@ -1,30 +0,0 @@
-#
-# General
-#
-COMPOSE_PROJECT_NAME=rengine
-
-#
-# SSL specific configuration
-#
-AUTHORITY_NAME=reNgine
-AUTHORITY_PASSWORD=nSrmNkwT
-COMPANY=reNgine
-DOMAIN_NAME=rengine.example.com
-COUNTRY_CODE=US
-STATE=Georgia
-CITY=Atlanta
-
-#
-# Database configurations
-#
-POSTGRES_DB=rengine
-POSTGRES_USER=rengine
-POSTGRES_PASSWORD=hE2a5@K&9nEY1fzgA6X
-POSTGRES_PORT=5432
-POSTGRES_HOST=db
-
-#
-# Celery Scaling Configurations
-#
-MAX_CONCURRENCY=80
-MIN_CONCURRENCY=10
diff --git a/.env-dist b/.env-dist
new file mode 100644
index 000000000..6399059b4
--- /dev/null
+++ b/.env-dist
@@ -0,0 +1,45 @@
+#
+# General
+#
+COMPOSE_PROJECT_NAME=rengine
+
+#
+# SSL specific configuration
+#
+AUTHORITY_NAME=reNgine-ng
+AUTHORITY_PASSWORD=nSrmNkwT
+COMPANY=reNgine-ng
+DOMAIN_NAME=rengine-ng.example.com
+COUNTRY_CODE=US
+STATE=Georgia
+CITY=Atlanta
+
+#
+# Database configurations
+# /!\ POSTGRES_USER & PGUSER must be the same user or Celery will fail to start
+#
+POSTGRES_DB=rengine
+POSTGRES_USER=rengine
+PGUSER=rengine
+POSTGRES_PASSWORD=hE2a5@K&9nEY1fzgA6X
+POSTGRES_PORT=5432
+POSTGRES_HOST=db
+
+#
+# Celery Scaling Configurations
+# The number of CONCURRENCY defines how many scans will run in parallel
+# See https://github.com/Security-Tools-Alliance/rengine-ng/wiki/quick#determining-concurrency-values for more information.
+# Please always keep a minimum of 5
+#
+MIN_CONCURRENCY=5
+MAX_CONCURRENCY=30
+
+#
+# This section is for non-interactive installations only
+#
+# reNgine-ng installation type (prebuilt or source)
+INSTALL_TYPE=prebuilt
+# reNgine-ng web interface super user
+DJANGO_SUPERUSER_USERNAME=rengine
+DJANGO_SUPERUSER_EMAIL=rengine@example.com
+DJANGO_SUPERUSER_PASSWORD=Sm7IJG.IfHAFw9snSKv
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
new file mode 100644
index 000000000..7d60f66f7
--- /dev/null
+++ b/.github/workflows/build.yml
@@ -0,0 +1,105 @@
+name: Docker Image CI
+
+on:
+  pull_request:
+    paths:
+      - 'docker/**'
+  push:
+    branches:
+      - "master"
+      - "release/**"
+    paths:
+      - 'docker/**'
+    tags:
+      - "v*.*.*"
+  release:
+    types: [published]
+  workflow_dispatch:
+    inputs:
+      push_image:
+        description: 'Push image to registry'
+        required: true
+        default: 'false'
+        type: choice
+        options:
+          - 'true'
+          - 'false'
+
+env:
+  REGISTRY: ghcr.io
+  OWNER: security-tools-alliance
+  PROJECT: rengine-ng
+
+jobs:
+  build-and-push:
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        image: [celery, web, postgres, redis, ollama, certs, proxy]
+        platform: [linux/amd64, linux/arm64]
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+
+      - name: Get version
+        id: get_version
+        run: |
+          if [[ $GITHUB_REF == refs/tags/* ]]; then
+            echo "VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
+          else
+            echo "VERSION=latest" >> $GITHUB_OUTPUT
+          fi
+
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v3
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+
+      - name: Login to GHCR
+        if: github.event_name != 'pull_request' || github.event.inputs.push_image == 'true'
+        uses: docker/login-action@v3
+        with:
+          registry: ${{ env.REGISTRY }}
+          username: ${{ vars.GHCR_USERNAME }}
+          password: ${{ secrets.GHCR_PAT }}
+
+      - name: Build and push
+        uses: docker/build-push-action@v6
+        with:
+          context: ./docker/${{ matrix.image }}
+          file: ./docker/${{ matrix.image }}/Dockerfile
+          push: ${{ github.event_name != 'pull_request' || github.event.inputs.push_image == 'true' }}
+          tags: |
+            ${{ env.REGISTRY }}/${{ env.OWNER }}/${{ env.PROJECT }}:rengine-${{ matrix.image }}-${{ steps.get_version.outputs.VERSION }}
+            ${{ env.REGISTRY }}/${{ env.OWNER }}/${{ env.PROJECT }}:rengine-${{ matrix.image }}-latest
+          platforms: ${{ matrix.platform }}
+
+  update-release:
+    needs: build-and-push
+    if: github.event_name == 'release' && github.event.action == 'published'
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+
+      - name: Update release description
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          release_id=$(curl -s -H "Authorization: token $GITHUB_TOKEN" \
+            "https://api.github.com/repos/${{ github.repository }}/releases/latest" | \
+            jq -r .id)
+
+          images="celery web postgres redis ollama certs proxy"
+          image_list=""
+          for image in $images; do
+            image_list="${image_list}- ghcr.io/${{ env.OWNER }}/${{ env.PROJECT }}:rengine-${image}-${{ github.ref_name }}\n"
+          done
+
+          body="Docker images for this release:\n${image_list}"
+
+          curl -X PATCH -H "Authorization: token $GITHUB_TOKEN" \
+            -H "Accept: application/vnd.github.v3+json" \
+            "https://api.github.com/repos/${{ github.repository }}/releases/${release_id}" \
+            -d "{\"body\": \"$body\"}"
diff --git a/.github/workflows/close-issues-on-pr-merge-to-release-branch.yml b/.github/workflows/close-issues-on-pr-merge-to-release-branch.yml
index b8265eb9a..e549b0666 100644
--- a/.github/workflows/close-issues-on-pr-merge-to-release-branch.yml
+++ b/.github/workflows/close-issues-on-pr-merge-to-release-branch.yml
@@ -9,9 +9,17 @@ jobs:
   close-related-issues:
     runs-on: ubuntu-latest
     if: github.event.pull_request.merged == true && startsWith(github.ref, 'refs/heads/release/')
+    permissions:
+      issues: write
     steps:
+      - name: Extract issue number
+        id: extract_issue_number
+        run: |
+          issue_number=$(echo "${{ github.event.pull_request.body }}" | grep -oE '#[0-9]+' | head -n 1 | tr -d '#')
+          echo "ISSUE_NUMBER=$issue_number" >> $GITHUB_ENV
+
       - name: Close linked issues
         uses: peter-evans/close-issue@v3
         with:
-          issue-number: ${{ github.event.pull_request.body }}
+          issue-number: ${{ env.ISSUE_NUMBER }}
           comment: "This issue is being closed because the related PR has been merged into a release branch."
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index cc50ad1f6..3dda7d6e0 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -1,35 +1,46 @@
-name: "Code Quality"
+name: "CodeQL Advanced"
 
 on:
   push:
-    branches: [ master ]
+    branches: [ "**" ]
+    paths-ignore:
+      - '**/*.md'
+      - '**/*.txt'
   pull_request:
-    branches: [ master ]
-  schedule:
-    - cron: '0 18 * * 5'
+    branches: [ "**" ]
+    paths-ignore:
+      - '**/*.md'
+      - '**/*.txt'
 
 jobs:
   analyze:
-    name: Analyze
-    runs-on: ubuntu-latest
+    name: Analyze (${{ matrix.language }})
+    runs-on: ${{ 'ubuntu-latest' }}
+    permissions:
+      security-events: write
+      packages: read
     strategy:
       fail-fast: false
       matrix:
-        language: [ 'javascript', 'python' ]
+        include:
+          - language: javascript
+            build-mode: none
+          - language: python
+            build-mode: none
     steps:
       - name: Checkout repository
        uses: actions/checkout@v4
 
-      # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@v2
+        uses: github/codeql-action/init@v3
         with:
           languages: ${{ matrix.language }}
-
-      - name: Autobuild
-        uses: github/codeql-action/autobuild@v2
+          build-mode: ${{ matrix.build-mode }}
+          queries: security-and-quality
 
       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v2
+        uses: github/codeql-action/analyze@v3
+        with:
+          category: "/language:${{matrix.language}}"
diff --git a/.github/workflows/delete-untagged-images.yml b/.github/workflows/delete-untagged-images.yml
new file mode 100644
index 000000000..daef02192
--- /dev/null
+++ b/.github/workflows/delete-untagged-images.yml
@@ -0,0 +1,52 @@
+name: Delete Untagged GHCR Images
+
+on:
+  workflow_dispatch:
+    inputs:
+      dry_run:
+        description: 'Dry run (does not delete images)'
+        required: true
+        default: 'true'
+        type: choice
+        options:
+          - 'true'
+          - 'false'
+  schedule:
+    - cron: '0 0 1,15 * *'
+
+env:
+  REGISTRY: ghcr.io
+  OWNER: security-tools-alliance
+  PROJECT: rengine-ng
+
+jobs:
+  delete-untagged-ghcr:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v3
+        with:
+          registry: ${{ env.REGISTRY }}
+          username: ${{ vars.GHCR_USERNAME }}
+          password: ${{ secrets.GHCR_PAT }}
+
+      - name: Delete untagged images
+        uses: Chizkiyahu/delete-untagged-ghcr-action@v4
+        with:
+          token: ${{ secrets.GHCR_PAT }}
+          repository_owner: ${{ env.OWNER }}
+          repository: ${{ env.PROJECT }}
+          untagged_only: true
+          owner_type: org
+          except_untagged_multiplatform: true
+
+      - name: Summary
+        if: always()
+        env:
+          DRY_RUN: ${{ github.event.inputs.dry_run || 'false' }}
+        run: |
+          echo "## Summary of untagged image deletion" >> $GITHUB_STEP_SUMMARY
+          echo "- Dry run: $DRY_RUN" >> $GITHUB_STEP_SUMMARY
+          echo "- Owner: $OWNER" >> $GITHUB_STEP_SUMMARY
+          echo "- Project: $PROJECT" >> $GITHUB_STEP_SUMMARY
+          echo "Check the logs above for more details on deleted images or images that would have been deleted in dry run mode." >> $GITHUB_STEP_SUMMARY
diff --git a/.gitignore b/.gitignore
index 7c872ac31..64e774e2b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -39,6 +39,10 @@ share/python-wheels/
 *.egg
 MANIFEST
 
+# IDE files
+.vscode/settings.json
+.vscode/tasks-and-contexts.json
+
 *.manifest
 *.spec
 
@@ -57,7 +61,8 @@ docs/_build/
 staticfiles/
 
 secret
-/secrets
+docker/secrets
+logs
 
 get-docker.sh
diff --git a/.vscode/launch.json b/.vscode/launch.json
index c19bffaef..8b37dc119 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -12,7 +12,7 @@
       "pathMappings": [
         {
           "localRoot": "${workspaceFolder}/web",
-          "remoteRoot": "/usr/src/app"
+          "remoteRoot": "/home/rengine/rengine"
         }
       ]
     },
@@ -27,7 +27,7 @@
       "pathMappings": [
         {
           "localRoot": "${workspaceFolder}/web",
-          "remoteRoot": "/usr/src/app"
+          "remoteRoot": "/home/rengine/rengine"
         }
       ]
     }
diff --git a/.vscode/tasks.json b/.vscode/tasks.json
new file mode 100644
index 000000000..e778e7b39
--- /dev/null
+++ b/.vscode/tasks.json
@@ -0,0 +1,155 @@
+{
+  "version": "2.0.0",
+  "tasks": [
+    {
+      "label": "Build and Push Docker Image",
+      "type": "shell",
+      "command": "cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-${image}-${version} -t ghcr.io/security-tools-alliance/rengine-ng:rengine-${image}-latest -f ./${image}/Dockerfile ./${image} && docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-${image}-${version} && if [ \"${input:isLatest}\" = \"true\" ]; then docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-${image}-latest; fi",
+      "problemMatcher": [],
+      "options": {
+        "env": {
+          "version": "${input:version}",
+          "image": "${input:image}"
+        }
+      }
+    },
+    {
+      "label": "Build All Docker Images",
+      "type": "shell",
+      "dependsOn": [
+        "Build CELERY",
+        "Build WEB",
+        "Build POSTGRES",
+        "Build REDIS",
+        "Build OLLAMA",
+        "Build CERTS",
+        "Build PROXY"
+      ],
+      "dependsOrder": "sequence",
+      "problemMatcher": []
+    },
+    {
+      "label": "Build and Push All Docker Images",
+      "type": "shell",
+      "dependsOn": [
+        "Build and Push CELERY",
+        "Build and Push WEB",
+        "Build and Push POSTGRES",
+        "Build and Push REDIS",
+        "Build and Push OLLAMA",
+        "Build and Push CERTS",
+        "Build and Push PROXY"
+      ],
+      "dependsOrder": "sequence",
+      "problemMatcher": []
+    },
+    {
+      "label": "Build CELERY",
+      "type": "shell",
+      "command": "cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-celery-${input:globalVersion} -t ghcr.io/security-tools-alliance/rengine-ng:rengine-celery-latest -f ./celery/Dockerfile ./celery",
+      "problemMatcher": []
+    },
+    {
+      "label": "Build WEB",
+      "type": "shell",
+      "command": "cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-web-${input:globalVersion} -t ghcr.io/security-tools-alliance/rengine-ng:rengine-web-latest -f ./web/Dockerfile ./web",
+      "problemMatcher": []
+    },
+    {
+      "label": "Build POSTGRES",
+      "type": "shell",
+      "command": "cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-postgres-${input:globalVersion} -t ghcr.io/security-tools-alliance/rengine-ng:rengine-postgres-latest -f ./postgres/Dockerfile ./postgres",
+      "problemMatcher": []
+    },
+    {
+      "label": "Build REDIS",
+      "type": "shell",
+      "command": "cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-redis-${input:globalVersion} -t ghcr.io/security-tools-alliance/rengine-ng:rengine-redis-latest -f ./redis/Dockerfile ./redis",
+      "problemMatcher": []
+    },
+    {
+      "label": "Build OLLAMA",
+      "type": "shell",
+      "command": "cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-ollama-${input:globalVersion} -t ghcr.io/security-tools-alliance/rengine-ng:rengine-ollama-latest -f ./ollama/Dockerfile ./ollama",
+      "problemMatcher": []
+    },
+    {
+      "label": "Build CERTS",
+      "type": "shell",
+      "command": "cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-certs-${input:globalVersion} -t ghcr.io/security-tools-alliance/rengine-ng:rengine-certs-latest -f ./certs/Dockerfile ./certs",
+      "problemMatcher": []
+    },
+    {
+      "label": "Build PROXY",
+      "type": "shell",
+      "command": "cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-proxy-${input:globalVersion} -t ghcr.io/security-tools-alliance/rengine-ng:rengine-proxy-latest -f ./proxy/Dockerfile ./proxy",
+      "problemMatcher": []
+    },
+    {
+      "label": "Build and Push CELERY",
+      "type": "shell",
+      "command": "cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-celery-${input:globalVersion} -t ghcr.io/security-tools-alliance/rengine-ng:rengine-celery-latest -f ./celery/Dockerfile ./celery && docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-celery-${input:globalVersion} && if [ \"${input:isLatest}\" = \"true\" ]; then docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-celery-latest; fi",
+      "problemMatcher": []
+    },
+    {
+      "label": "Build and Push WEB",
+      "type": "shell",
+      "command": "cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-web-${input:globalVersion} -t ghcr.io/security-tools-alliance/rengine-ng:rengine-web-latest -f ./web/Dockerfile ./web && docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-web-${input:globalVersion} && if [ \"${input:isLatest}\" = \"true\" ]; then docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-web-latest; fi",
+      "problemMatcher": []
+    },
+    {
+      "label": "Build and Push POSTGRES",
+      "type": "shell",
+      "command": "cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-postgres-${input:globalVersion} -t ghcr.io/security-tools-alliance/rengine-ng:rengine-postgres-latest -f ./postgres/Dockerfile ./postgres && docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-postgres-${input:globalVersion} && if [ \"${input:isLatest}\" = \"true\" ]; then docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-postgres-latest; fi",
+      "problemMatcher": []
+    },
+    {
+      "label": "Build and Push REDIS",
+      "type": "shell",
+      "command": "cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-redis-${input:globalVersion} -t ghcr.io/security-tools-alliance/rengine-ng:rengine-redis-latest -f ./redis/Dockerfile ./redis && docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-redis-${input:globalVersion} && if [ \"${input:isLatest}\" = \"true\" ]; then docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-redis-latest; fi",
+      "problemMatcher": []
+    },
+    {
+      "label": "Build and Push OLLAMA",
+      "type": "shell",
+      "command": "cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-ollama-${input:globalVersion} -t ghcr.io/security-tools-alliance/rengine-ng:rengine-ollama-latest -f ./ollama/Dockerfile ./ollama && docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-ollama-${input:globalVersion} && if [ \"${input:isLatest}\" = \"true\" ]; then docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-ollama-latest; fi",
"problemMatcher": [] + }, + { + "label": "Build and Push CERTS", + "type": "shell", + "command": "cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-certs-${input:globalVersion} -t ghcr.io/security-tools-alliance/rengine-ng:rengine-certs-latest -f ./certs/Dockerfile ./certs && docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-certs-${input:globalVersion} && if [ \"${input:isLatest}\" = \"true\" ]; then docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-certs-latest; fi", + "problemMatcher": [] + }, + { + "label": "Build and Push PROXY", + "type": "shell", + "command": "cd ./docker; docker buildx build -t ghcr.io/security-tools-alliance/rengine-ng:rengine-proxy-${input:globalVersion} -t ghcr.io/security-tools-alliance/rengine-ng:rengine-proxy-latest -f ./proxy/Dockerfile ./proxy && docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-proxy-${input:globalVersion} && if [ \"${input:isLatest}\" = \"true\" ]; then docker push ghcr.io/security-tools-alliance/rengine-ng:rengine-proxy-latest; fi", + "problemMatcher": [] + } + ], + "inputs": [ + { + "id": "version", + "type": "promptString", + "description": "Enter the version tag for the Docker image (e.g., v2.1.0)" + }, + { + "id": "globalVersion", + "type": "promptString", + "description": "Enter the version tag for all Docker images (e.g., v2.1.0)" + }, + { + "id": "image", + "type": "pickString", + "description": "Select the image to build", + "options": ["celery", "web", "postgres", "redis", "ollama", "certs", "proxy"] + }, + { + "id": "isLatest", + "type": "pickString", + "description": "Is this the latest version (this will also push the 'latest' tag)?", + "options": ["true", "false"], + "default": "false" + } ] +} \ No newline at end of file diff --git a/Makefile b/Makefile index e786e16c9..8f22245e4 100644 --- a/Makefile +++ b/Makefile @@ -1,78 +1,175 @@ +include .env .DEFAULT_GOAL:=help +# Define RENGINE_VERSION +RENGINE_VERSION := $(shell cat web/reNgine/version.txt) +export RENGINE_VERSION + # Credits: https://github.com/sherifabdlnaby/elastdocker/ # This for future release of Compose that will use Docker Buildkit, which is much efficient. -COMPOSE_PREFIX_CMD := COMPOSE_DOCKER_CLI_BUILD=1 +COMPOSE_PREFIX_CMD := COMPOSE_DOCKER_CLI_BUILD=1 +COMPOSE_CMD := docker compose +COMPOSE_FILE := docker/docker-compose.yml +COMPOSE_FILE_BUILD := docker/docker-compose.build.yml +COMPOSE_FILE_DEV := docker/docker-compose.dev.yml +COMPOSE_FILE_SETUP := docker/docker-compose.setup.yml +SERVICES := db web proxy redis celery celery-beat ollama -COMPOSE_ALL_FILES := -f docker-compose.yml -COMPOSE_DEV_ALL_FILES := -f docker-compose.dev.yml -SERVICES := db web proxy redis celery celery-beat -SERVICES_DEV := db web proxy redis celery celery-beat +# Check if 'docker compose' command is available, otherwise check for 'docker-compose' +DOCKER_COMPOSE := $(shell if command -v docker > /dev/null && docker compose version > /dev/null 2>&1; then echo "docker compose"; elif command -v docker-compose > /dev/null; then echo "docker-compose"; else echo ""; fi) -# Check if 'docker compose' command is available, otherwise use 'docker-compose' -DOCKER_COMPOSE := $(shell if command -v docker > /dev/null && docker compose version > /dev/null 2>&1; then echo "docker compose"; else echo "docker-compose"; fi) -$(info Using: $(shell echo "$(DOCKER_COMPOSE)")) +ifeq ($(DOCKER_COMPOSE),) +$(error Docker Compose not found. 
Please install Docker Compose) +endif -# -------------------------- +# Check if user is in docker group or is root +DOCKER_GROUP_CHECK := $(shell if [ -n "$$(getent group docker)" ]; then echo "yes"; else echo "no"; fi) -.PHONY: setup certs up build username pull down stop restart rm logs +ifeq ($(DOCKER_GROUP_CHECK),no) +$(error This command must be run with sudo or by a user in the docker group) +endif -certs: ## Generate certificates. - @${COMPOSE_PREFIX_CMD} ${DOCKER_COMPOSE} -f docker-compose.setup.yml run --rm certs +$(info Using: $(DOCKER_COMPOSE)) -setup: ## Generate certificates. - @make certs +# Define common commands +DOCKER_COMPOSE_CMD := ${COMPOSE_PREFIX_CMD} ${DOCKER_COMPOSE} +DOCKER_COMPOSE_FILE_CMD := ${DOCKER_COMPOSE_CMD} -f ${COMPOSE_FILE} -up: ## Build and start all services. - ${COMPOSE_PREFIX_CMD} ${DOCKER_COMPOSE} ${COMPOSE_ALL_FILES} up -d --build ${SERVICES} +# -------------------------- -build: ## Build all services. - ${COMPOSE_PREFIX_CMD} ${DOCKER_COMPOSE} ${COMPOSE_ALL_FILES} build ${SERVICES} +.PHONY: certs up dev_up build_up build pull superuser_create superuser_delete superuser_changepassword migrate down stop restart remove_images test logs images prune help -username: ## Generate Username (Use only after make up). - ${COMPOSE_PREFIX_CMD} ${DOCKER_COMPOSE} ${COMPOSE_ALL_FILES} exec web python3 manage.py createsuperuser +pull: ## Pull pre-built Docker images from repository. + ${DOCKER_COMPOSE_FILE_CMD} pull -pull: ## Pull Docker images. - docker login docker.pkg.github.com - ${COMPOSE_PREFIX_CMD} ${DOCKER_COMPOSE} ${COMPOSE_ALL_FILES} pull +images: ## Show all Docker images for reNgine services. + @docker images --filter=reference='ghcr.io/security-tools-alliance/rengine-ng:*' --format "table {{.Repository}}\t{{.Tag}}\t{{.ID}}\t{{.Size}}" -down: ## Down all services. - ${COMPOSE_PREFIX_CMD} ${DOCKER_COMPOSE} ${COMPOSE_ALL_FILES} down +build: ## Build all Docker images locally. + @make remove_images + ${DOCKER_COMPOSE_FILE_CMD} -f ${COMPOSE_FILE_BUILD} build ${SERVICES} -stop: ## Stop all services. - ${COMPOSE_PREFIX_CMD} ${DOCKER_COMPOSE} ${COMPOSE_ALL_FILES} stop ${SERVICES} +build_up: ## Build and start all services. + @make down + @make build + @make up -restart: ## Restart all services. - ${COMPOSE_PREFIX_CMD} ${DOCKER_COMPOSE} ${COMPOSE_ALL_FILES} restart ${SERVICES} +certs: ## Generate certificates. + @${DOCKER_COMPOSE_CMD} -f ${COMPOSE_FILE_SETUP} run --rm certs -rm: ## Remove all services containers. - ${COMPOSE_PREFIX_CMD} ${DOCKER_COMPOSE} $(COMPOSE_ALL_FILES) rm -f ${SERVICES} +up: ## Pull and start all services. + ${DOCKER_COMPOSE_FILE_CMD} up -d ${SERVICES} -test: - ${COMPOSE_PREFIX_CMD} ${DOCKER_COMPOSE} $(COMPOSE_ALL_FILES) exec celery python3 -m unittest tests/test_scan.py +dev_up: ## Pull and start all services with development configuration (more debug logs and Django Toolbar in UI). + @make down + ${DOCKER_COMPOSE_FILE_CMD} -f ${COMPOSE_FILE_DEV} up -d ${SERVICES} -logs: ## Tail all logs with -n 1000. - ${COMPOSE_PREFIX_CMD} ${DOCKER_COMPOSE} $(COMPOSE_ALL_FILES) logs --follow --tail=1000 ${SERVICES} +superuser_create: ## Generate username (use only after `make up`). +ifeq ($(isNonInteractive), true) + ${DOCKER_COMPOSE_FILE_CMD} exec web poetry -C /home/rengine run python3 manage.py createsuperuser --username ${DJANGO_SUPERUSER_USERNAME} --email ${DJANGO_SUPERUSER_EMAIL} --noinput +else + ${DOCKER_COMPOSE_FILE_CMD} exec web poetry -C /home/rengine run python3 manage.py createsuperuser +endif -images: ## Show all Docker images. 
-	${COMPOSE_PREFIX_CMD} ${DOCKER_COMPOSE} $(COMPOSE_ALL_FILES) images ${SERVICES}
+superuser_delete: ## Delete username (use only after `make up`).
+	${DOCKER_COMPOSE_FILE_CMD} exec -T web poetry -C /home/rengine run python3 manage.py shell -c "from django.contrib.auth import get_user_model; User = get_user_model(); User.objects.filter(username='${DJANGO_SUPERUSER_USERNAME}').delete()"
 
-prune: ## Remove containers and delete volume data.
-	@make stop && make rm && docker volume prune -f
+superuser_changepassword: ## Change password for user (use only after `make up` & `make superuser_create`).
+ifeq ($(isNonInteractive), true)
+	${DOCKER_COMPOSE_FILE_CMD} exec -T web poetry -C /home/rengine run python3 manage.py shell -c "from django.contrib.auth import get_user_model; User = get_user_model(); u = User.objects.get(username='${DJANGO_SUPERUSER_USERNAME}'); u.set_password('${DJANGO_SUPERUSER_PASSWORD}'); u.save()"
+else
+	${DOCKER_COMPOSE_FILE_CMD} exec web poetry -C /home/rengine run python3 manage.py changepassword
+endif
 
-help: ## Show this help.
-	@echo "Make application Docker images and manage containers using Docker Compose files."
-	@awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m\033[0m (default: help)\n\nTargets:\n"} /^[a-zA-Z_-]+:.*?##/ { printf "  \033[36m%-12s\033[0m %s\n", $$1, $$2 }' $(MAKEFILE_LIST)
+migrate: ## Apply Django migrations
+	${DOCKER_COMPOSE_FILE_CMD} exec web poetry -C /home/rengine run python3 manage.py migrate
+
+down: ## Down all services and remove containers.
+	${DOCKER_COMPOSE_FILE_CMD} down
+
+stop: ## Stop all services.
+	${DOCKER_COMPOSE_FILE_CMD} stop ${SERVICES}
+
+restart: ## Restart specified services or all if not specified. Use DEV=1 for development mode, COLD=1 for down and up instead of restart.
+	@if [ "$(COLD)" = "1" ]; then \
+		if [ "$(DEV)" = "1" ]; then \
+			if [ -n "$(filter-out $@,$(MAKECMDGOALS))" ]; then \
+				echo "Cold restart $(filter-out $@,$(MAKECMDGOALS)) in dev mode"; \
+				${DOCKER_COMPOSE_FILE_CMD} -f ${COMPOSE_FILE_DEV} down $(filter-out $@,$(MAKECMDGOALS)); \
+				${DOCKER_COMPOSE_FILE_CMD} -f ${COMPOSE_FILE_DEV} up -d $(filter-out $@,$(MAKECMDGOALS)); \
+			else \
+				echo "Cold restart ${SERVICES} in dev mode"; \
+				${DOCKER_COMPOSE_FILE_CMD} -f ${COMPOSE_FILE_DEV} down; \
+				${DOCKER_COMPOSE_FILE_CMD} -f ${COMPOSE_FILE_DEV} up -d ${SERVICES}; \
+			fi \
+		else \
+			if [ -n "$(filter-out $@,$(MAKECMDGOALS))" ]; then \
+				echo "Cold restart $(filter-out $@,$(MAKECMDGOALS)) in production mode"; \
+				${DOCKER_COMPOSE_FILE_CMD} down $(filter-out $@,$(MAKECMDGOALS)); \
+				${DOCKER_COMPOSE_FILE_CMD} up -d $(filter-out $@,$(MAKECMDGOALS)); \
+			else \
+				echo "Cold restart ${SERVICES} in production mode"; \
+				${DOCKER_COMPOSE_FILE_CMD} down; \
+				${DOCKER_COMPOSE_FILE_CMD} up -d ${SERVICES}; \
+			fi \
+		fi \
+	else \
+		if [ "$(DEV)" = "1" ]; then \
+			if [ -n "$(filter-out $@,$(MAKECMDGOALS))" ]; then \
+				echo "Restart $(filter-out $@,$(MAKECMDGOALS)) in dev mode"; \
+				${DOCKER_COMPOSE_FILE_CMD} -f ${COMPOSE_FILE_DEV} restart $(filter-out $@,$(MAKECMDGOALS)); \
+			else \
+				echo "Restart ${SERVICES} in dev mode"; \
+				${DOCKER_COMPOSE_FILE_CMD} -f ${COMPOSE_FILE_DEV} restart ${SERVICES}; \
+			fi \
+		else \
+			if [ -n "$(filter-out $@,$(MAKECMDGOALS))" ]; then \
+				echo "Restart $(filter-out $@,$(MAKECMDGOALS)) in production mode"; \
+				${DOCKER_COMPOSE_FILE_CMD} restart $(filter-out $@,$(MAKECMDGOALS)); \
+			else \
+				echo "Restart ${SERVICES} in production mode"; \
+				${DOCKER_COMPOSE_FILE_CMD} restart ${SERVICES}; \
+			fi \
+		fi \
+	fi
+
+remove_images: ## Remove all Docker images for reNgine-ng services.
+	@images=$$(docker images --filter=reference='ghcr.io/security-tools-alliance/rengine-ng:*' --format "{{.ID}}"); \
+	if [ -n "$$images" ]; then \
+		echo "Removing images: $$images"; \
+		docker rmi -f $$images; \
+	else \
+		echo "No images found for ghcr.io/security-tools-alliance/rengine-ng"; \
+	fi
 
-dev_build: ## Build all services.
-	${COMPOSE_PREFIX_CMD} ${DOCKER_COMPOSE} ${COMPOSE_DEV_ALL_FILES} build ${SERVICES_DEV}
+test:
+	${DOCKER_COMPOSE_FILE_CMD} exec celery poetry -C /home/rengine run python3 -m unittest tests/test_scan.py
 
-dev_up: ## Build and start all services.
-	${COMPOSE_PREFIX_CMD} ${DOCKER_COMPOSE} ${COMPOSE_DEV_ALL_FILES} up -d --build ${SERVICES_DEV}
+logs: ## Tail all containers logs with -n 1000 (useful for debug).
+	${DOCKER_COMPOSE_FILE_CMD} logs --follow --tail=1000 ${SERVICES}
 
-dev_down: ## Down all services.
-	${COMPOSE_PREFIX_CMD} ${DOCKER_COMPOSE} ${COMPOSE_DEV_ALL_FILES} down
+prune: ## Remove containers, delete volume data, and prune Docker system.
+	@make down
+	@make remove_images
+	@docker volume rm $$(docker volume ls -q --filter name=rengine_) 2>/dev/null || true
+	@docker system prune -af --volumes
 
-dev_logs: ## Tail all logs with -n 1000.
-	${COMPOSE_PREFIX_CMD} ${DOCKER_COMPOSE} $(COMPOSE_DEV_ALL_FILES) logs --follow --tail=1000 ${SERVICES_DEV}
+help: ## Show this help.
+	@echo "Manage Docker images, containers and Django commands using Docker Compose files."
+	@echo ""
+	@echo "Usage:"
+	@echo "  make (default: help)"
+	@echo ""
+	@echo "Targets:"
+	@echo "  make restart [service1] [service2] ...                Restart specific services in production mode"
+	@echo "  make restart DEV=1 [service1] [service2] ...          Restart specific services in development mode"
+	@echo "  make restart                                          Restart all services in production mode"
+	@echo "  make restart DEV=1                                    Restart all services in development mode"
+	@echo "  make restart COLD=1 [service1] [service2] ...         Cold restart (recreate containers) specific services in production mode"
+	@echo "  make restart DEV=1 COLD=1 [service1] [service2] ...   Cold restart (recreate containers) specific services in development mode"
+	@echo "  make restart COLD=1                                   Cold restart (recreate containers) all services in production mode"
+	@echo "  make restart DEV=1 COLD=1                             Cold restart (recreate containers) all services in development mode"
+
+%:
+	@:
diff --git a/README.md b/README.md
index f8f1d0d84..63c4ad3c3 100644
--- a/README.md
+++ b/README.md
@@ -77,56 +77,11 @@ You can find detailed documentation in the repository [Wiki](https://github.com/
 
 ### Quick Installation
 
-**Note:** Only Ubuntu/VPS
-
-1. Clone this repo
-
-   ```bash
-   git clone https://github.com/Security-Tools-Alliance/rengine-ng && cd rengine-ng
-   ```
-
-1. Edit the dotenv file, **please make sure to change the password for postgresql `POSTGRES_PASSWORD`!**
-
-   ```bash
-   nano .env
-   ```
-
-1. In the dotenv file, you may also modify the Scaling Configurations
-
-   ```bash
-   MAX_CONCURRENCY=80
-   MIN_CONCURRENCY=10
-   ```
-
-   MAX_CONCURRENCY: This parameter specifies the maximum number of reNgine-ng's concurrent Celery worker processes that can be spawned. In this case, it's set to 80, meaning that the application can utilize up to 80 concurrent worker processes to execute tasks concurrently. This is useful for handling a high volume of scans or when you want to scale up processing power during periods of high demand. If you have more CPU cores, you will need to increase this for maximized performance.
-
-   MIN_CONCURRENCY: On the other hand, MIN_CONCURRENCY specifies the minimum number of concurrent worker processes that should be maintained, even during periods of lower demand. In this example, it's set to 10, which means that even when there are fewer tasks to process, at least 10 worker processes will be kept running. This helps ensure that the application can respond promptly to incoming tasks without the overhead of repeatedly starting and stopping worker processes.
-
-   These settings allow for dynamic scaling of Celery workers, ensuring that the application efficiently manages its workload by adjusting the number of concurrent workers based on the workload's size and complexity
-
-1. Run the installation script, Please keep an eye for any prompt, you will also be asked for username and password for reNgine-ng.
-
-   ```bash
-   sudo ./install.sh
-   ```
-
-   If `install.sh` does not have execution permissions, please grant it execution permissions: `chmod +x install.sh`
-
-Detailed installation instructions can be found at [https://github.com/Security-Tools-Alliance/rengine-ng/wiki/Installation#-quick-installation](https://github.com/Security-Tools-Alliance/rengine-ng/wiki/Installation#-quick-installation)
+Detailed installation instructions can be found in the [install section of the wiki](https://github.com/Security-Tools-Alliance/rengine-ng/wiki/Installation#-quick-installation)
 
 ### Updating
 
-1. Updating is as simple as running the following command:
-
-   ```bash
-   cd rengine-ng && sudo ./update.sh
-   ```
-
-   If `update.sh` does not have execution permissions, please grant it execution permissions: `sudo chmod +x update.sh`
-
-   **NOTE:** if you're updating from 1.3.6 and you're getting a 'password authentication failed' error, consider uninstalling 1.3.6 first, then install 2.x.x as you'd normally do.
-
-Detailed update instructions:
+Detailed update instructions can be found in the [update section of the wiki](https://github.com/Security-Tools-Alliance/rengine-ng/wiki/Installation#-quick-installation)
 
 ### Changelog
diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml
deleted file mode 100644
index 53554e052..000000000
--- a/docker-compose.dev.yml
+++ /dev/null
@@ -1,199 +0,0 @@
-version: '3.8'
-
-services:
-  db:
-    restart: always
-    image: "postgres:12.3-alpine"
-    environment:
-      - POSTGRES_DB=${POSTGRES_DB}
-      - POSTGRES_USER=${POSTGRES_USER}
-      - PGUSER=${POSTGRES_USER}
-      - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
-      - POSTGRES_PORT=${POSTGRES_PORT}
-    volumes:
-      - postgres_data:/var/lib/postgresql/data/
-    healthcheck:
-      test: ["CMD-SHELL", "pg_isready"]
-      interval: 10s
-      timeout: 5s
-      retries: 5
-    networks:
-      - rengine_network
-    ports:
-      - "127.0.0.1:5432:5432"
-
-  redis:
-    image: "redis:alpine"
-    hostname: redis
-    healthcheck:
-      test: ["CMD", "redis-cli","ping"]
-      interval: 10s
-      timeout: 5s
-      retries: 5
-    networks:
-      - rengine_network
-
-  celery:
-    build:
-      context: ./web
-    restart: always
-    entrypoint: /usr/src/app/celery-entrypoint-dev.sh
-    volumes:
-      - ./web:/usr/src/app
-      - github_repos:/usr/src/github
-      - wordlist:/usr/src/wordlist
-      - scan_results:/usr/src/scan_results
-      - gf_patterns:/root/.gf
-      - nuclei_templates:/root/nuclei-templates
-      - tool_config:/root/.config
-      - static_volume:/usr/src/app/staticfiles/
-    environment:
-      - CELERY_DEBUG=1
-      - CELERY_REMOTE_DEBUG=0
-      - CELERY_REMOTE_DEBUG_PORT=5679
-      - CELERY_BROKER=redis://redis:6379/0
-      - CELERY_BACKEND=redis://redis:6379/0
-      - DOMAIN_NAME=${DOMAIN_NAME}
-      - POSTGRES_DB=${POSTGRES_DB}
-      - POSTGRES_USER=${POSTGRES_USER}
-      - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
-      - POSTGRES_PORT=${POSTGRES_PORT}
-      - POSTGRES_HOST=${POSTGRES_HOST}
-      - MAX_CONCURRENCY=${MAX_CONCURRENCY}
-      - MIN_CONCURRENCY=${MIN_CONCURRENCY}
-    ports:
-      - "127.0.0.1:5679:5679"
-    healthcheck:
-      test: ["CMD", "celery","-A","reNgine","status"]
-      interval: 10s
-      timeout: 10s
-      retries: 60
-    depends_on:
-      db:
-        condition: service_healthy
-      redis:
-        condition: service_healthy
-    networks:
-      - rengine_network
-
-  celery-beat:
-    build: ./web
-    entrypoint: /usr/src/app/beat-entrypoint-dev.sh
-    command: celery -A reNgine beat -l INFO --scheduler django_celery_beat.schedulers:DatabaseScheduler
-    environment:
-      - CELERY_DEBUG=1
-      - CELERY_BROKER=redis://redis:6379/0
-      - CELERY_BACKEND=redis://redis:6379/0
-      - POSTGRES_DB=${POSTGRES_DB}
-      - POSTGRES_USER=${POSTGRES_USER}
-      - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
-      - POSTGRES_PORT=${POSTGRES_PORT}
-      - POSTGRES_HOST=${POSTGRES_HOST}
-    depends_on:
-      celery:
-        condition: service_healthy
-    volumes:
-      - ./web:/usr/src/app
-      - github_repos:/usr/src/github
-      - wordlist:/usr/src/wordlist
-      - scan_results:/usr/src/scan_results
-      - gf_patterns:/root/.gf
-      - nuclei_templates:/root/nuclei-templates
-      - tool_config:/root/.config
-    networks:
-      - rengine_network
-
-  web:
-    build:
-      context: ./web
-    entrypoint: /usr/src/app/entrypoint-dev.sh
-    restart: always
-    image: docker.pkg.github.com/yogeshojha/rengine/rengine:latest
-    environment:
-      - UI_DEBUG=1
-      - UI_REMOTE_DEBUG=0
-      - UI_REMOTE_DEBUG_PORT=5678
-      - CELERY_BROKER=redis://redis:6379/0
-      - CELERY_BACKEND=redis://redis:6379/0
-      - DOMAIN_NAME=${DOMAIN_NAME}
-      - POSTGRES_DB=${POSTGRES_DB}
-      - POSTGRES_USER=${POSTGRES_USER}
-      - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
-      - POSTGRES_PORT=${POSTGRES_PORT}
-      - POSTGRES_HOST=${POSTGRES_HOST}
-    volumes:
-      - ./web:/usr/src/app
-      - github_repos:/usr/src/github
-      - wordlist:/usr/src/wordlist
-      - scan_results:/usr/src/scan_results
-      - gf_patterns:/root/.gf
-      - nuclei_templates:/root/nuclei-templates
-      - tool_config:/root/.config
-      - static_volume:/usr/src/app/staticfiles
-    ports:
-      - "8000:8000"
-      - "127.0.0.1:5678:5678"
-    healthcheck:
-      test: ["CMD", "curl", "-f", "-k", "http://localhost:8000"]
-      interval: 10s
-      timeout: 10s
-      retries: 10
-    depends_on:
-      db:
-        condition: service_healthy
-      celery:
-        condition: service_healthy
-      celery-beat:
-        condition: service_started
-    networks:
-      rengine_network:
-        aliases:
-          - rengine
-
-  proxy:
-    restart: always
-    image: nginx:alpine
-    ports:
-      - 8082:8082/tcp
-      - 443:443/tcp
-    depends_on:
-      web:
-        condition: service_healthy
-      db:
-        condition: service_healthy
-      redis:
-        condition: service_healthy
-    secrets:
-      - source: proxy.ca
-        target: /etc/nginx/certs/rengine_chain.pem
-      - source: proxy.cert
-        target: /etc/nginx/certs/rengine.pem
-      - source: proxy.key
-        target: /etc/nginx/certs/rengine_rsa.key
-    volumes:
-      - ./config/nginx/rengine.conf:/etc/nginx/conf.d/rengine.conf:ro
-      - static_volume:/usr/src/app/staticfiles
-      - scan_results:/usr/src/scan_results
-    networks:
-      - rengine_network
-
-networks:
-  rengine_network:
-
-volumes:
-  tool_config:
-  postgres_data:
-  gf_patterns:
-  nuclei_templates:
-  github_repos:
-  wordlist:
-  scan_results:
-  static_volume:
-
-secrets:
-  proxy.ca:
-    file: ./secrets/certs/rengine_chain.pem
-  proxy.key:
-    file: ./secrets/certs/rengine_rsa.key
-  proxy.cert:
-    file: ./secrets/certs/rengine.pem
diff --git a/docker-compose.setup.yml b/docker-compose.setup.yml
deleted file mode 100644
index 6046b1136..000000000
--- a/docker-compose.setup.yml
+++ /dev/null
@@ -1,14 +0,0 @@
-version: '3.8'
-services:
-  certs:
-    build: ./certs
-    environment:
-      - AUTHORITY_NAME=${AUTHORITY_NAME:-CA}
-      - AUTHORITY_PASSWORD=${AUTHORITY_PASSWORD:-CHANGE_IT}
-      - COMPANY=${COMPANY:-Company}
-      - DOMAIN_NAME=${DOMAIN_NAME:-example.com}
-      - COUNTRY_CODE=${COUNTRY_CODE:-CC}
-      - STATE=${STATE:-State}
-      - CITY=${CITY:-City}
-    volumes:
-      - ./secrets/certs:/certs:rw
diff --git a/docker-compose.yml b/docker-compose.yml
deleted file mode 100644
index 7423f95ac..000000000
--- a/docker-compose.yml
+++ /dev/null
@@ -1,190 +0,0 @@
-version: '3.8'
-
-services:
-  db:
-    restart: always
-    image: "postgres:12.3-alpine"
-    environment:
-      - POSTGRES_DB=${POSTGRES_DB}
-      - POSTGRES_USER=${POSTGRES_USER}
-      - PGUSER=${POSTGRES_USER}
-      - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
-      - POSTGRES_PORT=${POSTGRES_PORT}
-    volumes:
-      - postgres_data:/var/lib/postgresql/data/
-    healthcheck:
-      test: ["CMD-SHELL", "pg_isready"]
-      interval: 10s
-      timeout: 5s
-      retries: 5
-    networks:
-      - rengine_network
-
-  redis:
-    image: "redis:alpine"
-    hostname: redis
-    healthcheck:
-      test: ["CMD", "redis-cli","ping"]
-      interval: 10s
-      timeout: 5s
-      retries: 5
-    networks:
-      - rengine_network
-
-  celery:
-    build:
-      context: ./web
-    restart: always
-    entrypoint: /usr/src/app/celery-entrypoint.sh
-    volumes:
-      - ./web:/usr/src/app
-      - github_repos:/usr/src/github
-      - wordlist:/usr/src/wordlist
-      - scan_results:/usr/src/scan_results
-      - gf_patterns:/root/.gf
-      - nuclei_templates:/root/nuclei-templates
-      - tool_config:/root/.config
-      - static_volume:/usr/src/app/staticfiles/
-    environment:
-      - CELERY_BROKER=redis://redis:6379/0
-      - CELERY_BACKEND=redis://redis:6379/0
-      - DOMAIN_NAME=${DOMAIN_NAME}
-      - POSTGRES_DB=${POSTGRES_DB}
-      - POSTGRES_USER=${POSTGRES_USER}
-      - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
-      - POSTGRES_PORT=${POSTGRES_PORT}
-      - POSTGRES_HOST=${POSTGRES_HOST}
-      - MAX_CONCURRENCY=${MAX_CONCURRENCY}
-      - MIN_CONCURRENCY=${MIN_CONCURRENCY}
-    healthcheck:
-      test: ["CMD", "celery","-A","reNgine","status"]
-      interval: 10s
-      timeout: 10s
-      retries: 60
-    depends_on:
-      db:
-        condition: service_healthy
-      redis:
-        condition: service_healthy
-    networks:
-      - rengine_network
-
-  celery-beat:
-    build: ./web
-    entrypoint: /usr/src/app/beat-entrypoint.sh
-    command: celery -A reNgine beat -l INFO --scheduler django_celery_beat.schedulers:DatabaseScheduler
-    environment:
-      - CELERY_BROKER=redis://redis:6379/0
-      - CELERY_BACKEND=redis://redis:6379/0
-      - POSTGRES_DB=${POSTGRES_DB}
-      - POSTGRES_USER=${POSTGRES_USER}
-      - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
-      - POSTGRES_PORT=${POSTGRES_PORT}
-      - POSTGRES_HOST=${POSTGRES_HOST}
-    depends_on:
-      celery:
-        condition: service_healthy
-    volumes:
-      - ./web:/usr/src/app
-      - github_repos:/usr/src/github
-      - wordlist:/usr/src/wordlist
-      - scan_results:/usr/src/scan_results
-      - gf_patterns:/root/.gf
-      - nuclei_templates:/root/nuclei-templates
-      - tool_config:/root/.config
-    networks:
-      - rengine_network
-
-  web:
-    build:
-      context: ./web
-    entrypoint: /usr/src/app/entrypoint.sh
-    restart: always
-    image: docker.pkg.github.com/yogeshojha/rengine/rengine:latest
-    environment:
-      - CELERY_BROKER=redis://redis:6379/0
-      - CELERY_BACKEND=redis://redis:6379/0
-      - DOMAIN_NAME=${DOMAIN_NAME}
-      - POSTGRES_DB=${POSTGRES_DB}
-      - POSTGRES_USER=${POSTGRES_USER}
-      - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
-      - POSTGRES_PORT=${POSTGRES_PORT}
-      - POSTGRES_HOST=${POSTGRES_HOST}
-      # THIS IS A MUST FOR CHECKING UPDATE, EVERYTIME A COMMIT IS MERGED INTO
-      # MASTER, UPDATE THIS!!! MAJOR.MINOR.PATCH https://semver.org/
-      - RENGINE_CURRENT_VERSION='2.0.7'
-    volumes:
-      - ./web:/usr/src/app
-      - github_repos:/usr/src/github
-      - wordlist:/usr/src/wordlist
-      - scan_results:/usr/src/scan_results
-      - gf_patterns:/root/.gf
-      - nuclei_templates:/root/nuclei-templates
-      - tool_config:/root/.config
-      - static_volume:/usr/src/app/staticfiles/
-    ports:
-      - "8000:8000"
-    healthcheck:
-      test: ["CMD", "curl", "-f", "-k", "http://localhost:8000"]
-      interval: 10s
-      timeout: 10s
-      retries: 10
-    depends_on:
-      db:
-        condition: service_healthy
-      celery:
-        condition: service_healthy
-      celery-beat:
-        condition: service_started
-    networks:
-      rengine_network:
-        aliases:
-          - rengine
-
-  proxy:
-    restart: always
-    image: nginx:alpine
-    ports:
-      - 8082:8082/tcp
-      - 443:443/tcp
-    depends_on:
-      web:
-        condition: service_healthy
-      db:
-        condition: service_healthy
-      redis:
-        condition: service_healthy
-    secrets:
-      - source: proxy.ca
-        target: /etc/nginx/certs/rengine_chain.pem
-      - source: proxy.cert
-        target: /etc/nginx/certs/rengine.pem
-      - source: proxy.key
-        target: /etc/nginx/certs/rengine_rsa.key
-    volumes:
-      - ./config/nginx/rengine.conf:/etc/nginx/conf.d/rengine.conf:ro
-      - static_volume:/usr/src/app/staticfiles/
-      - scan_results:/usr/src/scan_results
-    networks:
-      - rengine_network
-
-networks:
-  rengine_network:
-
-volumes:
-  tool_config:
-  postgres_data:
-  gf_patterns:
-  nuclei_templates:
-  github_repos:
-  wordlist:
-  scan_results:
-  static_volume:
-
-secrets:
-  proxy.ca:
-    file: ./secrets/certs/rengine_chain.pem
-  proxy.key:
-    file: ./secrets/certs/rengine_rsa.key
-  proxy.cert:
-    file: ./secrets/certs/rengine.pem
diff --git a/web/celery-entrypoint-dev.sh b/docker/beat/entrypoint-dev.sh
similarity index 58%
rename from web/celery-entrypoint-dev.sh
rename to docker/beat/entrypoint-dev.sh
index 147d07e7b..2b0e136cc 100755
--- a/web/celery-entrypoint-dev.sh
+++ b/docker/beat/entrypoint-dev.sh
@@ -1,23 +1,14 @@
 #!/bin/bash
 
 if [ "$CELERY_DEBUG" == "1" ]; then
-    # Django debug toolbar
-    pip install django-debug-toolbar==4.3.0
-    python3 manage.py collectstatic --noinput
     export CELERY_LOGLEVEL='debug'
 fi
 
 # Check if remote debugging is enabled and set concurrency to 1 for easier debug
 if [ "$CELERY_REMOTE_DEBUG" == "1" ]; then
-    # Live debug
-    pip install debugpy
-
-    # To debug opened port with netstat
-    apt install net-tools -y
-
     # Set celery concurrency to 1 because thread processes is hard to debug
     export MIN_CONCURRENCY=1
     export MAX_CONCURRENCY=1
 fi
 
-./celery-entrypoint.sh
\ No newline at end of file
+/entrypoint.sh
\ No newline at end of file
diff --git a/docker/beat/entrypoint.sh b/docker/beat/entrypoint.sh
new file mode 100755
index 000000000..9bd232705
--- /dev/null
+++ b/docker/beat/entrypoint.sh
@@ -0,0 +1,9 @@
+#!/bin/bash
+
+if [ ! "$CELERY_LOGLEVEL" ]; then
+  export CELERY_LOGLEVEL='info'
+fi
+
+poetry run -C $HOME/ celery -A reNgine beat --loglevel=$CELERY_LOGLEVEL --scheduler django_celery_beat.schedulers:DatabaseScheduler
+
+exec "$@"
\ No newline at end of file
diff --git a/docker/celery/Dockerfile b/docker/celery/Dockerfile
new file mode 100644
index 000000000..f45f1dc62
--- /dev/null
+++ b/docker/celery/Dockerfile
@@ -0,0 +1,193 @@
+FROM debian:12
+
+# Labels and Credits
+LABEL \
+    name="reNgine-ng" \
+    author="Security-Tools-Alliance (https://github.com/Security-Tools-Alliance) & Yogesh Ojha " \
+    description="reNgine-ng is an automated pipeline of recon process, useful for information gathering during web application penetration testing."
+
+# Environment Variables
+ENV DEBIAN_FRONTEND="noninteractive" \
+    DATABASE="postgres"
+ENV USERNAME="rengine"
+
+RUN apt update -y && apt install -y \
+    build-essential \
+    zlib1g-dev \
+    libncurses5-dev \
+    libgdbm-dev \
+    libnss3-dev \
+    libssl-dev \
+    libreadline-dev \
+    libffi-dev \
+    libsqlite3-dev \
+    libbz2-dev \
+    wget \
+    libpcap-dev \
+    libpq-dev \
+    vim \
+    xvfb \
+    git \
+    unzip \
+    curl \
+    gettext \
+    nmap \
+    net-tools \
+    htop \
+    firefox-esr \
+    fontconfig fonts-freefont-ttf fonts-noto fonts-terminus
+
+RUN fc-cache -f && \
+    fc-list | sort
+
+ENV USERNAME="rengine"
+RUN addgroup --gid 1000 --system $USERNAME && \
+    mkdir -p /home/$USERNAME && \
+    adduser --gid 1000 --system --shell /bin/false --disabled-password --uid 1000 --home /home/$USERNAME $USERNAME && \
+    chown $USERNAME:$USERNAME /home/$USERNAME
+
+# Download and install geckodriver
+RUN ARCH=$(dpkg --print-architecture) && \
+    version=0.35.0 && \
+    geckodriver_arm="geckodriver-v${version}-linux-aarch64.tar.gz" && \
+    geckodriver_amd="geckodriver-v${version}-linux64.tar.gz" && \
+    if [ "${ARCH}" = "arm64" ]; then \
+        wget "https://github.com/mozilla/geckodriver/releases/download/v${version}/${geckodriver_arm}" && \
+        tar -xvf "${geckodriver_arm}" -C /usr/local/bin/ && \
+        rm "${geckodriver_arm}"; \
+    elif [ "${ARCH}" = "amd64" ]; then \
+        wget "https://github.com/mozilla/geckodriver/releases/download/v${version}/${geckodriver_amd}" && \
+        tar -xvf "${geckodriver_amd}" -C /usr/local/bin/ && \
+        rm "${geckodriver_amd}"; \
+    else \
+        echo "Unknown architecture: $ARCH" && \
+        exit 1; \
+    fi
+
+# Download and install go
+RUN ARCH=$(dpkg --print-architecture) && \
+    #GO_VERSION=$(curl -s https://go.dev/VERSION?m=text) && \
+    GO_VERSION=1.23.0 && \
+    if [ "${ARCH}" = "arm64" ]; then \
+        wget https://go.dev/dl/go${GO_VERSION}.linux-arm64.tar.gz && \
+        tar -xvf go${GO_VERSION}.linux-arm64.tar.gz -C /usr/local/ && \
+        rm go${GO_VERSION}.linux-arm64.tar.gz; \
+    elif [ "${ARCH}" = "amd64" ]; then \
+        wget https://go.dev/dl/go${GO_VERSION}.linux-amd64.tar.gz && \
+        tar -xvf go${GO_VERSION}.linux-amd64.tar.gz -C /usr/local/ && \
+        rm go${GO_VERSION}.linux-amd64.tar.gz; \
+    else \
+        echo "Unknown architecture: $ARCH" ; \
+        exit 1; \
+    fi
+
+# Install python 3.10
+RUN cd /root && wget https://www.python.org/ftp/python/3.10.0/Python-3.10.0.tgz && \
+    tar -xvf Python-3.10.0.tgz && \
+    rm Python-3.10.0.tgz && \
+    cd Python-3.10.0 && \
+    ./configure --enable-optimizations && \
+    make -j4 && \
+    make altinstall
+
+USER $USERNAME
+WORKDIR /home/$USERNAME
+
+ENV TOOLPATH="/home/${USERNAME}/tools"
+ENV BINPATH="/home/${USERNAME}/.local/bin"
+ENV WORDLISTPATH="/home/${USERNAME}/wordlists"
+ENV PIPX_BIN_DIR="${TOOLPATH}/pipx"
+ENV GOROOT="/usr/local/go"
+ENV GOPATH="${TOOLPATH}/go"
+ENV PATH="${PATH}:${GOROOT}/bin:${GOPATH}/bin:${PIPX_BIN_DIR}"
+
+RUN mkdir -p $TOOLPATH/.github && \
+    mkdir -p $BINPATH
+
+
+# Download Go packages
+RUN ARCH=$(dpkg --print-architecture) \
+    && if [ "$ARCH" = "arm64" ] || [ "$ARCH" = "amd64" ]; then \
+        GOARCH=$ARCH go install -v github.com/jaeles-project/gospider@v1.1.6 \
+        && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/tomnomnom/gf@dcd4c361f9f5ba302294ed38b8ce278e8ba69006 \
+        && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/tomnomnom/unfurl@v0.4.3 \
+        && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/tomnomnom/waybackurls@v0.1.0 \
+        && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/projectdiscovery/httpx/cmd/httpx@v1.6.0 \
+        && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/projectdiscovery/subfinder/v2/cmd/subfinder@v2.6.6 \
+        && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/projectdiscovery/nuclei/v3/cmd/nuclei@v3.2.6 \
+        && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/projectdiscovery/naabu/v2/cmd/naabu@v2.3.0 \
+        && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/hakluke/hakrawler@latest \
+        && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/lc/gau/v2/cmd/gau@v2.2.1 \
+        && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/owasp-amass/amass/v4/...@v4.2.0 \
+        && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/ffuf/ffuf/v2@v2.1.0 \
+        && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/projectdiscovery/tlsx/cmd/tlsx@v1.1.6 \
+        && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/hahwul/dalfox/v2@v2.9.2 \
+        && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/projectdiscovery/katana/cmd/katana@v1.1.0 \
+        && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/dwisiswant0/crlfuzz/cmd/crlfuzz@v1.4.1 \
+        && GOARCH=$ARCH go install -ldflags="-s -w" -v github.com/sa7mon/s3scanner@c544f1cf00f70cae3f2155b24d336f515b7c598b \
+        && chmod 700 -R $GOPATH/pkg/* \
+        && rm -rf $GOPATH/pkg/* \
+        && rm -rf /home/$USERNAME/.cache/go-build/*; \
+    else \
+        echo "Unknown architecture: $ARCH" ; \
+        exit 1; \
+    fi
+
+# Set environment variables
+ENV PYTHONDONTWRITEBYTECODE=1
+ENV PYTHONUNBUFFERED=1
+ENV PATH="${PATH}:${BINPATH}"
+
+# Install python tools
+RUN python3.10 -m pip install pipx && pipx ensurepath && printf "poetry\n\
+    watchdog\n\
+    https://github.com/aboul3la/Sublist3r/archive/refs/tags/1.1.zip\n\
+    https://github.com/laramies/theHarvester/archive/refs/tags/4.6.0.zip\n\
+    git+https://github.com/ncrocfer/whatportis@59a1718bf7c531f2a5a4e213cad0c047ce9c1c94\n\
+    git+https://github.com/EnableSecurity/wafw00f@914dbf4feab7e2529f064f4300b5fde84ea1cce3\n\
+    h8mail\n" | xargs -L1 pipx install || true
+
+# Install tools
+RUN ln -s /usr/local/bin/geckodriver $BINPATH/geckodriver && \
+    cd $TOOLPATH/.github && git clone https://github.com/shmilylty/OneForAll.git && cd OneForAll && git reset --hard 9ecfda229199ebf30d9338f4c88cbeb7c40e16c2 && \
+    cd $TOOLPATH/.github && git clone https://github.com/FortyNorthSecurity/EyeWitness.git && cd EyeWitness && git reset --hard cb09a842f93109836219b2aa2f9f25c58a34bc8c && \
+    cd $TOOLPATH/.github && git clone https://github.com/UnaPibaGeek/ctfr.git && cd ctfr && git reset --hard 6c7fecdc6346c4f5322049e38f415d5bddaa420d && \
+    cd $TOOLPATH/.github && git clone https://github.com/Tuhinshubhra/CMSeeK.git && cd CMSeeK && git reset --hard 20f9780d2e682874be959cfd487045c92e3c73f4 && \
+    cd $TOOLPATH/.github && git clone https://github.com/GiJ03/Infoga.git && cd Infoga && git reset --hard 6834c6f863c2bdc92cc808934bb293571d1939c1 && \
+    cd $TOOLPATH/.github && wget https://github.com/m3n0sd0n4ld/GooFuzz/releases/download/1.2.5/GooFuzz.v.1.2.5.zip && unzip GooFuzz.v.1.2.5.zip && rm GooFuzz.v.1.2.5.zip && mv GooFuzz* GooFuzz && echo "#!/bin/bash\n\nbash $TOOLPATH/.github/GooFuzz/GooFuzz \"\$@\"" > $BINPATH/GooFuzz && chmod +x $BINPATH/GooFuzz && \
+    cd $TOOLPATH/.github && git clone https://github.com/1ndianl33t/Gf-Patterns && cd Gf-Patterns && git reset --hard 565382db80f001af288b8d71c525a7ce7f17e80d && mkdir -p /home/$USERNAME/.gf/ && cp -r *.json /home/$USERNAME/.gf/ && \
+    cd $TOOLPATH/.github && git clone https://github.com/tomnomnom/gf.git && cd gf && git reset --hard dcd4c361f9f5ba302294ed38b8ce278e8ba69006 && cp -r examples/*.json /home/$USERNAME/.gf/ && \
+    mkdir -p /home/$USERNAME/.nmap/ && cd /home/$USERNAME/.nmap/ && git clone https://github.com/scipag/vulscan.git && cd vulscan && git reset --hard 2640d62400e9953fb9a33e6033dc59a9dc9606ba && ln -s $TOOLPATH/.github/vulscan /home/$USERNAME/.nmap/vulscan && \
+    mkdir -p $WORDLISTPATH && \
+    wget https://raw.githubusercontent.com/maurosoria/dirsearch/master/db/dicc.txt -O $WORDLISTPATH/dicc.txt && \
+    wget https://raw.githubusercontent.com/danielmiessler/SecLists/master/Fuzzing/fuzz-Bo0oM.txt -O $WORDLISTPATH/fuzz-Bo0oM.txt && \
+    wget https://raw.githubusercontent.com/danielmiessler/SecLists/master/Discovery/DNS/deepmagic.com-prefixes-top50000.txt -O $WORDLISTPATH/deepmagic.com-prefixes-top50000.txt && \
+    mkdir -p /home/$USERNAME/nuclei-templates && wget https://raw.githubusercontent.com/NagliNagli/Shockwave-OSS/bd7445cd320a174d3073f0a61867a40849d28436/ssrf.yaml -O /home/$USERNAME/nuclei-templates/ssrf_nagli.yaml && \
+    mkdir -p /home/$USERNAME/results
+
+# Copy poetry config files
+COPY --chown=$USERNAME:$USERNAME ./*.toml /home/$USERNAME
+
+# Install aliases for tools needing a pyproject.toml
+RUN cd $TOOLPATH/.github/OneForAll && mv /home/$USERNAME/oneforall-pyproject.toml pyproject.toml && poetry env use python3.10 && poetry install --no-cache && echo "#!/bin/bash\n\npoetry -C $TOOLPATH/.github/OneForAll/ run python $TOOLPATH/.github/OneForAll/oneforall.py \"\$@\"" > $BINPATH/oneforall && chmod +x $BINPATH/oneforall && \
+    cd $TOOLPATH/.github/ctfr && mv /home/$USERNAME/ctfr-pyproject.toml pyproject.toml && poetry env use python3.10 && poetry install --no-cache && echo "#!/bin/bash\n\npoetry -C $TOOLPATH/.github/ctfr/ run python $TOOLPATH/.github/ctfr/ctfr.py \"\$@\"" > $BINPATH/ctfr && chmod +x $BINPATH/ctfr && \
+    cd $TOOLPATH/.github/EyeWitness/Python && mv /home/$USERNAME/eyewitness-pyproject.toml pyproject.toml && poetry env use python3.10 && poetry install --no-cache && echo "#!/bin/bash\n\npoetry -C $TOOLPATH/.github/EyeWitness/Python run python $TOOLPATH/.github/EyeWitness/Python/EyeWitness.py \"\$@\"" > $BINPATH/EyeWitness && chmod +x $BINPATH/EyeWitness && \
+    cd $TOOLPATH/.github/CMSeeK && mv /home/$USERNAME/cmseek-pyproject.toml pyproject.toml && poetry env use python3.10 && poetry install --no-cache && echo "#!/bin/bash\n\npoetry -C $TOOLPATH/.github/CMSeeK/ run python $TOOLPATH/.github/CMSeeK/cmseek.py \"\$@\"" > $BINPATH/cmseek && chmod +x $BINPATH/cmseek && \
+    cd $TOOLPATH/.github/Infoga && mv /home/$USERNAME/infoga-pyproject.toml pyproject.toml && poetry env use python3.10 && poetry install --no-cache && echo "#!/bin/bash\n\npoetry -C $TOOLPATH/.github/Infoga/ run python $TOOLPATH/.github/Infoga/infoga.py \"\$@\"" > $BINPATH/infoga && chmod +x $BINPATH/infoga && \
+    cd /home/$USERNAME && poetry install
+
+# Create tools config files
+RUN nuclei -silent && naabu -version && subfinder -version && mkdir -p /home/$USERNAME/.config/theHarvester
+COPY --chown=$USERNAME:$USERNAME ./config/the-harvester-api-keys.yaml /home/$USERNAME/.config/theHarvester/api-keys.yaml
+COPY --chown=$USERNAME:$USERNAME ./config/amass.ini /home/$USERNAME/.config/amass.ini
+COPY --chown=$USERNAME:$USERNAME ./config/.gau.toml /home/$USERNAME/.config/.gau.toml
+RUN ln -s /home/$USERNAME/.config/.gau.toml /home/$USERNAME/.gau.toml
+
+COPY ./entrypoint.sh /entrypoint.sh
+RUN mkdir -p /home/$USERNAME/rengine /home/$USERNAME/scan_results \
+    && chown -R $USERNAME:$USERNAME /home/$USERNAME/rengine \
+    && chown -R $USERNAME:$USERNAME /home/$USERNAME/scan_results
+VOLUME /home/$USERNAME/rengine
+VOLUME /home/$USERNAME/scan_results
+WORKDIR /home/$USERNAME/rengine
+ENTRYPOINT ["/entrypoint.sh"]
\ No newline at end of file
diff --git a/docker/celery/cmseek-pyproject.toml b/docker/celery/cmseek-pyproject.toml
new file mode 100644
index 000000000..a37c82835
--- /dev/null
+++ b/docker/celery/cmseek-pyproject.toml
@@ -0,0 +1,15 @@
+[tool.poetry]
+name = "cmseek"
+version = "1.1.3"
+description = "A content management system (CMS) manages the creation and modification of digital content. It typically supports multiple users in a collaborative environment. Some noteable examples are: WordPress, Joomla, Drupal etc."
+authors = ["Tuhinshubhra "]
+license = "GPL-3.0"
+readme = "README.md"
+
+[tool.poetry.dependencies]
+python = "^3.10"
+requests = "^2.31.0"
+
+[build-system]
+requires = ["poetry-core"]
+build-backend = "poetry.core.masonry.api"
\ No newline at end of file
diff --git a/docker/celery/config/.gau.toml b/docker/celery/config/.gau.toml
new file mode 100644
index 000000000..ff13611ef
--- /dev/null
+++ b/docker/celery/config/.gau.toml
@@ -0,0 +1,19 @@
+threads = 2
+verbose = false
+retries = 15
+subdomains = false
+parameters = false
+providers = ["wayback","commoncrawl","otx","urlscan"]
+blacklist = ["ttf","woff","svg","png","jpg"]
+json = false
+
+[urlscan]
+  apikey = ""
+
+[filters]
+  from = ""
+  to = ""
+  matchstatuscodes = []
+  matchmimetypes = []
+  filterstatuscodes = []
+  filtermimetypes = ["image/png", "image/jpg", "image/svg+xml"]
diff --git a/docker/celery/config/amass.ini b/docker/celery/config/amass.ini
new file mode 100644
index 000000000..722519502
--- /dev/null
+++ b/docker/celery/config/amass.ini
@@ -0,0 +1,263 @@
+# Copyright 2017-2020 Jeff Foley. All rights reserved.
+# Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file.
+
+# Should results only be collected passively and without DNS resolution? Not recommended.
+#mode = passive
+# Would you like to use active techniques that communicate directly with the discovered assets,
+# such as pulling TLS certificates from discovered IP addresses and attempting DNS zone transfers?
+#mode = active
+
+# The directory that stores the Cayley graph database and other output files
+# The default for Linux systems is: $HOME/.config/amass
+#output_directory = amass
+
+# Another location (directory) where the user can provide ADS scripts to the engine.
+#scripts_directory =
+
+# The maximum number of DNS queries that can be performed concurrently during the enumeration.
+#maximum_dns_queries = 20000
+
+# DNS resolvers used globally by the amass package.
+#[resolvers]
+#monitor_resolver_rate = true
+#resolver = 1.1.1.1 ; Cloudflare
+#resolver = 8.8.8.8 ; Google
+#resolver = 64.6.64.6 ; Verisign
+#resolver = 74.82.42.42 ; Hurricane Electric
+#resolver = 1.0.0.1 ; Cloudflare Secondary
+#resolver = 8.8.4.4 ; Google Secondary
+#resolver = 64.6.65.6 ; Verisign Secondary
+#resolver = 77.88.8.1 ; Yandex.DNS Secondary
+
+[scope]
+# The network infrastructure settings expand scope, not restrict the scope.
+# Single IP address or range (e.g. a.b.c.10-245)
+#address = 192.168.1.1
+#cidr = 192.168.1.0/24
+#asn = 26808
+#port = 80
+port = 443
+#port = 8080
+
+# Root domain names used in the enumeration. The findings are limited by the root domain names provided.
+#[scope.domains]
+#domain = owasp.org
+#domain = appsecusa.org
+#domain = appsec.eu
+#domain = appsec-labs.com
+
+# Are there any subdomains that are out of scope?
+#[scope.blacklisted]
+#subdomain = education.appsec-labs.com
+#subdomain = 2012.appsecusa.org
+
+# The graph database discovered DNS names, associated network infrastructure, results from data sources, etc.
+# This information is then used in future enumerations and analysis of the discoveries.
+#[graphdbs]
+#local_database = true ; Set this to false to disable use of the local database.
+
+# postgres://[username:password@]host[:port]/database-name?sslmode=disable of the PostgreSQL
+# database and credentials. Sslmode is optional, and can be disable, require, verify-ca, or verify-full.
+#[graphdbs.postgres]
+#primary = false ; Specify which graph database is the primary db, or the local database will be selected.
+#url = "postgres://[username:password@]host[:port]/database-name?sslmode=disable"
+#options="connect_timeout=10"
+
+# MqSQL database and credentials URL format:
+# [username:password@]tcp(host[:3306])/database-name?timeout=10s
+#[graphdbs.mysql]
+#url = [username:password@]tcp(host[:3306])/database-name?timeout=10s
+
+# Settings related to DNS name brute forcing.
+#[bruteforce]
+#enabled = true
+#recursive = true
+# Number of discoveries made in a subdomain before performing recursive brute forcing: Default is 1.
+#minimum_for_recursive = 1
+#wordlist_file = /usr/share/wordlists/all.txt
+#wordlist_file = /usr/share/wordlists/all.txt # multiple lists can be used
+
+# Would you like to permute resolved names?
+#[alterations]
+#enabled = true
+# edit_distance specifies the number of times a primitive edit operation will be
+# performed on a name sample during fuzzy label searching.
+#edit_distance = 1 ; Setting this to zero will disable this expensive feature.
+#flip_words = true # test-dev.owasp.org -> test-prod.owasp.org
+#flip_numbers = true # test1.owasp.org -> test2.owasp.org
+#add_words = true # test.owasp.org -> test-dev.owasp.org
+#add_numbers = true # test.owasp.org -> test1.owasp.org
+# Multiple lists can be used.
+#wordlist_file = /usr/share/wordlists/all.txt
+#wordlist_file = /usr/share/wordlists/all.txt
+
+[data_sources]
+# When set, this time-to-live is the minimum value applied to all data source caching.
+minimum_ttl = 1440 ; One day
+
+# Are there any data sources that should be disabled?
+#[data_sources.disabled]
+#data_source = Ask
+#data_source = Exalead
+#data_source = IPv4Info
+
+# Provide data source configuration information.
+# See the following format:
+#[data_sources.SOURCENAME] ; The SOURCENAME must match the name in the data source implementation.
+#ttl = 4320 ; Time-to-live value sets the number of minutes that the responses are cached.
+# Unique identifier for this set of SOURCENAME credentials.
+# Multiple sets of credentials can be provided and will be randomly selected.
+#[data_sources.SOURCENAME.CredentialSetID]
+#apikey = ; Each data source uses potentially different keys for authentication.
+#secret = ; See the examples below for each data source.
+#username =
+#password =
+
+#https://otx.alienvault.com (Free)
+#[data_sources.AlienVault]
+#[data_sources.AlienVault.Credentials]
+#apikey =
+
+#https://app.binaryedge.com (Free)
+#[data_sources.BinaryEdge]
+#ttl = 10080
+#[data_sources.BinaryEdge.Credentials]
+#apikey =
+
+#https://c99.nl (Paid)
+#[data_sources.C99]
+#ttl = 4320
+#[data_sources.C99.account1]
+#apikey =
+#[data_sources.C99.account2]
+#apikey =
+
+#https://censys.io (Free)
+#[data_sources.Censys]
+#ttl = 10080
+#[data_sources.Censys.Credentials]
+#apikey =
+#secret =
+
+#https://chaos.projectdiscovery.io (Free-InviteOnly)
+#[data_sources.Chaos]
+#ttl = 4320
+#[data_sources.Chaos.Credentials]
+#apikey =
+
+#https://cloudflare.com (Free)
+#[data_sources.Cloudflare]
+#[data_sources.Cloudflare.Credentials]
+#apikey =
+
+#Closed Source Invite Only
+#[data_sources.CIRCL]
+#[data_sources.CIRCL.Credentials]
+#username =
+#password =
+
+#https://dnsdb.info (Paid)
+#[data_sources.DNSDB]
+#ttl = 4320
+#[data_sources.DNSDB.Credentials]
+#apikey =
+
+#https://developer.facebook.com (Free)
+# Look here for how to obtain the Facebook credentials:
+# https://goldplugins.com/documentation/wp-social-pro-documentation/how-to-get-an-app-id-and-secret-key-from-facebook/
+#[data_sources.FacebookCT]
+#ttl = 4320
+#[data_sources.FacebookCT.app1]
+#apikey =
+#secret =
+#[data_sources.FacebookCT.app2]
+#apikey =
+#secret =
+
+#https://github.com (Free)
+#[data_sources.GitHub]
+#ttl = 4320
+#[data_sources.GitHub.accountname]
+#apikey =
+
+#https://networksdb.io (Free)
+#[data_sources.NetworksDB]
+#[data_sources.NetworksDB.Credentials]
+#apikey =
+
+#https://passivetotal.com (Free)
+#[data_sources.PassiveTotal]
+#ttl = 10080
+#[data_sources.PassiveTotal.Credentials]
+#username =
+#apikey =
+
+#https://recon.dev (Free)
+#[data_sources.ReconDev]
+#[data_sources.ReconDev.free]
+#apikey =
+#[data_sources.ReconDev.paid]
+#apikey =
+
+#https://securitytrails.com (Free)
+#[data_sources.SecurityTrails]
+#ttl = 1440
+#[data_sources.SecurityTrails.Credentials]
+#apikey =
+
+#https://shodan.io (Free)
+#[data_sources.Shodan]
+#ttl = 10080
+#[data_sources.Shodan.Credentials]
+#apikey =
+
+#https://spyse.com (Paid/Free-trial)
+#[data_sources.Spyse]
+#ttl = 4320
+#[data_sources.Spyse.Credentials]
+#apikey =
+
+#https://developer.twitter.com
(Free) +# Provide your Twitter App Consumer API key and Consumer API secret key +#[data_sources.Twitter] +#[data_sources.Twitter.account1] +#apikey = +#secret = +#[data_sources.Twitter.account2] +#apikey = +#secret = + +#https://umbrella.cisco.com (Paid-Enterprise) +# The apikey must be an API access token created through the Investigate management UI +#[data_sources.Umbrella] +#[data_sources.Umbrella.Credentials] +#apikey = + +#https://urlscan.io (Free) +# URLScan can be used without an API key, but the key allows new submissions to be made +#[data_sources.URLScan] +#[data_sources.URLScan.Credentials] +#apikey = + +#https://virustotal.com (Free) +#[data_sources.VirusTotal] +#ttl = 10080 +#[data_sources.VirusTotal.Credentials] +#apikey = + +#https://whoisxmlapi.com (Free) +#[data_sources.WhoisXML] +#[data_sources.WhoisXML.Credentials] +#apikey = + +#https://zetalytics.com (Paid) +#[data_sources.ZETAlytics] +#ttl = 1440 +#[data_sources.ZETAlytics.Credentials] +#apikey = + +#[data_sources.ZoomEye] +#ttl = 1440 +#[data_sources.ZoomEye.Credentials] +#username = +#password = diff --git a/docker/celery/config/the-harvester-api-keys.yaml b/docker/celery/config/the-harvester-api-keys.yaml new file mode 100644 index 000000000..c0098ad53 --- /dev/null +++ b/docker/celery/config/the-harvester-api-keys.yaml @@ -0,0 +1,65 @@ +apikeys: + bevigil: + key: + + binaryedge: + key: + + bing: + key: + + bufferoverun: + key: + + censys: + id: + secret: + + criminalip: + key: + + fullhunt: + key: + + github: + key: + + hunter: + key: + + hunterhow: + key: + + intelx: + key: + + netlas: + key: + + onyphe: + key: + + pentestTools: + key: + + projectDiscovery: + key: + + rocketreach: + key: + + securityTrails: + key: + + shodan: + key: + + tomba: + key: + secret: + + virustotal: + key: + + zoomeye: + key: diff --git a/docker/celery/ctfr-pyproject.toml b/docker/celery/ctfr-pyproject.toml new file mode 100644 index 000000000..42447cb4b --- /dev/null +++ b/docker/celery/ctfr-pyproject.toml @@ -0,0 +1,15 @@ +[tool.poetry] +name = "ctfr" +version = "0.1.0" +description = "Certificate Transparency Logs subdomain enumeration tool" +authors = ["UnaPibaGeek"] +license = "GPL-3.0" +readme = "README.md" + +[tool.poetry.dependencies] +python = "^3.10" +requests = "^2.31.0" + + +[build-system] +requires = ["poetry-core"] diff --git a/docker/celery/entrypoint-dev.sh b/docker/celery/entrypoint-dev.sh new file mode 100755 index 000000000..2b0e136cc --- /dev/null +++ b/docker/celery/entrypoint-dev.sh @@ -0,0 +1,14 @@ +#!/bin/bash + +if [ "$CELERY_DEBUG" == "1" ]; then + export CELERY_LOGLEVEL='debug' +fi + +# Check if remote debugging is enabled and set concurrency to 1 for easier debugging +if [ "$CELERY_REMOTE_DEBUG" == "1" ]; then + # Set Celery concurrency to 1 because threaded processes are hard to debug + export MIN_CONCURRENCY=1 + export MAX_CONCURRENCY=1 +fi + +/entrypoint.sh \ No newline at end of file diff --git a/docker/celery/entrypoint.sh b/docker/celery/entrypoint.sh new file mode 100755 index 000000000..2aad31b78 --- /dev/null +++ b/docker/celery/entrypoint.sh @@ -0,0 +1,52 @@ +#!/bin/bash + +print_msg() { + printf "\r\n" + printf "========================================\r\n" + printf "$1\r\n" + printf "========================================\r\n\r\n" +} + +print_msg "Generate Django migration files" +poetry run -C $HOME/ python3 manage.py makemigrations +print_msg "Migrate database" +poetry run -C $HOME/ python3 manage.py migrate +print_msg "Collect static files" +poetry run -C $HOME/ python3 manage.py collectstatic
--no-input --clear + +# Load default engines, keywords, and external tools +print_msg "Load default engines" +poetry run -C $HOME/ python3 manage.py loaddata fixtures/default_scan_engines.yaml --app scanEngine.EngineType +print_msg "Load default keywords" +poetry run -C $HOME/ python3 manage.py loaddata fixtures/default_keywords.yaml --app scanEngine.InterestingLookupModel +print_msg "Load default external tools" +poetry run -C $HOME/ python3 manage.py loaddata fixtures/external_tools.yaml --app scanEngine.InstalledExternalTool + +if [ ! "$CELERY_LOGLEVEL" ]; then + export CELERY_LOGLEVEL='info' +fi + +print_msg "Start celery workers" +watchmedo auto-restart --recursive --pattern="*.py" --directory="/home/rengine/rengine/" -- poetry run -C $HOME/ celery -A reNgine.tasks worker --loglevel=$CELERY_LOGLEVEL --autoscale=$MAX_CONCURRENCY,$MIN_CONCURRENCY -Q main_scan_queue & +watchmedo auto-restart --recursive --pattern="*.py" --directory="/home/rengine/rengine/" -- poetry run -C $HOME/ celery -A reNgine.tasks worker --pool=gevent --concurrency=30 --loglevel=$CELERY_LOGLEVEL -Q initiate_scan_queue -n initiate_scan_worker & +watchmedo auto-restart --recursive --pattern="*.py" --directory="/home/rengine/rengine/" -- poetry run -C $HOME/ celery -A reNgine.tasks worker --pool=gevent --concurrency=30 --loglevel=$CELERY_LOGLEVEL -Q subscan_queue -n subscan_worker & +watchmedo auto-restart --recursive --pattern="*.py" --directory="/home/rengine/rengine/" -- poetry run -C $HOME/ celery -A reNgine.tasks worker --pool=gevent --concurrency=20 --loglevel=$CELERY_LOGLEVEL -Q report_queue -n report_worker & +watchmedo auto-restart --recursive --pattern="*.py" --directory="/home/rengine/rengine/" -- poetry run -C $HOME/ celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$CELERY_LOGLEVEL -Q send_notif_queue -n send_notif_worker & +watchmedo auto-restart --recursive --pattern="*.py" --directory="/home/rengine/rengine/" -- poetry run -C $HOME/ celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$CELERY_LOGLEVEL -Q send_scan_notif_queue -n send_scan_notif_worker & +watchmedo auto-restart --recursive --pattern="*.py" --directory="/home/rengine/rengine/" -- poetry run -C $HOME/ celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$CELERY_LOGLEVEL -Q send_task_notif_queue -n send_task_notif_worker & +watchmedo auto-restart --recursive --pattern="*.py" --directory="/home/rengine/rengine/" -- poetry run -C $HOME/ celery -A reNgine.tasks worker --pool=gevent --concurrency=5 --loglevel=$CELERY_LOGLEVEL -Q send_file_to_discord_queue -n send_file_to_discord_worker & +watchmedo auto-restart --recursive --pattern="*.py" --directory="/home/rengine/rengine/" -- poetry run -C $HOME/ celery -A reNgine.tasks worker --pool=gevent --concurrency=5 --loglevel=$CELERY_LOGLEVEL -Q send_hackerone_report_queue -n send_hackerone_report_worker & +watchmedo auto-restart --recursive --pattern="*.py" --directory="/home/rengine/rengine/" -- poetry run -C $HOME/ celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$CELERY_LOGLEVEL -Q parse_nmap_results_queue -n parse_nmap_results_worker & +watchmedo auto-restart --recursive --pattern="*.py" --directory="/home/rengine/rengine/" -- poetry run -C $HOME/ celery -A reNgine.tasks worker --pool=gevent --concurrency=20 --loglevel=$CELERY_LOGLEVEL -Q geo_localize_queue -n geo_localize_worker & +watchmedo auto-restart --recursive --pattern="*.py" --directory="/home/rengine/rengine/" -- poetry run -C $HOME/ celery -A 
reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$CELERY_LOGLEVEL -Q query_whois_queue -n query_whois_worker & +watchmedo auto-restart --recursive --pattern="*.py" --directory="/home/rengine/rengine/" -- poetry run -C $HOME/ celery -A reNgine.tasks worker --pool=gevent --concurrency=30 --loglevel=$CELERY_LOGLEVEL -Q remove_duplicate_endpoints_queue -n remove_duplicate_endpoints_worker & +watchmedo auto-restart --recursive --pattern="*.py" --directory="/home/rengine/rengine/" -- poetry run -C $HOME/ celery -A reNgine.tasks worker --pool=gevent --concurrency=50 --loglevel=$CELERY_LOGLEVEL -Q run_command_queue -n run_command_worker & +watchmedo auto-restart --recursive --pattern="*.py" --directory="/home/rengine/rengine/" -- poetry run -C $HOME/ celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$CELERY_LOGLEVEL -Q query_reverse_whois_queue -n query_reverse_whois_worker & +watchmedo auto-restart --recursive --pattern="*.py" --directory="/home/rengine/rengine/" -- poetry run -C $HOME/ celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$CELERY_LOGLEVEL -Q query_ip_history_queue -n query_ip_history_worker & +watchmedo auto-restart --recursive --pattern="*.py" --directory="/home/rengine/rengine/" -- poetry run -C $HOME/ celery -A reNgine.tasks worker --pool=gevent --concurrency=30 --loglevel=$CELERY_LOGLEVEL -Q gpt_queue -n gpt_worker & +watchmedo auto-restart --recursive --pattern="*.py" --directory="/home/rengine/rengine/" -- poetry run -C $HOME/ celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$CELERY_LOGLEVEL -Q dorking_queue -n dorking_worker & +watchmedo auto-restart --recursive --pattern="*.py" --directory="/home/rengine/rengine/" -- poetry run -C $HOME/ celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$CELERY_LOGLEVEL -Q osint_discovery_queue -n osint_discovery_worker & +watchmedo auto-restart --recursive --pattern="*.py" --directory="/home/rengine/rengine/" -- poetry run -C $HOME/ celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$CELERY_LOGLEVEL -Q h8mail_queue -n h8mail_worker & +watchmedo auto-restart --recursive --pattern="*.py" --directory="/home/rengine/rengine/" -- poetry run -C $HOME/ celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$CELERY_LOGLEVEL -Q theHarvester_queue -n theHarvester_worker + +exec "$@" \ No newline at end of file diff --git a/docker/celery/eyewitness-pyproject.toml b/docker/celery/eyewitness-pyproject.toml new file mode 100644 index 000000000..ceef5cb17 --- /dev/null +++ b/docker/celery/eyewitness-pyproject.toml @@ -0,0 +1,19 @@ +[tool.poetry] +name = "eyewitness" +version = "0.1.0" +description = "EyeWitness is designed to take screenshots of websites, provide some server header info, and identify default credentials if known." +authors = ["RedSiege"] +license = "GPL-3.0" +readme = "README.md" + +[tool.poetry.dependencies] +python = ">3.9,<4" +fuzzywuzzy = "^0.18.0" +selenium = "4.9.1" +python-Levenshtein = "^0.25.1" +PyVirtualDisplay = "^3.0" +netaddr = "^1.2.1" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/docker/celery/infoga-pyproject.toml b/docker/celery/infoga-pyproject.toml new file mode 100644 index 000000000..a328ab8d3 --- /dev/null +++ b/docker/celery/infoga-pyproject.toml @@ -0,0 +1,18 @@ +[tool.poetry] +name = "infoga" +version = "0.1.0" +description = "Infoga is a tool for gathering email account information (ip, hostname, country, ...)
from different public sources (search engines, PGP key servers and Shodan) and checks whether emails were leaked using the haveibeenpwned.com API." +authors = ["m4llOk "] +license = "GPL-3.0" +readme = "README.md" + +[tool.poetry.dependencies] +python = "^3.10" +colorama = "^0.4.6" +requests = "^2.31.0" +urllib3 = "^2.2.1" + + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" \ No newline at end of file diff --git a/docker/celery/oneforall-pyproject.toml b/docker/celery/oneforall-pyproject.toml new file mode 100644 index 000000000..55299d3e7 --- /dev/null +++ b/docker/celery/oneforall-pyproject.toml @@ -0,0 +1,37 @@ +[tool.poetry] +name = "oneforall" +version = "0.4.0" +description = "OneForAll is a powerful and robust subdomain enumeration tool." +license = "GPL-3.0-or-later" +authors = ["shmilylty "] +readme = "README.md" +repository = "https://github.com/shmilylty/OneForAll" + +[tool.poetry.dependencies] +python = ">3.8,<3.12" +beautifulsoup4 = "4.11.1" +bs4 = "0.0.1" +certifi = "2022.06.15" +chardet = "5.0.0" +colorama = "0.4.4" +dnspython = "2.2.1" +exrex = "0.10.5" +fire = "0.4.0" +future = "0.18.2" +idna = "3.3" +loguru = "0.6.0" +pysocks = "1.7.1" +requests = "2.28.1" +six = "1.16.0" +soupsieve = "2.3.2" +sqlalchemy = "1.3.22" +tenacity = "8.0.1" +termcolor = "1.1.0" +tqdm = "4.64.0" +treelib = "1.6.1" +urllib3 = "1.26.9" +win32-setctime = "1.1.0" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/docker/celery/pyproject.toml b/docker/celery/pyproject.toml new file mode 100644 index 000000000..1c3b78066 --- /dev/null +++ b/docker/celery/pyproject.toml @@ -0,0 +1,56 @@ +[tool.poetry] +name = "celery-rengine" +version = "0.1.0" +description = "" +authors = ["Talanor "] +package-mode = false + +[tool.poetry.dependencies] +python = "^3.10" +aiodns = "3.0.0" +argh = "0.26.2" +beautifulsoup4 = "4.9.3" +celery = "5.4.0" +debugpy = "1.8.5" +discord-webhook = "1.3.0" +django = "3.2.25" +django-ace = "1.32.4" +django-celery-beat = "2.6.0" +django-debug-toolbar = "4.3.0" +django-extensions = "3.2.3" +django-environ = "0.11.2" +django-login-required-middleware = "0.9.0" +django-role-permissions = "3.2.0" +django-mathfilters = "1.0.0" +django-timezone-field = "6.1.0" +djangorestframework = "3.14.0" +djangorestframework-datatables = "0.7.2" +dotted-dict = "1.1.3" +drf-yasg = "1.21.5" +gunicorn = "23.0.0" +gevent = "24.2.1" +humanize = "4.3.0" +langchain = "0.1.0" +markdown = "3.3.4" +metafinder = "1.2" +netaddr = "0.8.0" +netlas = "0.4.1" +openai = "0.28.0" +pyyaml = "6.0.1" +pysocks = "1.7.1" +psycopg2 = "2.9.7" +pycvesearch = "1.0" +redis = "5.0.3" +requests = "2.32.2" +scapy = "2.4.3" +tldextract = "3.5.0" +uro = "1.0.0" +validators = "0.18.2" +watchdog = "4.0.0" +weasyprint = "53.3" +whatportis = { git = "https://github.com/Security-Tools-Alliance/whatportis" } +xmltodict = "0.13.0" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" \ No newline at end of file diff --git a/certs/Dockerfile b/docker/certs/Dockerfile similarity index 95% rename from certs/Dockerfile rename to docker/certs/Dockerfile index 7bd93d433..4f71720d6 100644 --- a/certs/Dockerfile +++ b/docker/certs/Dockerfile @@ -10,7 +10,7 @@ RUN echo "[ v3_ca ]" >>/etc/ssl/openssl.cnf && \ echo "authorityKeyIdentifier = keyid:always,issuer:always" >>/etc/ssl/openssl.cnf && \ echo "keyUsage = critical, digitalSignature, cRLSign, keyCertSign" >>/etc/ssl/openssl.cnf -COPY entrypoint.sh / +COPY ./entrypoint.sh / RUN chmod +x
/entrypoint.sh ENTRYPOINT ["/entrypoint.sh"] diff --git a/certs/entrypoint.sh b/docker/certs/entrypoint.sh similarity index 89% rename from certs/entrypoint.sh rename to docker/certs/entrypoint.sh index fded161ba..4d707f8e3 100755 --- a/certs/entrypoint.sh +++ b/docker/certs/entrypoint.sh @@ -19,10 +19,14 @@ cert() { -out ${FILENAME}.csr \ -subj "/C=${COUNTRY_CODE}/ST=${STATE}/L=${CITY}/O=${COMPANY}/CN=${COMMON_NAME}" + # Create a SAN extension, which modern browsers require + echo "subjectAltName=DNS:${COMMON_NAME}" > client-ext.cnf + # Create a new certificate using our own CA openssl x509 -req -sha256 -passin pass:${AUTHORITY_PASSWORD} -days 3650 \ -in ${FILENAME}.csr -CA ca.crt -CAkey ca.key \ - -out ${FILENAME}.crt + -out ${FILENAME}.crt \ + -extfile client-ext.cnf # Rename files and remove useless ones mv ${FILENAME}.crt ${FILENAME}.pem diff --git a/docker/docker-compose.build.yml b/docker/docker-compose.build.yml new file mode 100644 index 000000000..3af75bcbf --- /dev/null +++ b/docker/docker-compose.build.yml @@ -0,0 +1,18 @@ +services: + db: + build: ./postgres + + redis: + build: ./redis + + celery: + build: ./celery + + web: + build: ./web + + proxy: + build: ./proxy + + ollama: + build: ./ollama diff --git a/docker/docker-compose.dev.yml b/docker/docker-compose.dev.yml new file mode 100644 index 000000000..41fbb1590 --- /dev/null +++ b/docker/docker-compose.dev.yml @@ -0,0 +1,42 @@ +services: + db: + ports: + - "127.0.0.1:5432:5432" + + redis: + ports: + - "127.0.0.1:6379:6379" + + celery: + entrypoint: /entrypoint-dev.sh + environment: + - CELERY_DEBUG=1 + - CELERY_REMOTE_DEBUG=0 + - CELERY_REMOTE_DEBUG_PORT=5679 + volumes: + - ./celery/entrypoint-dev.sh:/entrypoint-dev.sh:ro + ports: + - "127.0.0.1:5679:5679" + + celery-beat: + entrypoint: /entrypoint-dev.sh + environment: + - CELERY_DEBUG=1 + volumes: + - ./beat/entrypoint-dev.sh:/entrypoint-dev.sh:ro + + web: + entrypoint: /entrypoint-dev.sh + environment: + - UI_DEBUG=1 + - UI_REMOTE_DEBUG=0 + - UI_REMOTE_DEBUG_PORT=5678 + volumes: + - ./web/entrypoint-dev.sh:/entrypoint-dev.sh:ro + ports: + - "127.0.0.1:8000:8000" + - "127.0.0.1:5678:5678" + + ollama: + ports: + - "127.0.0.1:11434:11434" diff --git a/docker/docker-compose.setup.yml b/docker/docker-compose.setup.yml new file mode 100644 index 000000000..62c2ef5d9 --- /dev/null +++ b/docker/docker-compose.setup.yml @@ -0,0 +1,7 @@ +services: + certs: + image: ghcr.io/security-tools-alliance/rengine-ng:rengine-certs-v${RENGINE_VERSION} + env_file: + - ../.env + volumes: + - ./secrets/certs:/certs:rw diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml new file mode 100644 index 000000000..fb139f64a --- /dev/null +++ b/docker/docker-compose.yml @@ -0,0 +1,200 @@ +name: rengine + +services: + db: + image: ghcr.io/security-tools-alliance/rengine-ng:rengine-postgres-v${RENGINE_VERSION} + env_file: + - ../.env + restart: unless-stopped + container_name: rengine-db-1 + volumes: + - postgres_data:/var/lib/postgresql/data/ + healthcheck: + test: ["CMD-SHELL", "pg_isready"] + interval: 10s + timeout: 5s + retries: 5 + networks: + - rengine_network + + redis: + image: ghcr.io/security-tools-alliance/rengine-ng:rengine-redis-v${RENGINE_VERSION} + env_file: + - ../.env + restart: unless-stopped + container_name: rengine-redis-1 + hostname: redis + healthcheck: + test: ["CMD", "redis-cli","ping"] + interval: 10s + timeout: 5s + retries: 5 + networks: + - rengine_network + + celery: + image: ghcr.io/security-tools-alliance/rengine-ng:rengine-celery-v${RENGINE_VERSION} +
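+ # Scan worker service: docker/celery/entrypoint.sh (mounted read-only below) launches one Celery worker per queue, and the main scan queue autoscales between MIN_CONCURRENCY and MAX_CONCURRENCY from ../.env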
env_file: + - ../.env + user: rengine + restart: unless-stopped + container_name: rengine-celery-1 + entrypoint: /entrypoint.sh + environment: + - CELERY_BROKER=redis://redis:6379/0 + - CELERY_BACKEND=redis://redis:6379/0 + volumes: + - ../web:/home/rengine/rengine:rw,z + - ./celery/entrypoint.sh:/entrypoint.sh:ro + - scan_results:/home/rengine/scan_results + - tool_config:/home/rengine/.config + - nuclei_templates:/home/rengine/nuclei-templates + - gf_patterns:/home/rengine/.gf + - wordlist:/home/rengine/wordlists + healthcheck: + test: ["CMD", "poetry", "-C", "/home/rengine", "run", "celery","-A","reNgine","status"] + interval: 10s + timeout: 10s + retries: 60 + depends_on: + db: + condition: service_healthy + redis: + condition: service_healthy + networks: + - rengine_network + + celery-beat: + image: ghcr.io/security-tools-alliance/rengine-ng:rengine-celery-v${RENGINE_VERSION} + env_file: + - ../.env + user: rengine + restart: unless-stopped + container_name: rengine-celery-beat-1 + entrypoint: /entrypoint.sh + environment: + - CELERY_BROKER=redis://redis:6379/0 + - CELERY_BACKEND=redis://redis:6379/0 + depends_on: + celery: + condition: service_healthy + volumes: + - ../web:/home/rengine/rengine:rw,z + - ./beat/entrypoint.sh:/entrypoint.sh:ro + - scan_results:/home/rengine/scan_results + - tool_config:/home/rengine/.config + - nuclei_templates:/home/rengine/nuclei-templates + - gf_patterns:/home/rengine/.gf + - wordlist:/home/rengine/wordlists + networks: + - rengine_network + + web: + image: ghcr.io/security-tools-alliance/rengine-ng:rengine-web-v${RENGINE_VERSION} + env_file: + - ../.env + user: rengine + restart: unless-stopped + container_name: rengine-web-1 + entrypoint: /entrypoint.sh + environment: + - RENGINE_HOME=/home/rengine/rengine + - CELERY_BROKER=redis://redis:6379/0 + - CELERY_BACKEND=redis://redis:6379/0 + volumes: + - ../web:/home/rengine/rengine:rw,z + - ./web/entrypoint.sh:/entrypoint.sh:ro + - tool_config:/home/rengine/.config + - nuclei_templates:/home/rengine/nuclei-templates + - gf_patterns:/home/rengine/.gf + - wordlist:/home/rengine/wordlists + healthcheck: + test: ["CMD", "curl", "-f", "-k", "http://localhost:8000"] + interval: 10s + timeout: 10s + retries: 10 + depends_on: + db: + condition: service_healthy + celery: + condition: service_healthy + celery-beat: + condition: service_started + networks: + rengine_network: + aliases: + - rengine + + proxy: + image: ghcr.io/security-tools-alliance/rengine-ng:rengine-proxy-v${RENGINE_VERSION} + env_file: + - ../.env + restart: unless-stopped + container_name: rengine-proxy-1 + depends_on: + web: + condition: service_healthy + db: + condition: service_healthy + redis: + condition: service_healthy + secrets: + - source: proxy.ca + target: /etc/nginx/certs/rengine_chain.pem + - source: proxy.cert + target: /etc/nginx/certs/rengine.pem + - source: proxy.key + target: /etc/nginx/certs/rengine_rsa.key + volumes: + - ./proxy/config/rengine.conf:/etc/nginx/conf.d/rengine.conf:ro + - ../web:/home/rengine/rengine:rw,z + - scan_results:/home/rengine/scan_results + networks: + - rengine_network + ports: + - 8082:8082/tcp + - 443:443/tcp + + ollama: + image: ghcr.io/security-tools-alliance/rengine-ng:rengine-ollama-v${RENGINE_VERSION} + env_file: + - ../.env + restart: unless-stopped + container_name: rengine-ollama-1 + volumes: + - ollama_data:/home/rengine/.ollama:rw,z + networks: + - rengine_network + +networks: + rengine_network: + name: rengine_network + +volumes: + tool_config: + name: rengine_tool_config + 
postgres_data: + name: rengine_postgres_data + gf_patterns: + name: rengine_gf_patterns + nuclei_templates: + name: rengine_nuclei_templates + github_repos: + name: rengine_github_repos + wordlist: + name: rengine_wordlist + scan_results: + name: rengine_scan_results + static_volume: + name: rengine_static_volume + ollama_data: + name: rengine_ollama_data + + +secrets: + proxy.ca: + file: ./secrets/certs/rengine_chain.pem + proxy.key: + file: ./secrets/certs/rengine_rsa.key + proxy.cert: + file: ./secrets/certs/rengine.pem \ No newline at end of file diff --git a/docker/ollama/Dockerfile b/docker/ollama/Dockerfile new file mode 100644 index 000000000..f30cc3915 --- /dev/null +++ b/docker/ollama/Dockerfile @@ -0,0 +1 @@ +FROM ollama/ollama:0.3.6 \ No newline at end of file diff --git a/docker/postgres/Dockerfile b/docker/postgres/Dockerfile new file mode 100644 index 000000000..052d3595a --- /dev/null +++ b/docker/postgres/Dockerfile @@ -0,0 +1 @@ +FROM postgres:12.3-alpine \ No newline at end of file diff --git a/docker/proxy/Dockerfile b/docker/proxy/Dockerfile new file mode 100644 index 000000000..950908a9f --- /dev/null +++ b/docker/proxy/Dockerfile @@ -0,0 +1 @@ +FROM nginx:1.27.1-alpine3.20 \ No newline at end of file diff --git a/config/nginx/rengine.conf b/docker/proxy/config/rengine.conf similarity index 91% rename from config/nginx/rengine.conf rename to docker/proxy/config/rengine.conf index bce3b68be..263c0565a 100644 --- a/config/nginx/rengine.conf +++ b/docker/proxy/config/rengine.conf @@ -7,8 +7,9 @@ server { server { - listen 443 ssl http2; - listen [::]:443 ssl http2; + listen 443 ssl; + listen [::]:443 ssl; + http2 on; server_name rengine recon; charset utf-8; @@ -25,15 +26,15 @@ server { proxy_set_header X-Real-IP $remote_addr; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-Proto $scheme; - proxy_pass http://rengine:8000/; + proxy_pass http://web:8000/; } location /staticfiles/ { - alias /usr/src/app/staticfiles/; + alias /home/rengine/rengine/staticfiles/; } location /media/ { - alias /usr/src/scan_results/; + alias /home/rengine/scan_results/; } ssl_protocols TLSv1.2; diff --git a/docker/redis/Dockerfile b/docker/redis/Dockerfile new file mode 100644 index 000000000..12c2f3d47 --- /dev/null +++ b/docker/redis/Dockerfile @@ -0,0 +1 @@ +FROM redis:7.4.0-alpine3.20 \ No newline at end of file diff --git a/docker/web/Dockerfile b/docker/web/Dockerfile new file mode 100644 index 000000000..dfc6d437f --- /dev/null +++ b/docker/web/Dockerfile @@ -0,0 +1,35 @@ +FROM --platform=$BUILDPLATFORM python:3.10-alpine + +# Labels and Credits +LABEL \ + name="reNgine-ng" \ + author="Security-Tools-Alliance (https://github.com/Security-Tools-Alliance) & Yogesh Ojha " \ + description="reNgine-ng is an automated pipeline for the recon process, useful for information gathering during web application penetration testing."
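+# The instructions below create a non-root "rengine" user (uid/gid 1000), group build-only packages into the ".build-deps" virtual package, and install Python dependencies with Poetry (installed via pipx) from the pyproject.toml copied into the image.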
+ +# Environment Variables +ENV DEBIAN_FRONTEND="noninteractive" \ + DATABASE="postgres" +ENV USERNAME="rengine" + +RUN apk add --no-cache bash postgresql-libs curl fontconfig ttf-freefont font-noto terminus-font net-tools htop vim && \ + apk add --no-cache py3-pip gcc musl-dev python3-dev pango zlib-dev jpeg-dev openjpeg-dev g++ libffi-dev && \ + apk add --no-cache --virtual .build-deps gcc python3-dev musl-dev postgresql-dev && \ + fc-cache -f && \ + fc-list | sort && \ + addgroup --gid 1000 -S $USERNAME && \ + adduser -g 1000 -u 1000 -S --shell /bin/bash --ingroup $USERNAME $USERNAME + +USER $USERNAME +ENV PATH=/home/$USERNAME/.local/bin:${PATH} + +COPY --chown=$USERNAME:$USERNAME ./pyproject.toml /home/$USERNAME/pyproject.toml +WORKDIR /home/$USERNAME +RUN echo 'PATH="$HOME/.local/bin:${PATH}"' >> $HOME/.bashrc && \ + pip install pipx && pipx install poetry && poetry install + +EXPOSE 8000 +USER $USERNAME +RUN mkdir -p /home/$USERNAME/rengine +VOLUME /home/$USERNAME/rengine +WORKDIR /home/$USERNAME/rengine +ENTRYPOINT ["/entrypoint.sh"] \ No newline at end of file diff --git a/docker/web/entrypoint-dev.sh b/docker/web/entrypoint-dev.sh new file mode 100755 index 000000000..e9d96e00f --- /dev/null +++ b/docker/web/entrypoint-dev.sh @@ -0,0 +1,24 @@ +#!/bin/bash + +print_msg() { + printf "\r\n" + printf "========================================\r\n" + printf "$1\r\n" + printf "========================================\r\n\r\n" +} + +print_msg "Generate Django migrations files" +poetry run -C $HOME/ python3 manage.py makemigrations + +print_msg "Migrate database" +poetry run -C $HOME/ python3 manage.py migrate + +# Collect static files for development +print_msg "Collect static files" +poetry run -C $HOME/ python3 manage.py collectstatic --noinput + +# Run development server +print_msg "Launching Django development Web server" +poetry run -C $HOME/ python3 manage.py runserver 0.0.0.0:8000 + +exec "$@" diff --git a/docker/web/entrypoint.sh b/docker/web/entrypoint.sh new file mode 100755 index 000000000..8721143fc --- /dev/null +++ b/docker/web/entrypoint.sh @@ -0,0 +1,9 @@ +#!/bin/bash + +# Collect static files +poetry run -C $HOME/ python3 manage.py collectstatic --noinput + +# Run production server +poetry run -C $HOME/ gunicorn reNgine.wsgi:application -w 8 --bind 0.0.0.0:8000 --limit-request-line 0 + +exec "$@" \ No newline at end of file diff --git a/docker/web/pyproject.toml b/docker/web/pyproject.toml new file mode 100644 index 000000000..4b78c33c3 --- /dev/null +++ b/docker/web/pyproject.toml @@ -0,0 +1,56 @@ +[tool.poetry] +name = "celery-rengine" +version = "0.1.0" +description = "" +authors = ["Talanor "] +package-mode = false + +[tool.poetry.dependencies] +python = "^3.10" +aiodns = "3.0.0" +argh = "0.26.2" +beautifulsoup4 = "4.9.3" +celery = "5.4.0" +debugpy = "1.8.5" +discord-webhook = "1.3.0" +django = "3.2.25" +django-ace = "1.32.4" +django-celery-beat = "2.6.0" +django-debug-toolbar = "4.3.0" +django-environ = "0.11.2" +django-extensions = "3.2.3" +django-login-required-middleware = "0.9.0" +django-role-permissions = "3.2.0" +django-mathfilters = "1.0.0" +django-timezone-field = "6.1.0" +djangorestframework = "3.14.0" +djangorestframework-datatables = "0.7.2" +dotted-dict = "1.1.3" +drf-yasg = "1.21.5" +gunicorn = "23.0.0" +gevent = "24.2.1" +humanize = "4.3.0" +langchain = "0.1.0" +markdown = "3.3.4" +metafinder = "1.2" +netaddr = "0.8.0" +netlas = "0.4.1" +openai = "0.28.0" +pyyaml = "6.0.1" +pysocks = "1.7.1" +psycopg2 = "2.9.7" +pycvesearch = "1.0" +redis = "5.0.3" 
+requests = "2.32.2" +scapy = "2.4.3" +tldextract = "3.5.0" +uro = "1.0.0" +validators = "0.18.2" +watchdog = "4.0.0" +weasyprint = "53.3" +whatportis = { git = "https://github.com/Security-Tools-Alliance/whatportis" } +xmltodict = "0.13.0" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" \ No newline at end of file diff --git a/install.sh b/install.sh index 8136560e6..1f406e8a7 100755 --- a/install.sh +++ b/install.sh @@ -1,19 +1,9 @@ #!/bin/bash -# Define color codes. -# Using `tput setaf` at some places because the variable only works with log/echo - -COLOR_BLACK=0 -COLOR_RED=1 -COLOR_GREEN=2 -COLOR_YELLOW=3 -COLOR_BLUE=4 -COLOR_MAGENTA=5 -COLOR_CYAN=6 -COLOR_WHITE=7 -COLOR_DEFAULT=$COLOR_WHITE # Use white as default for clarity - -# Fetch the internal and external IP address so that it can be printed later when the script has finished installing reNgine-ng +# Import common functions +source "$(pwd)/scripts/common_functions.sh" # Open the file if you want to know the meaning of each color + +# Fetch the internal and external IP address external_ip=$(curl -s https://ipecho.net/plain) internal_ips=$(ip -4 -br addr | awk '$2 == "UP" {print $3} /^lo/ {print $3}' | cut -d'/' -f1) formatted_ips="" @@ -21,134 +11,294 @@ for ip in $internal_ips; do formatted_ips="${formatted_ips}https://$ip\n" done -# Log messages in different colors -log() { - local color=${2:-$COLOR_DEFAULT} # Use default color if $2 is not set - if [ "$color" -ne $COLOR_DEFAULT ]; then - tput setaf "$color" +# Check Docker installation +check_docker_installation() { + while true; do + log "Docker is not installed. You have two options for installation:" $COLOR_CYAN + log "1) Docker Desktop: A user-friendly application with a GUI, suitable for developers. It includes Docker Engine, Docker CLI, Docker Compose, and other tools." $COLOR_GREEN + log "2) Docker Engine: A lightweight, command-line interface suitable for servers and advanced users. It's the core of Docker without additional GUI tools." $COLOR_GREEN + + read -p "Enter your choice (1 or 2): " docker_choice + + case $docker_choice in + 1) + log "Please install Docker Desktop from: https://docs.docker.com/desktop/" $COLOR_YELLOW + break + ;; + 2) + log "Please install Docker Engine from: https://docs.docker.com/engine/install/" $COLOR_YELLOW + break + ;; + *) + log "Invalid choice. Please choose 1 or 2." $COLOR_RED + ;; + esac + done + + log "After installation, please restart this script." $COLOR_CYAN + exit 1 +} + +# Check Docker version and status +check_docker() { + local min_version="20.10.0" + log "Checking Docker installation (minimum required version: $min_version)..." $COLOR_CYAN + + if ! command -v docker &> /dev/null; then + check_docker_installation + fi + + if ! DOCKER_ERROR=$(docker info 2>&1); then + echo "Docker check failed: ${DOCKER_ERROR}" + log "Docker is not running. Please start Docker and try again." $COLOR_RED + log "You can start Docker using: sudo systemctl start docker (on most Linux systems)" $COLOR_YELLOW + exit 1 + fi + + local version=$(docker version --format '{{.Server.Version}}') + + if ! [[ "$(printf '%s\n' "$min_version" "$version" | sort -V | head -n1)" = "$min_version" ]]; then + log "Docker version $version is installed, but reNgine-ng requires version $min_version or higher." $COLOR_RED + log "Please upgrade Docker to continue. Visit https://docs.docker.com/engine/install/ for installation instructions." 
$COLOR_YELLOW + exit 1 fi - printf "$1\r\n" - tput sgr0 # Reset text color + + log "Docker version $version is installed and running." $COLOR_GREEN + log "It's recommended to use the latest version of Docker. Check https://docs.docker.com/engine/release-notes/ for updates." $COLOR_YELLOW } -# Check for root privileges -if [ "$(whoami)" != "root" ] - then - log "" - log "Error installing reNgine-ng: please run this script as root!" $COLOR_RED - log "Example: sudo ./install.sh" $COLOR_RED - exit -fi +# Check Docker Compose version and set the appropriate command +check_docker_compose() { + local min_version="2.2.0" + log "Checking Docker Compose installation (minimum required version: $min_version)..." $COLOR_CYAN -cat web/art/reNgine.txt - -log "\r\nBefore running this script, please make sure Docker is running and you have made changes to the '.env' file." $COLOR_RED -log "Changing the PostgreSQL username & password in the '.env' is highly recommended.\r\n" $COLOR_RED - -log "Please note that this installation script is only intended for Linux" $COLOR_RED -log "Only x86_64 platform are supported" $COLOR_RED - -log "" -tput setaf 1; -read -p "Are you sure you made changes to the '.env' file (y/n)? " answer -case ${answer:0:1} in - y|Y|yes|YES|Yes ) - log "\nContinuing installation!\n" $COLOR_GREEN - ;; - * ) - if ! command -v nano &> /dev/null; then - . /etc/os-release - case "$ID" in - ubuntu|debian) sudo apt update && sudo apt install -y nano ;; - fedora) sudo dnf install -y nano ;; - centos|rhel) sudo yum install -y nano ;; - arch) sudo pacman -Sy nano ;; - opensuse|suse) sudo zypper install -y nano ;; - *) log "Unsupported Linux distribution. Please install nano manually." $COLOR_RED; exit 1 ;; - esac - [ $? -eq 0 ] && log "nano installed!" $COLOR_GREEN || { log "Failed to install nano." $COLOR_RED; exit 1; } - else - log "nano already installed, skipping." $COLOR_GREEN - fi - nano .env - ;; -esac - -log "Installing reNgine-ng and its dependencies..." $COLOR_CYAN - -log "Installing curl..." $COLOR_CYAN - -if ! command -v curl &> /dev/null; then - . /etc/os-release - case "$ID" in - ubuntu|debian) sudo apt update && sudo apt install -y curl ;; - fedora) sudo dnf install -y curl ;; - centos|rhel) sudo yum install -y curl ;; - arch) sudo pacman -Sy curl ;; - opensuse|suse) sudo zypper install -y curl ;; - *) log "Unsupported Linux distribution. Please install curl manually." $COLOR_RED; exit 1 ;; - esac - [ $? -eq 0 ] && log "CURL installed!" $COLOR_GREEN || { log "Failed to install CURL." $COLOR_RED; exit 1; } -else - log "CURL already installed, skipping." $COLOR_GREEN -fi + if command -v docker &> /dev/null && docker compose version &> /dev/null; then + DOCKER_COMPOSE="docker compose" + elif command -v docker-compose &> /dev/null; then + DOCKER_COMPOSE="docker-compose" + else + if docker compose version 2>&1 | grep -q "is not a docker command"; then + log "Docker Compose is not installed. Please install Docker Compose v$min_version or later from https://docs.docker.com/compose/install/" $COLOR_RED + log "After installation, please restart this script." $COLOR_CYAN + exit 1 + else + log "An unexpected error occurred while checking for Docker Compose. Please ensure Docker and Docker Compose are correctly installed." $COLOR_RED + exit 1 + fi + fi + + local version=$($DOCKER_COMPOSE version --short) + + if ! 
[[ "$(printf '%s\n' "$min_version" "$version" | sort -V | head -n1)" = "$min_version" ]]; then + log "Docker Compose version $version is installed, but reNgine-ng requires version $min_version or higher." $COLOR_RED + log "Please upgrade Docker Compose to continue. Visit https://docs.docker.com/compose/install/ for installation instructions." $COLOR_YELLOW + log "After upgrade, please restart this script." $COLOR_CYAN + exit 1 + fi + + log "Using Docker Compose command: $DOCKER_COMPOSE (version $version)" $COLOR_GREEN + log "It's recommended to use the latest version of Docker Compose. Check https://docs.docker.com/compose/release-notes/ for updates." $COLOR_YELLOW + export DOCKER_COMPOSE +} + +# Generic function to install a package +install_package() { + local package_name="$1" + log "Installing $package_name..." $COLOR_CYAN + if ! command -v "$package_name" &> /dev/null; then + . /etc/os-release + DISTRO_FAMILY="${ID_LIKE:-$ID}" + case "$DISTRO_FAMILY" in + *debian*) sudo apt update && sudo apt install -y "$package_name" ;; + *fedora*|*centos*|*rhel*) sudo dnf install -y "$package_name" ;; + *arch*) sudo pacman -Sy "$package_name" ;; + *suse*|*opensuse*) sudo zypper install -y "$package_name" ;; + *) log "Unsupported Linux distribution: $DISTRO_FAMILY. Please install $package_name manually." $COLOR_RED; return 1 ;; + esac + if [ $? -eq 0 ]; then + log "$package_name installed successfully!" $COLOR_GREEN + else + log "Failed to install $package_name. Please check your internet connection and try again." $COLOR_RED + log "If the problem persists, try installing $package_name manually." $COLOR_YELLOW + return 1 + fi + else + log "$package_name is already installed, skipping." $COLOR_GREEN + fi +} + +# Install nano text editor +install_nano() { + install_package "nano" +} + +# Install curl for downloading files +install_curl() { + install_package "curl" +} + +# Install make for building projects +install_make() { + install_package "make" +} -log "Installing Docker..." $COLOR_CYAN -if ! command -v docker 2> /dev/null; then - curl -fsSL https://get.docker.com -o get-docker.sh && sh get-docker.sh - log "Docker installed!" $COLOR_GREEN -else - log "Docker already installed, skipping." $COLOR_GREEN + +# Check for root privileges +if [ $EUID -eq 0 ]; then + if [ "$SUDO_USER" = "root" ] || [ "$SUDO_USER" = "" ]; then + log "Error: Do not run this script as root user. Use 'sudo' with a non-root user." $COLOR_RED + log "Example: 'sudo ./install.sh'" $COLOR_RED + exit 1 + fi fi -log "Installing Docker Compose..." $COLOR_CYAN -if ! command -v docker compose 2> /dev/null; then - curl -L "https://github.com/docker/compose/releases/latest/download/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose - chmod +x /usr/local/bin/docker-compose - ln -s /usr/local/bin/docker-compose /usr/bin/docker-compose - log "Docker Compose installed!" $COLOR_GREEN -else - log "Docker Compose already installed, skipping." $COLOR_GREEN +# Check if the script is run with sudo +if [ -z "$SUDO_USER" ]; then + log "Error: This script must be run with sudo." $COLOR_RED + log "Example: 'sudo ./install.sh'" $COLOR_RED + exit 1 fi -if ! command -v make &> /dev/null; then - . /etc/os-release - case "$ID" in - ubuntu|debian) sudo apt update && sudo apt install -y make ;; - fedora) sudo dnf install -y make ;; - centos|rhel) sudo yum install -y make ;; - arch) sudo pacman -Sy make ;; - opensuse|suse) sudo zypper install -y make ;; - *) log "Unsupported Linux distribution. Please install make manually." 
$COLOR_RED; exit 1 ;; - esac - [ $? -eq 0 ] && log "make installed!" $COLOR_GREEN || { log "Failed to install make." $COLOR_RED; exit 1; } -else - log "make already installed, skipping." $COLOR_GREEN fi -log "Checking Docker status..." $COLOR_CYAN -if docker info >/dev/null 2>&1; then - log "Docker is running." $COLOR_GREEN -else - log "Docker is not running. Please run Docker and try again." $COLOR_RED - log "You can run Docker service using: sudo systemctl start docker" $COLOR_RED +usageFunction() +{ + log "Usage: $0 (-n) (-h)" $COLOR_GREEN + log "\t-n Non-interactive installation (Optional)" $COLOR_GREEN + log "\t-h Show usage" $COLOR_GREEN exit 1 -fi +} + +# Main installation process +main() { + cat web/art/reNgine.txt + + log "\r\nBefore running this script, please make sure Docker is installed and running, and you have made changes to the '.env' file." $COLOR_RED + log "Changing the PostgreSQL username & password in the '.env' is highly recommended.\r\n" $COLOR_RED + + log "Please note that this installation script is only intended for Linux" $COLOR_RED + log "x86_64 and arm64 platforms (compatible with Apple Mx series) are supported" $COLOR_RED + + log "Raspberry Pi is not recommended; all install tests have failed" $COLOR_RED + log "" + tput setaf 1; + + isNonInteractive=false + while getopts nh opt; do + case $opt in + n) isNonInteractive=true ;; + h) usageFunction ;; + ?) usageFunction ;; + esac + done + + if [ $isNonInteractive = false ]; then + read -p "Are you sure you made changes to the '.env' file (y/n)? " answer + case ${answer:0:1} in + y|Y|yes|YES|Yes ) + log "\nContinuing installation!\n" $COLOR_GREEN + ;; + * ) + install_nano + nano .env + ;; + esac + + log "Checking and installing reNgine-ng prerequisites..." $COLOR_CYAN + + install_curl + install_make + check_docker + check_docker_compose -log "Installing reNgine-ng, please be patient as it could take a while..." $COLOR_CYAN -sleep 5 + log "Do you want to build Docker images from source or use pre-built images (recommended)? \nUsing pre-built images saves significant build time but requires a good download speed to complete quickly." $COLOR_RED + log "1) From source" $COLOR_YELLOW + log "2) Use pre-built images (default)" $COLOR_YELLOW + read -p "Enter your choice (1 or 2, default is 2): " choice -log "Generating certificates and building Docker images..." $COLOR_CYAN -make certs && make build && log "reNgine-ng is built" $COLOR_GREEN || { log "reNgine-ng installation failed!" $COLOR_RED; exit 1; } + case $choice in + 1) + INSTALL_TYPE="source" + ;; + 2|"") + INSTALL_TYPE="prebuilt" + ;; + *) + log "Invalid choice. Defaulting to pre-built images." $COLOR_RED + INSTALL_TYPE="prebuilt" + ;; + esac -log "Docker containers starting, please wait as Celery container could take a while..." $COLOR_CYAN -sleep 5 -make up && log "reNgine-ng is installed!" $COLOR_GREEN || { log "reNgine-ng installation failed!"
$COLOR_RED; exit 1; } + log "Selected installation type: $INSTALL_TYPE" $COLOR_CYAN + fi + + # Non-interactive install + if [ $isNonInteractive = true ]; then + # Load and verify .env file + if [ -f .env ]; then + export $(grep -v '^#' .env | xargs) + else + log "Error: .env file not found. Copy the .env-dist file to .env and edit it" $COLOR_RED + exit 1 + fi + + if [ -z "$DJANGO_SUPERUSER_USERNAME" ] || [ -z "$DJANGO_SUPERUSER_EMAIL" ] || [ -z "$DJANGO_SUPERUSER_PASSWORD" ]; then + log "Error: DJANGO_SUPERUSER_USERNAME, DJANGO_SUPERUSER_EMAIL, and DJANGO_SUPERUSER_PASSWORD must be set in .env for non-interactive installation" $COLOR_RED + exit 1 + fi + + INSTALL_TYPE=${INSTALL_TYPE:-prebuilt} + log "Non-interactive installation parameter set. Installation begins." $COLOR_GREEN + fi + + if [ -z "$INSTALL_TYPE" ]; then + log "Error: INSTALL_TYPE is not set" $COLOR_RED + exit 1 + elif [ "$INSTALL_TYPE" != "prebuilt" ] && [ "$INSTALL_TYPE" != "source" ]; then + log "Error: INSTALL_TYPE must be either 'prebuilt' or 'source'" $COLOR_RED + exit 1 + fi + + log "Installing reNgine-ng from $INSTALL_TYPE, please be patient as the installation could take a while..." $COLOR_CYAN + sleep 5 -log "Creating an account..." $COLOR_CYAN -make username + log "Generating certificates..." $COLOR_CYAN + make certs && log "Certificates have been generated" $COLOR_GREEN || { log "Certificate generation failed!" $COLOR_RED; exit 1; } -log "\r\nThank you for installing reNgine-ng, happy recon!" $COLOR_GREEN + if [ "$INSTALL_TYPE" = "source" ]; then + log "Building Docker images..." $COLOR_CYAN + make build && log "Docker images have been built" $COLOR_GREEN || { log "Docker images build failed!" $COLOR_RED; exit 1; } + fi + + if [ "$INSTALL_TYPE" = "prebuilt" ]; then + log "Pulling pre-built Docker images..." $COLOR_CYAN + make pull && log "Docker images have been pulled" $COLOR_GREEN || { log "Docker images pull failed!" $COLOR_RED; exit 1; } + fi + + log "Docker containers starting, please wait as starting the Celery container could take a while..." $COLOR_CYAN + sleep 5 + make up && log "reNgine-ng is started!" $COLOR_GREEN || { log "reNgine-ng start failed!" $COLOR_RED; exit 1; } + + log "Creating an account..." $COLOR_CYAN + make superuser_create isNonInteractive=$isNonInteractive + + log "reNgine-ng is successfully installed and started!" $COLOR_GREEN + log "\r\nThank you for installing reNgine-ng, happy recon!" $COLOR_GREEN + + log "\r\nIn case you're running this locally, reNgine-ng should be available at one of the following IPs:\n$formatted_ips" $COLOR_GREEN + log "In case you're running this on a server, reNgine-ng should be available at: https://$external_ip/" $COLOR_GREEN +} -log "\r\nIn case you're running this locally, reNgine-ng should be available at one of the following IPs:\n$formatted_ips" $COLOR_GREEN -log "In case you're running this on a server, reNgine-ng should be available at: https://$external_ip/" $COLOR_GREEN +# Run the main installation process +main diff --git a/make.bat b/make.bat deleted file mode 100644 index b4d055ce3..000000000 --- a/make.bat +++ /dev/null @@ -1,35 +0,0 @@ -@echo off - -:: Credits: https://github.com/ninjhacks - -set COMPOSE_ALL_FILES = -f docker-compose.yml -set SERVICES = db web proxy redis celery celery-beat - -:: Generate certificates. -if "%1" == "certs" docker compose -f docker-compose.setup.yml run --rm certs -:: Generate certificates. -if "%1" == "setup" docker compose -f docker-compose.setup.yml run --rm certs -:: Build and start all services.
-if "%1" == "up" docker compose %COMPOSE_ALL_FILES% up -d --build %SERVICES% -:: Build all services. -if "%1" == "build" docker compose %COMPOSE_ALL_FILES% build %SERVICES% -:: Generate Username (Use only after make up). -if "%1" == "username" docker compose %COMPOSE_ALL_FILES% exec web python3 manage.py createsuperuser -:: Pull Docker images. -if "%1" == "pull" docker login docker.pkg.github.com & docker compose %COMPOSE_ALL_FILES% pull -:: Down all services. -if "%1" == "down" docker compose %COMPOSE_ALL_FILES% down -:: Stop all services. -if "%1" == "stop" docker compose %COMPOSE_ALL_FILES% stop %SERVICES% -:: Restart all services. -if "%1" == "restart" docker compose %COMPOSE_ALL_FILES% restart %SERVICES% -:: Remove all services containers. -if "%1" == "rm" docker compose %COMPOSE_ALL_FILES% rm -f %SERVICES% -:: Tail all logs with -n 1000. -if "%1" == "logs" docker compose %COMPOSE_ALL_FILES% logs --follow --tail=1000 %SERVICES% -:: Show all Docker images. -if "%1" == "images" docker compose %COMPOSE_ALL_FILES% images %SERVICES% -:: Remove containers and delete volume data. -if "%1" == "prune" docker compose %COMPOSE_ALL_FILES% stop %SERVICES% & docker compose %COMPOSE_ALL_FILES% rm -f %SERVICES% & docker volume prune -f -:: Show this help. -if "%1" == "help" @echo Make application Docker images and manage containers using Docker Compose files only for Windows. diff --git a/scripts/common_functions.sh b/scripts/common_functions.sh new file mode 100644 index 000000000..a3d4566f7 --- /dev/null +++ b/scripts/common_functions.sh @@ -0,0 +1,22 @@ +#!/bin/bash + +# Define color codes. +COLOR_BLACK=0 +COLOR_RED=1 # For errors and important messages +COLOR_GREEN=2 # For succesful output/messages +COLOR_YELLOW=3 # For questions and choices +COLOR_BLUE=4 +COLOR_MAGENTA=5 +COLOR_CYAN=6 # For actions that are being executed +COLOR_WHITE=7 # Default, we don't really use this explicitly +COLOR_DEFAULT=$COLOR_WHITE # Use white as default for clarity + +# Log messages in different colors +log() { + local color=${2:-$COLOR_DEFAULT} # Use default color if $2 is not set + if [ "$color" -ne $COLOR_DEFAULT ]; then + tput setaf "$color" + fi + printf "$1\r\n" + tput sgr0 # Reset text color +} diff --git a/scripts/run_tests.sh b/scripts/run_tests.sh new file mode 100755 index 000000000..e5ff4fe09 --- /dev/null +++ b/scripts/run_tests.sh @@ -0,0 +1,529 @@ +#!/bin/bash + +# Exit on any error +set -e + +# Import common functions +source "$(pwd)/common_functions.sh" + +# Function to determine host architecture +get_host_architecture() { + local arch=$(uname -m) + case $arch in + x86_64) + echo "amd64" + ;; + aarch64) + echo "arm64" + ;; + *) + echo "Unsupported architecture: $arch" >&2 + exit 1 + ;; + esac +} + +# Function to display help message +show_help() { + echo "Usage: $0 [--arch ] [--clean-temp] [--clean-all] [--without-build] [test1] [test2] ..." + echo + echo "Run tests for the reNgine-ng project in a VM environment." + echo + echo "Mandatory arguments:" + echo " branch_name The Git branch to test" + echo " test_file The test file to run" + echo + echo "Optional arguments:" + echo " --arch Specify the architecture (amd64 or arm64). If not specified, uses host architecture." + echo " --clean-temp Clean temporary files and VM without prompting" + echo " --clean-all Clean temporary files, VM, and installed packages without prompting" + echo " --without-build Run all tests except the build test" + echo " test1 test2 ... 
Specific tests to run from the test file" + echo + echo "Examples:" + echo " $0 # Run all tests on host architecture" + echo " $0 --arch amd64 # Run all tests on amd64 architecture" + echo " $0 --arch arm64 feature-branch # Run tests on arm64 for feature-branch" + echo " $0 --arch amd64 master makefile certs pull # Run specific tests on amd64" + echo " $0 --clean-temp # Clean temporary files and VM without prompting" + echo " $0 --clean-all # Clean temporary files, VM, and installed packages without prompting" + echo " $0 --without-build # Run all tests except the build test" + echo + echo "The script will create a VM for the specified architecture, set up the environment, and run the specified tests." +} + +# Get host architecture +HOST_ARCH=$(get_host_architecture) + +# Initialize cleanup variables +CLEAN_TEMP=false +CLEAN_ALL=false + +# Parse command line arguments +ARCH="" +WITHOUT_BUILD=false +while [[ $# -gt 0 ]]; do + case $1 in + --arch) + ARCH="$2" + shift 2 + ;; + --clean-temp) + CLEAN_TEMP=true + shift + ;; + --clean-all) + CLEAN_ALL=true + shift + ;; + --without-build) + WITHOUT_BUILD=true + shift + ;; + -h|--help) + show_help + exit 0 + ;; + *) + break + ;; + esac +done + +# If architecture is not specified, use host architecture +if [ -z "$ARCH" ]; then + ARCH="$HOST_ARCH" + log "Architecture not specified. Using host architecture: $ARCH" $COLOR_YELLOW +fi + +# Validate architecture +if [ "$ARCH" != "amd64" ] && [ "$ARCH" != "arm64" ]; then + log "Error: Invalid architecture. Must be either amd64 or arm64." $COLOR_RED + exit 1 +fi + +# Function to check if a branch exists +branch_exists() { + git ls-remote --exit-code --heads origin "$1" &>/dev/null +} + +# Set default branch +DEFAULT_BRANCH="master" + +# VM parameters +VM_NAME="test-rengine-ng" +VM_IMAGE="test-debian.qcow2" +VM_RAM="8G" +VM_CPUS="8" +VM_DISK_SIZE="30G" # Adjust this value as needed + +# SSH parameters +SSH_OPTIONS="-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null" + +# Rengine root directory inside the VM +RENGINE_ROOT='~/rengine' + +# Check if mandatory arguments are provided +if [ $# -lt 2 ]; then + log "Error: branch_name and test_file are mandatory parameters." $COLOR_RED + show_help + exit 1 +fi + +# Extract branch_name and test_file from arguments +RELEASE_VERSION="$1" +TEST_FILE="$2" +shift 2 + +# Check if the branch exists +if ! branch_exists "$RELEASE_VERSION"; then + log "Error: Branch $RELEASE_VERSION does not exist." 
$COLOR_RED + exit 1 +fi + +# Extract test names from remaining arguments +TEST_NAMES="$@" + +# Function to generate test names +generate_test_names() { + local names="" + for name in $TEST_NAMES; do + names+="test_$name " + done + echo $names +} + +# Generate the test names +FORMATTED_TEST_NAMES=$(generate_test_names) + +# Create log directory if it doesn't exist +LOG_DIR="$(pwd)/../logs/tests" +mkdir -p "$LOG_DIR" + +# Generate a unique log file name +TIMESTAMP=$(date +"%Y%m%d_%H%M%S") +LOG_FILE="${LOG_DIR}/test_${TEST_FILE}_log_${TIMESTAMP}.txt" + +# When you're ready to use RELEASE_VERSION: +log "Checking out branch: $RELEASE_VERSION" $COLOR_CYAN + +# Function to check if a command exists +command_exists() { + command -v "$1" >/dev/null 2>&1 +} + +# Install QEMU if not already installed +INSTALLED_PACKAGES_FOR_TESTS="qemu-system-x86 qemu-system-arm qemu-utils cloud-image-utils" +INSTALLED_COMMON_PACKAGES="socat wget openssh-client tar gzip git curl gpg coreutils" + +# Create a temporary directory for the test +TEMP_DIR=$HOME/tmp +mkdir -p $TEMP_DIR +TEST_DIR=$(mktemp -d -p $TEMP_DIR) + +# Function to clean up resources +cleanup() { + local clean_temp=false + local clean_packages=false + + if [ "$CLEAN_TEMP" = true ] || [ "$CLEAN_ALL" = true ]; then + clean_temp=true + fi + + if [ "$CLEAN_ALL" = true ]; then + clean_packages=true + fi + + if [ "$CLEAN_TEMP" = false ] && [ "$CLEAN_ALL" = false ]; then + echo -e "\n\033[1;33mCleanup Confirmation\033[0m" + read -p "Do you want to remove temporary files and VM? (y/n): " temp_response + if [[ "$temp_response" == "y" ]]; then + clean_temp=true + fi + + read -p $'Do you want to uninstall the packages installed for testing? +Installed packages for testing: ('"$INSTALLED_PACKAGES_FOR_TESTS"$') +Installed common packages: ('"$INSTALLED_COMMON_PACKAGES"$') +Only installed packages for testing will be removed, common packages will be left untouched. +You may consider removing these packages by hand. +Type your answer (y/n): ' packages_response + + if [[ "$packages_response" == "y" ]]; then + clean_packages=true + fi + fi + + if [ "$clean_temp" = true ]; then + log "Cleaning up temporary files and VM..." $COLOR_CYAN + # Send powerdown command to QEMU monitor + echo "system_powerdown" | sudo socat - UNIX-CONNECT:/tmp/qemu-monitor.sock 2>/dev/null || true + + # Wait for VM to stop (with timeout) + for i in {1..15}; do + if ! pgrep -f "qemu-system-.*$VM_NAME" > /dev/null; then + log "VM stopped successfully" $COLOR_GREEN + break + fi + sleep 1 + done + + # Force stop if VM is still running + if pgrep -f "qemu-system-.*$VM_NAME" > /dev/null; then + log "Forcing VM to stop..." $COLOR_RED + sudo pkill -f "qemu-system-.*$VM_NAME" || true + fi + + if [[ "$TEST_DIR" == "$HOME/tmp/"* ]]; then + log "Removing temporary directory..." $COLOR_CYAN + rm -rf "$TEST_DIR" + log "Temporary directory removed." $COLOR_GREEN + else + log "Error: TEST_DIR is not in $HOME/tmp. Skipping directory removal for safety." $COLOR_RED + fi + fi + + if [ "$clean_packages" = true ]; then + log "Uninstalling packages..." $COLOR_CYAN + sudo apt-get remove -y $INSTALLED_PACKAGES_FOR_TESTS + sudo apt-get autoremove -y + log "Packages uninstalled." $COLOR_GREEN + fi + + log "Cleanup completed." 
$COLOR_GREEN
+}
+
+# Run cleanup on any script exit; on interruption, exit with 130 after the
+# EXIT trap has run cleanup once (a combined INT TERM EXIT trap would force
+# exit code 130 even on a successful run and clobber the test status)
+trap 'cleanup' EXIT
+trap 'log "Interruption detected."; log "Exiting script."; exit 130' INT TERM
+
+# Function to get the image filename based on architecture
+get_image_filename() {
+ if [ "$ARCH" = "amd64" ]; then
+ echo "debian-12-generic-amd64.qcow2"
+ elif [ "$ARCH" = "arm64" ]; then
+ echo "debian-12-generic-arm64.qcow2"
+ else
+ log "Unsupported architecture: $ARCH" $COLOR_RED
+ exit 1
+ fi
+}
+
+# Get the image filename
+IMAGE_FILENAME=$(get_image_filename)
+
+# Check if the image already exists in TEMP_DIR
+if [ -f "$TEMP_DIR/$IMAGE_FILENAME" ]; then
+ cp "$TEMP_DIR/$IMAGE_FILENAME" "$TEST_DIR/$IMAGE_FILENAME"
+ log "Debian 12 image for $ARCH found in $TEMP_DIR. Using existing image." $COLOR_GREEN
+else
+ # Download appropriate Debian 12 cloud image
+ log "Downloading Debian 12 cloud image for $ARCH..." $COLOR_CYAN
+ if [ "$ARCH" = "amd64" ]; then
+ wget -q https://cloud.debian.org/images/cloud/bookworm/latest/debian-12-generic-amd64.qcow2 -O "$TEST_DIR/$IMAGE_FILENAME"
+ elif [ "$ARCH" = "arm64" ]; then
+ wget -q https://cloud.debian.org/images/cloud/bookworm/latest/debian-12-generic-arm64.qcow2 -O "$TEST_DIR/$IMAGE_FILENAME"
+ fi
+
+ if [ $? -eq 0 ]; then
+ log "Debian 12 image for $ARCH downloaded successfully." $COLOR_GREEN
+ else
+ log "Failed to download Debian 12 image for $ARCH." $COLOR_RED
+ exit 1
+ fi
+fi
+
+# Create a temporary file for communication
+TEMP_FILE="$TEST_DIR/subshell_status.txt"
+
+# Create a named pipe for logging
+PIPE_FILE="$TEST_DIR/log_pipe"
+mkfifo "$PIPE_FILE"
+
+# Start tee in the background to handle logging
+tee -a "$LOG_FILE" < "$PIPE_FILE" &
+TEE_PID=$!
+
+# Execute the tests in a subshell
+(
+ # Redirect all output to the named pipe
+ exec > "$PIPE_FILE" 2>&1
+
+ # Install QEMU & dependencies
+ log "Installing QEMU..." $COLOR_CYAN
+ sudo apt-get update
+ sudo apt-get install -y $INSTALLED_PACKAGES_FOR_TESTS $INSTALLED_COMMON_PACKAGES
+
+ # Copy project files to the temporary directory
+ log "Copying project files to temporary directory..." $COLOR_CYAN
+
+ # Compress the project directory
+ log "Compressing project files..." $COLOR_CYAN
+ (cd .. && tar -czf "$TEST_DIR/rengine-project.tar.gz" --exclude='docker/secrets' .)
+
+ cd "$TEST_DIR"
+
+ # Create a larger disk image
+ qemu-img create -f qcow2 -o preallocation=metadata "$TEST_DIR/large-debian.qcow2" $VM_DISK_SIZE
+
+ # Resize the downloaded image
+ qemu-img resize --shrink "$TEST_DIR/$IMAGE_FILENAME" $VM_DISK_SIZE
+
+ # Combine the two images
+ qemu-img convert -O qcow2 -o preallocation=metadata "$TEST_DIR/$IMAGE_FILENAME" "$TEST_DIR/large-debian.qcow2"
+
+ # Create a copy of the image for testing
+ mv large-debian.qcow2 test-debian.qcow2
+
+ # Generate SSH key pair
+ log "Generating SSH key pair..." $COLOR_CYAN
+ ssh-keygen -t ed25519 -f ./id_ed25519 -N ""
+
+ # Create a cloud-init configuration file that provisions the rengine user
+ # with passwordless sudo and the SSH public key generated above
+ cat > cloud-init.yml <<EOF
+#cloud-config
+users:
+  - name: rengine
+    shell: /bin/bash
+    sudo: ALL=(ALL) NOPASSWD:ALL
+    ssh_authorized_keys:
+      - $(cat ./id_ed25519.pub)
+EOF
+ cloud-localds seed.img cloud-init.yml
+
+ # Boot the VM headless, forwarding guest SSH to local port 2222 and exposing
+ # the monitor socket that cleanup() uses to power the VM down
+ if [ "$ARCH" = "amd64" ]; then
+ QEMU_BIN="qemu-system-x86_64"
+ QEMU_OPTS="-enable-kvm -cpu host"
+ else
+ QEMU_BIN="qemu-system-aarch64"
+ # arm64 guests boot through the virt machine type with EFI firmware
+ QEMU_OPTS="-M virt -cpu max -bios /usr/share/qemu-efi-aarch64/QEMU_EFI.fd"
+ fi
+ sudo $QEMU_BIN $QEMU_OPTS -name "$VM_NAME" -m "$VM_RAM" -smp "$VM_CPUS" \
+ -drive file="$VM_IMAGE",format=qcow2 \
+ -drive file=seed.img,format=raw \
+ -netdev user,id=net0,hostfwd=tcp::2222-:22 -device virtio-net-pci,netdev=net0 \
+ -monitor unix:/tmp/qemu-monitor.sock,server,nowait \
+ -display none -daemonize
+
+ # Wait for SSH to become available in the guest
+ log "Waiting for SSH to become available..." $COLOR_CYAN
+ for i in {1..30}; do
+ if ssh -p 2222 $SSH_OPTIONS -i ./id_ed25519 rengine@localhost true 2>/dev/null </dev/null; then
+ log "SSH is now available" $COLOR_GREEN
+ break
+ fi
+ if [ $i -eq 30 ]; then
+ log "Timed out waiting for SSH" $COLOR_RED
+ exit 1
+ fi
+ sleep 10
+ done
+
+ # Run setup commands in the VM
+ log "Setting up locales in the VM..." $COLOR_CYAN
+ ssh -p 2222 $SSH_OPTIONS -i ./id_ed25519 rengine@localhost << EOF
+ # Update and install dependencies
+ sudo apt-get update
+ sudo apt-get install -y locales-all
+EOF
+
+ # Copy compressed project files to the VM
+ log "Copying compressed project files to the VM..."
$COLOR_CYAN + scp -P 2222 $SSH_OPTIONS -i ./id_ed25519 "$TEST_DIR/rengine-project.tar.gz" rengine@localhost:~ + + log "Decompressing project files on the VM..." $COLOR_CYAN + ssh -p 2222 $SSH_OPTIONS -i ./id_ed25519 rengine@localhost << EOF + sudo apt-get install git -y + mkdir -p $RENGINE_ROOT + tar -xzf ~/rengine-project.tar.gz -C $RENGINE_ROOT + rm ~/rengine-project.tar.gz + cd $RENGINE_ROOT + cat > $RENGINE_ROOT/.git/config << EOG +[core] + repositoryformatversion = 0 + filemode = true + bare = false + logallrefupdates = true +[remote "origin"] + url = https://github.com/Security-Tools-Alliance/rengine-ng.git + fetch = +refs/heads/*:refs/remotes/origin/* +[branch "master"] + remote = origin + merge = refs/heads/master + vscode-merge-base = origin/master +EOG + cp $RENGINE_ROOT/.env-dist $RENGINE_ROOT/.env +EOF + + # Run setup commands in the VM + log "Setting up Docker and the application in the VM..." $COLOR_CYAN + ssh -p 2222 $SSH_OPTIONS -i ./id_ed25519 rengine@localhost << EOF + # Update and install dependencies + sudo apt-get install -y ca-certificates curl gnupg make htop iftop net-tools + + # Add Docker's official GPG key + sudo install -m 0755 -d /etc/apt/keyrings + curl -fsSL https://download.docker.com/linux/debian/gpg | sudo gpg --dearmor -o /etc/apt/keyrings/docker.gpg + sudo chmod a+r /etc/apt/keyrings/docker.gpg + + # Set up Docker repository + echo \ + "deb [arch=\$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/debian \ + \$(. /etc/os-release && echo "\$VERSION_CODENAME") stable" | \ + sudo tee /etc/apt/sources.list.d/docker.list > /dev/null + + # Install Docker Engine, Docker Compose and python libs + sudo apt-get update + sudo apt-get install -y docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin python3-docker python3-parameterized + + # Add rengine user to docker group + sudo usermod -aG docker rengine + newgrp docker + + # Run tests + cd $RENGINE_ROOT + if [ "$WITHOUT_BUILD" = true ]; then + python3 tests/test_$TEST_FILE.py ${FORMATTED_TEST_NAMES:+--tests $FORMATTED_TEST_NAMES} --exclude-build + else + python3 tests/test_$TEST_FILE.py ${FORMATTED_TEST_NAMES:+--tests $FORMATTED_TEST_NAMES} + fi +EOF + + # Get the test status + TEST_STATUS=$? + + # Write the test status to the temporary file + echo $TEST_STATUS > "$TEMP_FILE" + + # Signal that the subshell has finished + echo "DONE" >> "$TEMP_FILE" + + log "Tests completed with status: $TEST_STATUS" $COLOR_GREEN + +) & + +SUBSHELL_PID=$! + +log "Waiting for tests to complete..." $COLOR_CYAN + +# Wait for the subshell to finish (with a timeout of 2 hours) +for i in {1..7200}; do + if [ -f "$TEMP_FILE" ] && grep -q "DONE" "$TEMP_FILE"; then + log "Tests finished" $COLOR_GREEN + break + fi + sleep 1 + if [ $((i % 60)) -eq 0 ]; then + log "Still waiting for tests to complete... (${i}s)" $COLOR_YELLOW + fi +done + +# Check if the subshell completed +if [ ! -f "$TEMP_FILE" ] || ! 
grep -q "DONE" "$TEMP_FILE"; then + log "Error: Tests did not complete within the allocated time" $COLOR_RED + TEST_STATUS=1 +else + # Get the test status from the temporary file + TEST_STATUS=$(head -n 1 "$TEMP_FILE") +fi + +# Clean up +rm -f "$TEMP_FILE" +rm -f "$PIPE_FILE" +kill $TEE_PID +wait $SUBSHELL_PID + +# Exit with the status +exit $TEST_STATUS \ No newline at end of file diff --git a/scripts/uninstall.sh b/scripts/uninstall.sh index 3daf01a03..c36bbb6a0 100755 --- a/scripts/uninstall.sh +++ b/scripts/uninstall.sh @@ -1,27 +1,7 @@ #!/bin/bash -# Define color codes. -# Using `tput setaf` at some places because the variable only works with log/echo - -COLOR_BLACK=0 -COLOR_RED=1 -COLOR_GREEN=2 -COLOR_YELLOW=3 -COLOR_BLUE=4 -COLOR_MAGENTA=5 -COLOR_CYAN=6 -COLOR_WHITE=7 -COLOR_DEFAULT=$COLOR_WHITE # Use white as default for clarity - -# Log messages in different colors -log() { - local color=${2:-$COLOR_DEFAULT} # Use default color if $2 is not set - if [ "$color" -ne $COLOR_DEFAULT ]; then - tput setaf "$color" - fi - printf "$1\r\n" - tput sgr0 # Reset text color -} +# Import common functions +source "$(pwd)/common_functions.sh" cat ../web/art/reNgine.txt @@ -38,7 +18,7 @@ log "" log "Uninstalling reNgine-ng..." $COLOR_CYAN log "" -tput setaf 1 +tput setaf $COLOR_RED; read -p "This action will stop and remove all containers, volumes and networks of reNgine-ng. Do you want to continue? [y/n] " -n 1 log "" @@ -47,23 +27,56 @@ then log "" log "Stopping reNgine-ng..." $COLOR_CYAN - docker stop rengine-web-1 rengine-db-1 rengine-celery-1 rengine-celery-beat-1 rengine-redis-1 rengine-proxy-1 - log "Stopped reNgine-ng" $COLOR_GREEN - log "" - - log "Removing all containers related to reNgine-ng..." $COLOR_CYAN - docker rm rengine-web-1 rengine-db-1 rengine-celery-1 rengine-celery-beat-1 rengine-redis-1 rengine-proxy-1 - log "Removed all containers related to reNgine-ng" $COLOR_GREEN + if (cd .. && make down); then + log "Stopped reNgine-ng" $COLOR_GREEN + else + log "Failed to stop reNgine-ng" $COLOR_RED + exit 1 + fi log "" log "Removing all volumes related to reNgine-ng..." $COLOR_CYAN - docker volume rm rengine_gf_patterns rengine_github_repos rengine_nuclei_templates rengine_postgres_data rengine_scan_results rengine_tool_config rengine_static_volume rengine_wordlist - log "Removed all volumes related to reNgine-ng" $COLOR_GREEN + if docker volume rm $(docker volume ls -q --filter name=rengine_) 2>/dev/null || true; then + log "Removed all volumes related to reNgine-ng" $COLOR_GREEN + else + log "Warning: Failed to remove some or all volumes" $COLOR_YELLOW + fi log "" log "Removing all networks related to reNgine-ng..." $COLOR_CYAN - docker network rm rengine_rengine_network rengine_default - log "Removed all networks related to reNgine-ng" $COLOR_GREEN + if docker network rm rengine_network; then + log "Removed all networks related to reNgine-ng" $COLOR_GREEN + else + log "Warning: Failed to remove rengine_network" $COLOR_YELLOW + fi + log "" + + log "Removing static files and secrets from reNgine-ng..." $COLOR_CYAN + + # Remove web/staticfiles directory + if [ -d "../web/staticfiles" ]; then + log "Removing web/staticfiles directory..." $COLOR_CYAN + if (cd .. && rm -rf web/staticfiles); then + log "Removed web/staticfiles directory" $COLOR_GREEN + else + log "Warning: Failed to remove web/staticfiles directory" $COLOR_YELLOW + fi + else + log "web/staticfiles directory not found, skipping..." 
$COLOR_YELLOW + fi + + # Remove docker/secrets directory + if [ -d "../docker/secrets" ]; then + log "Removing docker/secrets directory..." $COLOR_CYAN + if (cd .. && rm -rf docker/secrets); then + log "Removed docker/secrets directory" $COLOR_GREEN + else + log "Warning: Failed to remove docker/secrets directory" $COLOR_YELLOW + fi + else + log "docker/secrets directory not found, skipping..." $COLOR_YELLOW + fi + log "" else log "" @@ -71,7 +84,7 @@ else exit 1 fi -tput setaf 1; +tput setaf $COLOR_RED; read -p "Do you want to remove Docker images related to reNgine-ng? [y/n] " -n 1 -r log "" @@ -79,15 +92,18 @@ if [[ $REPLY =~ ^[Yy]$ ]] then log "" log "Removing all Docker images related to reNgine-ng..." $COLOR_CYAN - docker image rm rengine-celery rengine-celery-beat rengine-certs docker.pkg.github.com/yogeshojha/rengine/rengine nginx:alpine redis:alpine postgres:12.3-alpine - log "Removed all Docker images" $COLOR_GREEN + if (cd .. && make remove_images); then + log "Removed all Docker images" $COLOR_GREEN + else + log "Warning: Failed to remove some or all Docker images" $COLOR_YELLOW + fi log "" else log "" log "Skipping removal of Docker images" $COLOR_CYAN fi -tput setaf 1; +tput setaf $COLOR_RED; read -p "Do you want to remove all Docker-related leftovers? [y/n] " -n 1 -r log "" @@ -95,8 +111,11 @@ if [[ $REPLY =~ ^[Yy]$ ]] then log "" log "Removing all Docker-related leftovers..." $COLOR_CYAN - docker system prune -a -f - log "Removed all Docker-related leftovers" $COLOR_GREEN + if docker system prune -a -f; then + log "Removed all Docker-related leftovers" $COLOR_GREEN + else + log "Warning: Failed to remove some or all Docker-related leftovers" $COLOR_YELLOW + fi log "" else log "" @@ -104,4 +123,4 @@ else log "" fi -log "Finished uninstalling." $COLOR_GREEN +log "Finished uninstalling." $COLOR_GREEN \ No newline at end of file diff --git a/scripts/update.sh b/scripts/update.sh new file mode 100755 index 000000000..85f248556 --- /dev/null +++ b/scripts/update.sh @@ -0,0 +1,149 @@ +#!/bin/bash + +# Import common functions +source "$(pwd)/common_functions.sh" + +# Check for root privileges +if [ "$(whoami)" != "root" ]; then + log "Error updating reNgine-ng: please run this script as root!" $COLOR_RED + log "Example: sudo ./update.sh" $COLOR_RED + exit 1 +fi + +# Function to compare version strings +version_compare() { + if [[ $1 == $2 ]] + then + return 0 + fi + local IFS=. + local i ver1=($1) ver2=($2) + for ((i=${#ver1[@]}; i<${#ver2[@]}; i++)) + do + ver1[i]=0 + done + for ((i=0; i<${#ver1[@]}; i++)) + do + if [[ -z ${ver2[i]} ]] + then + ver2[i]=0 + fi + if ((10#${ver1[i]} > 10#${ver2[i]})) + then + return 1 + fi + if ((10#${ver1[i]} < 10#${ver2[i]})) + then + return 2 + fi + done + return 0 +} + +# Get current version +CURRENT_VERSION=$(cat ../web/reNgine/version.txt) + +# Get latest release version from GitHub +LATEST_VERSION=$(curl -s https://api.github.com/repos/Security-Tools-Alliance/rengine-ng/releases/latest | grep '"tag_name":' | sed -E 's/.*"([^"]+)".*/\1/' | sed 's/v//') + +cat ../web/art/reNgine.txt + +# Compare versions +version_compare $CURRENT_VERSION $LATEST_VERSION +comparison_result=$? + +log "\n" $COLOR_DEFAULT +log "Current version: $CURRENT_VERSION" $COLOR_CYAN +log "Latest version: $LATEST_VERSION" $COLOR_CYAN +log "\n" $COLOR_DEFAULT + +case $comparison_result in + 0) log "You are already on the latest version." $COLOR_GREEN + exit 0 + ;; + 1) log "Your version is newer than the latest release. No update needed." 
$COLOR_YELLOW
+ exit 0
+ ;;
+ 2) log "An update is available." $COLOR_CYAN
+ ;;
+ *) log "Error comparing versions." $COLOR_RED
+ exit 1
+ ;;
+esac
+
+read -p "Do you want to update to the latest version? (y/n) " answer
+
+if [[ $answer == "y" ]]; then
+ while true; do
+ read -p "Do you want to update from pre-built images or build from source? (pre-built/source, default is pre-built): " install_type
+ install_type=${install_type:-pre-built} # Set default to pre-built if empty
+ if [[ $install_type == "pre-built" || $install_type == "source" ]]; then
+ break
+ else
+ log "Invalid input. Please enter 'pre-built' or 'source'." $COLOR_YELLOW
+ fi
+ done
+
+ log "Selected installation type: $install_type" $COLOR_CYAN
+
+ while true; do
+ read -p "Do you want to apply your local changes after updating? (y/n) " apply_changes
+ if [[ $apply_changes == "y" || $apply_changes == "n" ]]; then
+ break
+ else
+ log "Invalid input. Please enter 'y' or 'n'." $COLOR_YELLOW
+ fi
+ done
+
+ if [[ $apply_changes == "y" ]]; then
+ if ! (cd .. && make down); then
+ log "Failed to stop reNgine-ng" $COLOR_RED
+ exit 1
+ fi
+ if ! (sudo -u rengine git stash save && sudo -u rengine git pull && sudo -u rengine git stash apply); then
+ log "Failed to update and apply local changes" $COLOR_RED
+ exit 1
+ fi
+ if [[ $install_type == "pre-built" ]]; then
+ if ! (cd .. && make up); then
+ log "Failed to pull and start updated images" $COLOR_RED
+ exit 1
+ fi
+ elif [[ $install_type == "source" ]]; then
+ if ! (cd .. && make build_up); then
+ log "Failed to build and start updated images" $COLOR_RED
+ exit 1
+ fi
+ fi
+ log "Successfully updated to version $LATEST_VERSION and local changes have been reapplied" $COLOR_GREEN
+ elif [[ $apply_changes == "n" ]]; then
+ if ! (cd .. && make down); then
+ log "Failed to stop reNgine-ng" $COLOR_RED
+ exit 1
+ fi
+ if ! (sudo -u rengine git stash && sudo -u rengine git stash drop && sudo -u rengine git pull); then
+ log "Failed to update" $COLOR_RED
+ exit 1
+ fi
+ if [[ $install_type == "pre-built" ]]; then
+ if ! (cd .. && make up); then
+ log "Failed to pull and start updated images" $COLOR_RED
+ exit 1
+ fi
+ elif [[ $install_type == "source" ]]; then
+ if ! (cd .. && make build_up); then
+ log "Failed to build and start updated images" $COLOR_RED
+ exit 1
+ fi
+ else
+ log "Invalid installation type. Update cancelled." $COLOR_RED
+ exit 1
+ fi
+ log "Successfully updated to version $LATEST_VERSION" $COLOR_GREEN
+ else
+ log "Invalid input. Update cancelled." $COLOR_RED
+ exit 1
+ fi
+else
+ log "Update cancelled." $COLOR_YELLOW
+fi
\ No newline at end of file
diff --git a/tests/test_makefile.py b/tests/test_makefile.py
new file mode 100644
index 000000000..f3b117ff7
--- /dev/null
+++ b/tests/test_makefile.py
@@ -0,0 +1,533 @@
+"""
+This module contains tests for the Makefile commands in the reNgine-ng project.
+It verifies various make commands and their effects on the Docker environment.
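+Tests shell out to the project's make targets and inspect the resulting
+container state through the Docker SDK, so a running Docker daemon is required.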
+""" + +import os +import unittest +import subprocess +import time +import signal +import sys +from functools import wraps +from docker import from_env as docker_from_env +from docker.errors import NotFound + +# Add these constants for colors +BLACK = '\033[30m' +RED = '\033[31m' +GREEN = '\033[32m' +YELLOW = '\033[33m' +BLUE = '\033[34m' +MAGENTA = '\033[35m' +CYAN = '\033[36m' +WHITE = '\033[37m' +ENDC = '\033[0m' + +print("Starting test_makefile.py") +print(f"Current working directory: {os.getcwd()}") + +RENGINE_PATH = "/home/rengine/rengine" + +# Read version from version.txt +with open( + f"{RENGINE_PATH}/web/reNgine/version.txt", "r", encoding="utf-8" +) as version_file: + RENGINE_VERSION = version_file.read().strip() + + +class TestMakefile(unittest.TestCase): + """ + A test suite for verifying the functionality of the Makefile commands in the reNgine-ng project. + This class tests various make commands and their effects on the Docker environment. + """ + + expected_services = [ + "rengine-web-1", + "rengine-db-1", + "rengine-celery-1", + "rengine-celery-beat-1", + "rengine-redis-1", + "rengine-proxy-1", + "rengine-ollama-1", + ] + expected_images = [ + f"ghcr.io/security-tools-alliance/rengine-ng:rengine-celery-v{RENGINE_VERSION}", + f"ghcr.io/security-tools-alliance/rengine-ng:rengine-web-v{RENGINE_VERSION}", + f"ghcr.io/security-tools-alliance/rengine-ng:rengine-postgres-v{RENGINE_VERSION}", + f"ghcr.io/security-tools-alliance/rengine-ng:rengine-redis-v{RENGINE_VERSION}", + f"ghcr.io/security-tools-alliance/rengine-ng:rengine-ollama-v{RENGINE_VERSION}", + f"ghcr.io/security-tools-alliance/rengine-ng:rengine-certs-v{RENGINE_VERSION}", + f"ghcr.io/security-tools-alliance/rengine-ng:rengine-proxy-v{RENGINE_VERSION}", + ] + + @classmethod + def setUpClass(cls): + """ + Set up the test environment before running any tests. + This method initializes the Docker client. + """ + cls.client = docker_from_env() + + # Search for the Makefile by traversing up the parent directories + cls.makefile_dir = cls.find_makefile_directory() + if not cls.makefile_dir: + raise FileNotFoundError("Makefile not found in the current directory or its parents") + + # Change the working directory to the one containing the Makefile + os.chdir(cls.makefile_dir) + print(f"Changed working directory to: {os.getcwd()}") + + @classmethod + def find_makefile_directory(cls): + """ + Search for the directory containing the Makefile by traversing up the directory tree. + """ + current_dir = os.path.abspath(os.getcwd()) + while current_dir != '/': + if os.path.exists(os.path.join(current_dir, 'Makefile')): + return current_dir + current_dir = os.path.dirname(current_dir) + return None + + @classmethod + def tearDownClass(cls): + """ + Clean up the test environment after all tests have been run. + This method stops all services. + """ + cls.run_make_command("down") + + @classmethod + def run_make_command(cls, command, capture_output=False, env_vars=None): + """ + Run a make command and optionally capture its output. + """ + cmd = f"make {command}" + if env_vars: + cmd = " ".join([f"{k}={v}" for k, v in env_vars.items()]) + " " + cmd + + print(f"{YELLOW}Executing command: {cmd}{ENDC}") + if capture_output: + make_result = subprocess.run( + cmd, shell=True, capture_output=True, text=True, check=False + ) + if make_result.returncode != 0: + print(f"Command failed. 
Stderr: {make_result.stderr}") + return make_result.stdout, make_result.stderr, make_result.returncode + make_result = subprocess.run(cmd, shell=True, check=False) + if make_result.returncode != 0: + print(f"Command failed. Returncode: {make_result.returncode}") + return make_result.returncode + + def assert_containers_running(self): + """ + Assert that all expected services are running. + """ + running_containers = self.client.containers.list() + for service in self.expected_services: + container = next((c for c in running_containers if service in c.name), None) + self.assertIsNotNone(container, f"Service {service} is not running") + self.assertEqual( + container.status, + "running", + f"Container {container.name} is not in 'running' state", + ) + + def clean_secrets(self): + """ + Clean up the secrets directory. + """ + secrets_path = f"{RENGINE_PATH}/docker/secrets" + if os.path.exists(secrets_path): + subprocess.run(f"sudo rm -rf {secrets_path}", shell=True, check=False) + + @staticmethod + def with_cleanup(func): + """ + Decorator to ensure cleanup after test execution. + """ + + @wraps(func) + def wrapper(self, *args, **kwargs): + try: + return func(self, *args, **kwargs) + finally: + self.clean_secrets() + return wrapper + + def test_pull(self): + """ + Test the `make pull` command. + This test verifies that all required Docker images can be pulled successfully. + """ + returncode = self.run_make_command("pull") + self.assertEqual(returncode, 0) + images = self.client.images.list() + for image in self.expected_images: + self.assertTrue( + any(image in img.tags[0] for img in images if img.tags), + f"Image {image} not found", + ) + + def test_images(self): + """ + Test the `make images` command. + This test verifies that all required Docker images are present and correctly tagged. + """ + self.run_make_command("pull") + stdout, _, returncode = self.run_make_command( + "images", capture_output=True + ) + self.assertEqual(returncode, 0) + for image in self.expected_images: + repo, tag = image.split(":") + self.assertIn(repo, stdout, f"Repository {repo} not found in output") + self.assertIn(tag, stdout, f"Tag {tag} not found in output") + + @with_cleanup + def test_start_services_up(self): + """ + Test the `make up` command. + This test verifies that the application can be started successfully with the 'up' command. + """ + print(f"{BLUE}test_start_services_up{ENDC}") + print(f"{CYAN}Test the 'up' make command. ... {ENDC}\n") + self._test_start_services("up", {}) + + @with_cleanup + def test_start_services_build(self): + """ + Test the `make build` command. + This test verifies that the application can be built and started successfully with the 'build' command. + """ + print(f"{BLUE}test_start_services_build{ENDC}") + print(f"{CYAN}Test the 'build' make command. ... {ENDC}\n") + self._test_start_services("build", {}) + + def _test_start_services(self, command, env_vars): + """ + Helper method to test start services. + This method contains the common logic for testing 'up' and 'build' commands. 
+ """ + self.run_make_command("down") + self.run_make_command("certs") + + if "build" in command: + _, stderr, returncode = self.run_make_command( + command, capture_output=True, env_vars=env_vars + ) + self.assertEqual( + returncode, 0, f"Build command failed with error: {stderr}" + ) + _, stderr, returncode = self.run_make_command( + "up", capture_output=True, env_vars=env_vars + ) + else: + _, stderr, returncode = self.run_make_command( + command, capture_output=True, env_vars=env_vars + ) + + self.assertEqual( + returncode, 0, f"{command} command failed with error: {stderr}" + ) + self.assert_containers_running() + + @with_cleanup + def test_restart_services(self): + """ + Test the `make restart` command with various configurations. + This test verifies that services can be restarted successfully in different scenarios. + """ + print(f"{BLUE}test_restart_services (__main__.TestMakefile.test_restart_services){ENDC}") + print(f"{CYAN}Test the 'restart' make command with various configurations. ... {ENDC}") + scenarios = [ + ("restart", {}, []), + ("restart", {"DEV": "1"}, []), + ("restart", {"COLD": "1"}, []), + ("restart", {}, ["web"]), + ("restart", {}, ["celery"]), + ] + + for command, env_vars, services in scenarios: + with self.subTest(command=command, env_vars=env_vars, services=services): + self._test_restart_services(command, env_vars, services) + + def _test_restart_services(self, command, env_vars, services): + """ + Helper method to test restart services. + This method contains the common logic for testing various restart scenarios. + """ + self.run_make_command("certs") + self.run_make_command("up") + + restart_command = f"{command} {' '.join(services)}" + _, stderr, returncode = self.run_make_command( + restart_command.strip(), capture_output=True, env_vars=env_vars + ) + + self.assertEqual(returncode, 0, f"Restart command failed with error: {stderr}") + self.assert_containers_running() + + @with_cleanup + def test_logs(self): + """ + Test the `make logs` command. + This test verifies that logs can be retrieved and contain expected content. + It ensures services are up before checking logs and limits the log collection time. + """ + self.run_make_command("certs") + self.run_make_command("up") + + logs_process = subprocess.Popen( + "make logs", + shell=True, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + text=True, + start_new_session=True, + ) + time.sleep(5) + os.killpg(os.getpgid(logs_process.pid), signal.SIGTERM) + stdout, _ = logs_process.communicate(timeout=1) + + expected_services = [ + "redis-1", + "db-1", + "web-1", + "celery-1", + "celery-beat-1", + "ollama-1", + "proxy-1", + ] + for service in expected_services: + self.assertIn(service, stdout, f"Logs for {service} not found") + + @with_cleanup + def test_superuser(self): + """ + Test the superuser-related make commands. + This test verifies that a superuser can be created, its password changed, and then deleted. 
+ """ + self.run_make_command("certs") + self.run_make_command("up") + + create_result = subprocess.run( + "make superuser_create isNonInteractive=true", + shell=True, + capture_output=True, + text=True, + check=False, + ) + + self.assertEqual( + create_result.returncode, + 0, + f"Superuser creation failed with error: {create_result.stderr}", + ) + self.assertIn("Superuser created successfully", create_result.stdout) + + changepassword_result = subprocess.run( + "make superuser_changepassword isNonInteractive=true", + shell=True, + capture_output=True, + text=True, + check=False, + ) + + self.assertEqual( + changepassword_result.returncode, + 0, + f"Superuser password change failed with error: {changepassword_result.stderr}", + ) + + delete_result = subprocess.run( + "make superuser_delete", + shell=True, + capture_output=True, + text=True, + check=False, + ) + + self.assertEqual( + delete_result.returncode, + 0, + f"Superuser deletion failed with error: {delete_result.stderr}", + ) + + @with_cleanup + def test_migrate(self): + """ + Test the `make migrate` command. + This test verifies that database migrations can be applied successfully. + """ + # First, generate certificates and start services + self.run_make_command("certs") + self.run_make_command("up") + + # Now run the migrate command + stdout, _, returncode = self.run_make_command( + "migrate", capture_output=True + ) + self.assertEqual(returncode, 0) + self.assertIn("Apply all migrations", stdout) + + @with_cleanup + def test_certs(self): + """ + Test the `make certs` command. + This test verifies that SSL certificates can be generated successfully. + """ + returncode = self.run_make_command("certs") + self.assertEqual(returncode, 0) + self.assertTrue( + os.path.exists(f"{RENGINE_PATH}/docker/secrets/certs/rengine_chain.pem") + ) + self.assertTrue( + os.path.exists(f"{RENGINE_PATH}/docker/secrets/certs/rengine_rsa.key") + ) + self.assertTrue( + os.path.exists(f"{RENGINE_PATH}/docker/secrets/certs/rengine.pem") + ) + + @with_cleanup + def test_down(self): + """ + Test the `make down` command. + This test verifies that all services can be stopped successfully. + """ + # First, generate certificates and start services + self.run_make_command("certs") + self.run_make_command("up") + + # Execute the 'down' command + returncode = self.run_make_command("down") + self.assertEqual(returncode, 0) + + # Verify that none of the expected services are running + running_containers = self.client.containers.list() + for service in self.expected_services: + self.assertFalse( + any(service in container.name for container in running_containers), + f"Service {service} is still running after 'down' command", + ) + + # Verify that all associated containers are stopped + all_containers = self.client.containers.list(all=True) + for container in all_containers: + if any(service in container.name for service in self.expected_services): + try: + container_info = container.attrs + self.assertIn( + container_info["State"]["Status"], + ["exited", "dead"], + f"Container {container.name} is not stopped after 'down' command", + ) + except NotFound: + # If the container is not found, it's considered stopped + pass + + def test_prune(self): + """ + Test the `make prune` command. + This test verifies that unused Docker volumes can be removed successfully. 
+ """ + # Ensure all services are down before pruning + self.run_make_command("down") + + # Run the prune command + returncode = self.run_make_command("prune") + self.assertEqual(returncode, 0, "Prune command failed") + + # Check for reNgine-related volumes + volumes = self.client.volumes.list() + rengine_volumes = [v for v in volumes if v.name.startswith("rengine_")] + + if rengine_volumes: + volume_names = ", ".join([v.name for v in rengine_volumes]) + self.fail(f"reNgine volumes still exist after pruning: {volume_names}") + + print(f"Total volumes remaining: {len(volumes)}") + print("Volumes not removed:") + for volume in volumes: + print(f"- {volume.name}") + +def suite(tests_to_run=None, exclude_build=False): + """ + Create a test suite with specified or all tests. + + Args: + tests_to_run (list): List of test names to run. If None, all tests are run. + exclude_build (bool): If True, excludes the build test from the suite. + + Returns: + unittest.TestSuite: The test suite to run. + """ + all_tests = [ + "test_certs", + "test_pull", + "test_images", + "test_start_services_up", + "test_superuser", + "test_migrate", + "test_logs", + "test_restart_services", + "test_start_services_build", + "test_down", + "test_prune", + ] + + if exclude_build: + all_tests.remove("test_start_services_build") + + tests_to_execute = tests_to_run if tests_to_run else all_tests + + test_suite = unittest.TestSuite() + executed_tests = [] + skipped_tests = [] + + for test in tests_to_execute: + if test in all_tests: + test_method = getattr(TestMakefile, test, None) + if test_method and callable(test_method): + test_suite.addTest(TestMakefile(test)) + executed_tests.append(test) + else: + skipped_tests.append(test) + print(f"Warning: Test method '{test}' not found in TestMakefile. Skipping.") + else: + skipped_tests.append(test) + print(f"Warning: Test '{test}' not in the list of available tests. Skipping.") + + # Store test information for later display + test_info = { + 'executed': executed_tests, + 'skipped': skipped_tests + } + + return test_suite, test_info + + +if __name__ == "__main__": + import argparse + + parser = argparse.ArgumentParser(description="Run reNgine-ng Makefile tests") + parser.add_argument("--exclude-build", action="store_true", help="Exclude build test") + parser.add_argument("--tests", nargs="*", help="Specific tests to run") + args = parser.parse_args() + + runner = unittest.TextTestRunner(verbosity=1) + test_suite, test_info = suite(args.tests, args.exclude_build) + result = runner.run(test_suite) + + # Display test summary + print(f"\n{GREEN}Test Execution Summary:{ENDC}") + print(f"{YELLOW}Tests executed:{ENDC}") + for test in test_info['executed']: + print(f"- {test}") + if test_info['skipped']: + print(f"\n{RED}Tests skipped:{ENDC}") + for test in test_info['skipped']: + print(f"- {test}") + + sys.exit(not result.wasSuccessful()) diff --git a/update.sh b/update.sh deleted file mode 100755 index 32d957442..000000000 --- a/update.sh +++ /dev/null @@ -1,38 +0,0 @@ -#!/bin/bash - -# Define color codes. 
-# Using `tput setaf` at some places because the variable only works with log/echo - -COLOR_BLACK=0 -COLOR_RED=1 -COLOR_GREEN=2 -COLOR_YELLOW=3 -COLOR_BLUE=4 -COLOR_MAGENTA=5 -COLOR_CYAN=6 -COLOR_WHITE=7 -COLOR_DEFAULT=$COLOR_WHITE # Use white as default for clarity - -# Log messages in different colors -log() { - local color=${2:-$COLOR_DEFAULT} # Use default color if $2 is not set - if [ "$color" -ne $COLOR_DEFAULT ]; then - tput setaf "$color" - fi - printf "$1\r\n" - tput sgr0 # Reset text color -} - -read -p "Do you want to apply your local changes after updating? (y/n) " answer - -if [[ $answer == "y" ]]; then - make down && git stash save && git pull && git stash apply && make build && make up - tput setaf 2; - echo "Successfully updated" -elif [[ $answer == "n" ]]; then - make down && git stash && git stash drop && git pull && make build && make up - tput setaf 2; - echo "Successfully updated" -else - echo "Invalid input. Please enter 'y' or 'n'." -fi diff --git a/web/Dockerfile b/web/Dockerfile deleted file mode 100644 index 72b13433a..000000000 --- a/web/Dockerfile +++ /dev/null @@ -1,117 +0,0 @@ -# Base image -FROM --platform=linux/amd64 ubuntu:22.04 - -# Labels and Credits -LABEL \ - name="reNgine" \ - author="Yogesh Ojha " \ - description="reNgine is a automated pipeline of recon process, useful for information gathering during web application penetration testing." - -# Environment Variables -ENV DEBIAN_FRONTEND="noninteractive" \ - DATABASE="postgres" -ENV PYTHONDONTWRITEBYTECODE 1 -ENV PYTHONUNBUFFERED 1 -ENV GOROOT="/usr/local/go" -ENV GOPATH=$HOME/go -ENV PATH="${PATH}:${GOROOT}/bin:${GOPATH}/bin" - -# Install Python -RUN apt update -y && \ - apt update -y && \ - apt install -y \ - python3.10 \ - python3-dev \ - python3-pip - -# Install essential packages -RUN apt install -y --no-install-recommends \ - build-essential \ - cmake \ - geoip-bin \ - geoip-database \ - gcc \ - git \ - libpq-dev \ - libpango-1.0-0 \ - libpangoft2-1.0-0 \ - libpcap-dev \ - netcat \ - nmap \ - x11-utils \ - xvfb \ - wget \ - curl \ - python3-netaddr \ - software-properties-common - -RUN add-apt-repository ppa:mozillateam/ppa - -# Download and install go 1.20 -RUN wget https://golang.org/dl/go1.21.4.linux-amd64.tar.gz -RUN tar -xvf go1.21.4.linux-amd64.tar.gz -RUN rm go1.21.4.linux-amd64.tar.gz -RUN mv go /usr/local - -# Download geckodriver -RUN wget https://github.com/mozilla/geckodriver/releases/download/v0.32.0/geckodriver-v0.32.0-linux64.tar.gz -RUN tar -xvf geckodriver-v0.32.0-linux64.tar.gz -RUN rm geckodriver-v0.32.0-linux64.tar.gz -RUN mv geckodriver /usr/bin - -# Make directory for app -WORKDIR /usr/src/app - -# Set environment variables -ENV PYTHONDONTWRITEBYTECODE 1 -ENV PYTHONUNBUFFERED 1 - -# Download Go packages -RUN go install -v github.com/jaeles-project/gospider@latest -RUN go install -v github.com/tomnomnom/gf@latest -RUN go install -v github.com/tomnomnom/unfurl@latest -RUN go install -v github.com/tomnomnom/waybackurls@latest -RUN go install -v github.com/projectdiscovery/httpx/cmd/httpx@latest -RUN go install -v github.com/projectdiscovery/subfinder/v2/cmd/subfinder@latest -RUN go install -v github.com/projectdiscovery/nuclei/v3/cmd/nuclei@latest -RUN go install -v github.com/projectdiscovery/naabu/v2/cmd/naabu@latest -RUN go install -v github.com/hakluke/hakrawler@latest -RUN go install -v github.com/lc/gau/v2/cmd/gau@latest -RUN go install -v github.com/owasp-amass/amass/v3/...@latest -RUN go install -v github.com/ffuf/ffuf@latest -RUN go install -v 
github.com/projectdiscovery/tlsx/cmd/tlsx@latest -RUN go install -v github.com/hahwul/dalfox/v2@latest -RUN go install -v github.com/projectdiscovery/katana/cmd/katana@latest -RUN go install -v github.com/dwisiswant0/crlfuzz/cmd/crlfuzz@latest -RUN go install -v github.com/sa7mon/s3scanner@latest - -# Update Nuclei and Nuclei-Templates -RUN nuclei -update -RUN nuclei -update-templates - -# Update project discovery tools -RUN httpx -up -RUN naabu -up -RUN subfinder -up -RUN tlsx -up -RUN katana -up - -# Copy requirements -COPY ./requirements.txt /tmp/requirements.txt -RUN pip3 install --upgrade setuptools pip && \ - pip3 install -r /tmp/requirements.txt - - -# install eyewitness - -RUN python3 -m pip install fuzzywuzzy \ - selenium==4.9.1 \ - python-Levenshtein \ - pyvirtualdisplay \ - netaddr - -# Copy source code -COPY . /usr/src/app/ - -# httpx seems to have issue, use alias instead!!! -RUN echo 'alias httpx="/go/bin/httpx"' >> ~/.bashrc diff --git a/web/api/serializers.py b/web/api/serializers.py index 1fd0b7e91..eab398d4c 100644 --- a/web/api/serializers.py +++ b/web/api/serializers.py @@ -1,6 +1,6 @@ +from collections import defaultdict from dashboard.models import * -from django.contrib.humanize.templatetags.humanize import (naturalday, - naturaltime) +from django.contrib.humanize.templatetags.humanize import (naturalday, naturaltime) from django.db.models import F, JSONField, Value from recon_note.models import * from reNgine.common_func import * @@ -8,6 +8,7 @@ from scanEngine.models import * from startScan.models import * from targetApp.models import * +from dashboard.models import * class SearchHistorySerializer(serializers.ModelSerializer): @@ -573,11 +574,14 @@ def get_children(self, history): many=True, context={'scan_history': history}) + processed_subdomains = self.process_subdomains(subdomain_serializer.data) + email = Email.objects.filter(emails__in=scan_history) email_serializer = VisualiseEmailSerializer(email, many=True) dork = Dork.objects.filter(dorks__in=scan_history) dork_serializer = VisualiseDorkSerializer(dork, many=True) + processed_dorks = self.process_dorks(dork_serializer.data) employee = Employee.objects.filter(employees__in=scan_history) employee_serializer = VisualiseEmployeeSerializer(employee, many=True) @@ -587,69 +591,68 @@ def get_children(self, history): return_data = [] - if subdomain_serializer.data: + if processed_subdomains: return_data.append({ 'description': 'Subdomains', - 'children': subdomain_serializer.data}) - - if email_serializer.data or employee_serializer.data or dork_serializer.data or metainfo: - osint_data = [] - if email_serializer.data: - osint_data.append({ - 'description': 'Emails', - 'children': email_serializer.data}) - if employee_serializer.data: - osint_data.append({ - 'description': 'Employees', - 'children': employee_serializer.data}) - if dork_serializer.data: - osint_data.append({ - 'description': 'Dorks', - 'children': dork_serializer.data}) - - if metainfo: - metainfo_data = [] - usernames = ( - metainfo - .annotate(description=F('author')) - .values('description') - .distinct() - .annotate(children=Value([], output_field=JSONField())) - .filter(author__isnull=False) - ) - - if usernames: - metainfo_data.append({ - 'description': 'Usernames', - 'children': usernames}) - - software = ( - metainfo - .annotate(description=F('producer')) - .values('description') - .distinct() - .annotate(children=Value([], output_field=JSONField())) - .filter(producer__isnull=False) - ) - - if software: - metainfo_data.append({ - 
'description': 'Software', - 'children': software}) - - os = ( - metainfo - .annotate(description=F('os')) - .values('description') - .distinct() - .annotate(children=Value([], output_field=JSONField())) - .filter(os__isnull=False) - ) - - if os: - metainfo_data.append({ - 'description': 'OS', - 'children': os}) + 'children': processed_subdomains}) + + osint_data = [] + if email_serializer.data: + osint_data.append({ + 'description': 'Emails', + 'children': email_serializer.data}) + if employee_serializer.data: + osint_data.append({ + 'description': 'Employees', + 'children': employee_serializer.data}) + if processed_dorks: + osint_data.append({ + 'description': 'Dorks', + 'children': processed_dorks}) + + if metainfo: + metainfo_data = [] + usernames = ( + metainfo + .annotate(description=F('author')) + .values('description') + .distinct() + .annotate(children=Value([], output_field=JSONField())) + .filter(author__isnull=False) + ) + + if usernames: + metainfo_data.append({ + 'description': 'Usernames', + 'children': usernames}) + + software = ( + metainfo + .annotate(description=F('producer')) + .values('description') + .distinct() + .annotate(children=Value([], output_field=JSONField())) + .filter(producer__isnull=False) + ) + + if software: + metainfo_data.append({ + 'description': 'Software', + 'children': software}) + + os = ( + metainfo + .annotate(description=F('os')) + .values('description') + .distinct() + .annotate(children=Value([], output_field=JSONField())) + .filter(os__isnull=False) + ) + + if os: + metainfo_data.append({ + 'description': 'OS', + 'children': os}) if metainfo: osint_data.append({ @@ -660,8 +663,54 @@ def get_children(self, history): 'description':'OSINT', 'children': osint_data}) + if osint_data: + return_data.append({ + 'description':'OSINT', + 'children': osint_data}) + return return_data + def process_subdomains(self, subdomains): + for subdomain in subdomains: + if 'children' in subdomain: + vuln_dict = defaultdict(list) + for child in subdomain['children']: + if child.get('description') == 'Vulnerabilities': + for vuln_severity in child['children']: + severity = vuln_severity['description'] + for vuln in vuln_severity['children']: + vuln_key = (vuln['description'], severity) + if vuln_key not in vuln_dict: + vuln_dict[vuln_key] = vuln + + # Reconstruct vulnerabilities structure without duplicates + new_vuln_structure = [] + for severity in ['Critical', 'High', 'Medium', 'Low', 'Informational', 'Unknown']: + severity_vulns = [v for k, v in vuln_dict.items() if k[1] == severity] + if severity_vulns: + new_vuln_structure.append({ + 'description': severity, + 'children': severity_vulns + }) + + # Replace old structure with new + subdomain['children'] = [child for child in subdomain['children'] if child.get('description') != 'Vulnerabilities'] + if new_vuln_structure: + subdomain['children'].append({ + 'description': 'Vulnerabilities', + 'children': new_vuln_structure + }) + + return subdomains + + def process_dorks(self, dorks): + unique_dorks = {} + for dork in dorks: + dork_key = (dork['description'], dork.get('dork_type', '')) + if dork_key not in unique_dorks: + unique_dorks[dork_key] = dork + + return list(unique_dorks.values()) class SubdomainChangesSerializer(serializers.ModelSerializer): @@ -923,3 +972,8 @@ class Meta: model = Vulnerability fields = '__all__' depth = 2 + +class ProjectSerializer(serializers.ModelSerializer): + class Meta: + model = Project + fields = ['id', 'name', 'slug', 'description', 'insert_date'] diff --git 
a/web/api/tests/__init__.py b/web/api/tests/__init__.py new file mode 100644 index 000000000..3cdbffbbd --- /dev/null +++ b/web/api/tests/__init__.py @@ -0,0 +1,9 @@ +from utils.test_base import * +from .test_vulnerability import * +from .test_subdomain import * +from .test_scan import * +from .test_tools import * +from .test_endpoint import * +from .test_project import * +from .test_organization import * +from .test_search import * diff --git a/web/api/tests/test_endpoint.py b/web/api/tests/test_endpoint.py new file mode 100644 index 000000000..adbb0c8c0 --- /dev/null +++ b/web/api/tests/test_endpoint.py @@ -0,0 +1,108 @@ +""" +This file contains the test cases for the API views. +""" + +from django.urls import reverse +from rest_framework import status +from utils.test_base import BaseTestCase + +__all__ = [ + 'TestEndPointViewSet', + 'TestEndPointChangesViewSet', + 'TestInterestingEndpointViewSet' +] + +class TestEndPointViewSet(BaseTestCase): + """Test case for the EndPoint ViewSet API.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + self.data_generator.create_endpoint() + + def test_list_endpoints(self): + """Test listing endpoints.""" + api_url = reverse("api:endpoints-list") + response = self.client.get( + api_url, + { + "project": self.data_generator.project.slug, + "scan_id": self.data_generator.scan_history.id, + "subdomain_id": self.data_generator.subdomain.id, + "target_id": self.data_generator.domain.id, + }, + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data["results"]), 1) + self.assertEqual( + response.data["results"][0]["http_url"], + self.data_generator.endpoint.http_url, + ) + + def test_list_endpoints_by_subdomain(self): + """Test listing endpoints by subdomain.""" + api_url = reverse("api:endpoints-list") + response = self.client.get( + api_url, + { + "subdomain_id": self.data_generator.subdomain.id, + "scan_id": self.data_generator.scan_history.id, + "project": self.data_generator.project.slug, + "target_id": self.data_generator.domain.id, + }, + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data["results"]), 1) + self.assertEqual( + response.data["results"][0]["http_url"], + self.data_generator.endpoint.http_url, + ) + +class TestEndPointChangesViewSet(BaseTestCase): + """Test case for endpoint changes viewset.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + self.data_generator.create_endpoint() + self.data_generator.create_scan_history() + self.data_generator.create_endpoint(name="endpoint2") + + def test_endpoint_changes_viewset(self): + """Test the EndPoint Changes ViewSet.""" + url = reverse("api:endpoint-changes-list") + response = self.client.get( + url, {"scan_id": self.data_generator.scan_history.id, "changes": "added"} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data), 1) + self.assertEqual( + response.data["results"][0]["http_url"], + self.data_generator.endpoint.http_url, + ) + self.assertEqual(response.data["results"][0]["change"], "added") + +class TestInterestingEndpointViewSet(BaseTestCase): + """Test case for interesting endpoint viewset.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + self.data_generator.create_endpoint() + + def 
test_interesting_endpoint_viewset(self): + """Test retrieving interesting endpoints for a scan.""" + url = reverse("api:interesting-endpoints-list") + response = self.client.get( + url, {"scan_id": self.data_generator.scan_history.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data), 1) + self.assertEqual( + response.data["results"][0]["http_url"], + self.data_generator.endpoint.http_url, + ) + diff --git a/web/api/tests/test_ip.py b/web/api/tests/test_ip.py new file mode 100644 index 000000000..06a33d1f6 --- /dev/null +++ b/web/api/tests/test_ip.py @@ -0,0 +1,197 @@ +""" +This file contains the test cases for the API views. +""" + +from unittest.mock import patch +from django.urls import reverse +from rest_framework import status +from utils.test_base import BaseTestCase +import socket + +__all__ = [ + 'TestIpAddressViewSet', + 'TestIPToDomain', + 'TestDomainIPHistory', + 'TestListIPs', + 'TestListPorts', + 'TestWhois', + 'TestReverseWhois' +] + +class TestIpAddressViewSet(BaseTestCase): + """Test case for IP address viewset.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + + def test_ip_address_viewset(self): + """Test retrieving IP addresses for a scan.""" + url = reverse("api:ip-addresses-list") + response = self.client.get( + url, {"scan_id": self.data_generator.scan_history.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data), 1) + self.assertEqual( + response.data["results"][0]["ip_addresses"][0]["address"], + self.data_generator.ip_address.address, + ) + +class TestIPToDomain(BaseTestCase): + """Test case for IP to domain resolution.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + + @patch("api.views.socket.gethostbyaddr") + def test_ip_to_domain(self, mock_gethostbyaddr): + """Test resolving an IP address to a domain name.""" + mock_gethostbyaddr.return_value = ( + self.data_generator.domain.name, + [self.data_generator.domain.name], + [self.data_generator.subdomain.ip_addresses.first().address], + ) + url = reverse("api:ip_to_domain") + response = self.client.get( + url, + {"ip_address": self.data_generator.subdomain.ip_addresses.first().address}, + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertEqual( + response.data["ip_address"][0]["domain"], self.data_generator.domain.name + ) + + @patch("api.views.socket.gethostbyaddr") + def test_ip_to_domain_failure(self, mock_gethostbyaddr): + """Test IP to domain resolution when it fails.""" + mock_gethostbyaddr.side_effect = socket.herror + url = reverse("api:ip_to_domain") + response = self.client.get(url, {"ip_address": "192.0.2.1"}) + self.assertEqual(response.status_code, 200) + self.assertTrue(response.data["status"]) + self.assertEqual(response.data["ip_address"][0]["domain"], "192.0.2.1") + + @patch("api.views.socket.gethostbyaddr") + def test_ip_to_domain_multiple(self, mock_gethostbyaddr): + """Test IP to domain resolution with multiple domains.""" + mock_domains = ["example.com", "example.org"] + mock_gethostbyaddr.return_value = (mock_domains[0], mock_domains, ["192.0.2.1"]) + url = reverse("api:ip_to_domain") + response = self.client.get(url, {"ip_address": "192.0.2.1"}) + self.assertEqual(response.status_code, 200) + self.assertIn("domains", response.data["ip_address"][0]) + 
self.assertEqual(response.data["ip_address"][0]["domains"], mock_domains) + +class TestDomainIPHistory(BaseTestCase): + """Test case for domain IP history lookup.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + + @patch("api.views.query_ip_history.apply_async") + def test_domain_ip_history(self, mock_apply_async): + """Test domain IP history lookup.""" + mock_apply_async.return_value.wait.return_value = { + "status": True, + "data": "IP History data", + } + url = reverse("api:domain_ip_history") + response = self.client.get(url, {"domain": self.data_generator.domain.name}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertEqual(response.data["data"], "IP History data") + +class TestListIPs(BaseTestCase): + """Test case for listing IP addresses.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + + def test_list_ips(self): + """Test listing IP addresses for a target.""" + url = reverse("api:listIPs") + response = self.client.get(url, {"target_id": self.data_generator.domain.id}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("ips", response.data) + self.assertGreaterEqual(len(response.data["ips"]), 1) + self.assertEqual( + response.data["ips"][0]["address"], self.data_generator.ip_address.address + ) + +class TestListPorts(BaseTestCase): + """Test case for listing ports.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + def test_list_ports(self): + """Test listing ports for a target and scan.""" + url = reverse("api:listPorts") + response = self.client.get( + url, + { + "target_id": self.data_generator.domain.id, + "scan_id": self.data_generator.scan_history.id, + "ip_address": "1.1.1.1", + }, + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("ports", response.data) + self.assertGreaterEqual(len(response.data["ports"]), 1) + self.assertEqual(response.data["ports"][0]["number"], 80) + self.assertEqual(response.data["ports"][0]["service_name"], "http") + +class TestWhois(BaseTestCase): + """Test case for WHOIS lookup.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + + @patch("api.views.query_whois.apply_async") + def test_whois(self, mock_apply_async): + """Test WHOIS lookup for a domain.""" + mock_apply_async.return_value.wait.return_value = { + "status": True, + "data": "Whois data", + } + url = reverse("api:whois") + response = self.client.get(url, {"ip_domain": self.data_generator.domain.name}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertEqual(response.data["data"], "Whois data") + +class TestReverseWhois(BaseTestCase): + """Test case for Reverse WHOIS lookup.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + + @patch("api.views.query_reverse_whois.apply_async") + def test_reverse_whois(self, mock_apply_async): + """Test Reverse WHOIS lookup for a domain.""" + mock_apply_async.return_value.wait.return_value = { + "status": True, + "data": "Reverse Whois data", + } + url = reverse("api:reverse_whois") + response = self.client.get( + url, {"lookup_keyword": self.data_generator.domain.name} + ) + self.assertEqual(response.status_code, 
status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertEqual(response.data["data"], "Reverse Whois data") diff --git a/web/api/tests/test_organization.py b/web/api/tests/test_organization.py new file mode 100644 index 000000000..2649607f5 --- /dev/null +++ b/web/api/tests/test_organization.py @@ -0,0 +1,81 @@ +""" +This file contains the test cases for the API views. +""" + +from django.urls import reverse +from rest_framework import status +from utils.test_base import BaseTestCase +from targetApp.models import Organization + +__all__ = [ + 'TestListOrganizations', + 'TestListTargetsInOrganization', + 'TestListTargetsWithoutOrganization' +] + +class TestListOrganizations(BaseTestCase): + """Test case for listing organizations.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + def test_list_empty_organizations(self): + """Test listing organizations when the database is empty.""" + Organization.objects.all().delete() + url = reverse("api:listOrganizations") + response = self.client.get(url) + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.json()['organizations']), 0) + + def test_list_organizations(self): + """Test listing all organizations.""" + url = reverse("api:listOrganizations") + response = self.client.get(url) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("organizations", response.data) + self.assertGreaterEqual(len(response.data["organizations"]), 1) + self.assertEqual( + response.data["organizations"][0]["name"], + self.data_generator.organization.name, + ) + +class TestListTargetsInOrganization(BaseTestCase): + """Test case for listing targets in an organization.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + def test_list_targets_in_organization(self): + """Test listing targets for a specific organization.""" + url = reverse("api:queryTargetsInOrganization") + response = self.client.get( + url, {"organization_id": self.data_generator.organization.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("organization", response.data) + self.assertIn("domains", response.data) + self.assertGreaterEqual(len(response.data["domains"]), 1) + self.assertEqual( + response.data["domains"][0]["name"], self.data_generator.domain.name + ) + +class TestListTargetsWithoutOrganization(BaseTestCase): + """Test case for listing targets without an organization.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + def test_list_targets_without_organization(self): + """Test listing targets that are not associated with any organization.""" + url = reverse("api:queryTargetsWithoutOrganization") + response = self.client.get(url) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("domains", response.data) + self.assertGreaterEqual(len(response.data["domains"]), 1) + self.assertEqual(response.data["domains"][0]["name"], "vulnweb.com") diff --git a/web/api/tests/test_osint.py b/web/api/tests/test_osint.py new file mode 100644 index 000000000..8bd6ec3e0 --- /dev/null +++ b/web/api/tests/test_osint.py @@ -0,0 +1,156 @@ +""" +This file contains the test cases for the API views. 
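+Each test case seeds data with the shared generator and asserts on the
+matching query endpoint (dork types, dorks, emails, employees and metadata).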
+""" + +from django.urls import reverse +from rest_framework import status +from utils.test_base import BaseTestCase + +__all__ = [ + 'TestListDorkTypes', + 'TestListEmails', + 'TestListDorks', + 'TestListEmployees', + 'TestListOsintUsers', + 'TestListMetadata' +] + +class TestListDorkTypes(BaseTestCase): + """Test case for listing dork types.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + def test_list_dork_types(self): + """Test listing dork types for a scan.""" + url = reverse("api:queryDorkTypes") + response = self.client.get( + url, {"scan_id": self.data_generator.scan_history.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("dorks", response.data) + self.assertGreaterEqual(len(response.data["dorks"]), 1) + self.assertEqual( + response.data["dorks"][0]["type"], self.data_generator.dork.type + ) + +class TestListEmails(BaseTestCase): + """Test case for listing emails.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + def test_list_emails(self): + """Test listing emails for a scan.""" + url = reverse("api:queryEmails") + response = self.client.get( + url, {"scan_id": self.data_generator.scan_history.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("emails", response.data) + self.assertGreaterEqual(len(response.data["emails"]), 1) + self.assertEqual( + response.data["emails"][0]["address"], self.data_generator.email.address + ) + +class TestListDorks(BaseTestCase): + """Test case for listing dorks.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + def test_list_dorks(self): + """Test listing dorks for a scan.""" + url = reverse("api:queryDorks") + response = self.client.get( + url, {"scan_id": self.data_generator.scan_history.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("dorks", response.data) + self.assertIn("Test Dork", response.data["dorks"]) + self.assertGreaterEqual(len(response.data["dorks"]["Test Dork"]), 1) + self.assertEqual( + response.data["dorks"]["Test Dork"][0]["type"], + self.data_generator.dork.type, + ) + +class TestListEmployees(BaseTestCase): + """Test case for listing employees.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + def test_list_employees(self): + """Test listing employees for a scan.""" + url = reverse("api:queryEmployees") + response = self.client.get( + url, {"scan_id": self.data_generator.scan_history.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("employees", response.data) + self.assertGreaterEqual(len(response.data["employees"]), 1) + self.assertEqual( + response.data["employees"][0]["name"], self.data_generator.employee.name + ) + +class TestListOsintUsers(BaseTestCase): + """Test case for listing OSINT users.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + self.data_generator.create_metafinder_document() + + def test_list_osint_users(self): + """Test listing OSINT users for a scan.""" + url = reverse("api:queryMetadata") + response = self.client.get( + url, {"scan_id": self.data_generator.scan_history.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("metadata", response.data) + 
self.assertGreaterEqual(len(response.data["metadata"]), 1) + self.assertEqual( + response.data["metadata"][0]["author"], + self.data_generator.metafinder_document.author, + ) + +class TestListMetadata(BaseTestCase): + """Test case for listing metadata.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + self.data_generator.create_metafinder_document() + + def test_list_metadata(self): + """Test listing metadata for a scan.""" + url = reverse("api:queryMetadata") + response = self.client.get( + url, {"scan_id": self.data_generator.scan_history.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("metadata", response.data) + self.assertGreaterEqual(len(response.data["metadata"]), 1) + self.assertEqual( + response.data["metadata"][0]["doc_name"], + self.data_generator.metafinder_document.doc_name, + ) + self.assertEqual( + response.data["metadata"][0]["url"], + self.data_generator.metafinder_document.url, + ) + self.assertEqual( + response.data["metadata"][0]["title"], + self.data_generator.metafinder_document.title, + ) diff --git a/web/api/tests/test_project.py b/web/api/tests/test_project.py new file mode 100644 index 000000000..3522fcadb --- /dev/null +++ b/web/api/tests/test_project.py @@ -0,0 +1,131 @@ +""" +This file contains the test cases for the API views. +""" + +from unittest.mock import patch +from django.utils import timezone +from django.urls import reverse +from rest_framework import status +from utils.test_base import BaseTestCase + +__all__ = [ + 'TestCreateProjectApi', + 'TestAddReconNote', + 'TestListTodoNotes', + 'TestGPTAttackSuggestion' +] + +class TestCreateProjectApi(BaseTestCase): + """Tests for the Create Project API.""" + + def test_create_project_success(self): + """Test successful project creation.""" + api_url = reverse("api:create_project") + response = self.client.get( + api_url, + { + "name": "New Project", + "insert_date": timezone.now(), + "slug": "new-project", + }, + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertEqual(response.data["project_name"], "New Project") + + def test_create_project_failure(self): + """Test project creation failure when no name is provided.""" + api_url = reverse("api:create_project") + response = self.client.get(api_url) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertFalse(response.data["status"]) + +class TestAddReconNote(BaseTestCase): + """Test case for the Add Recon Note API.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + + def test_add_recon_note(self): + """Test adding a recon note.""" + api_url = reverse("api:addReconNote") + data = { + "subdomain_id": self.data_generator.subdomain.id, + "scan_history_id": self.data_generator.scan_history.id, + "title": "Test Note", + "description": "This is a test note", + "project": self.data_generator.project.slug, + } + response = self.client.post(api_url, data) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + + def test_add_recon_note_missing_data(self): + """Test adding a recon note with missing data.""" + api_url = reverse("api:addReconNote") + data = {"title": "Test Note", "slug": "test-project"} + response = self.client.post(api_url, data) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + 
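# Note: subdomain_id and scan_history_id are deliberately omitted here, so the + # view should reject the note and flag status=False in the payload. +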
self.assertFalse(response.data["status"]) + +class TestListTodoNotes(BaseTestCase): + """Test case for listing todo notes.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + self.data_generator.create_todo_note() + + def test_list_todo_notes(self): + """Test listing todo notes for a project.""" + url = reverse("api:listTodoNotes") + response = self.client.get(url, {"project": self.data_generator.project.slug}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data["notes"]), 1) + self.assertEqual( + response.data["notes"][0]["id"], self.data_generator.todo_note.id + ) + self.assertEqual( + response.data["notes"][0]["title"], self.data_generator.todo_note.title + ) + self.assertEqual( + response.data["notes"][0]["description"], + self.data_generator.todo_note.description, + ) + self.assertEqual( + response.data["notes"][0]["project"], + self.data_generator.todo_note.project.id, + ) + self.assertEqual( + response.data["notes"][0]["subdomain"], + self.data_generator.todo_note.subdomain.id, + ) + self.assertEqual( + response.data["notes"][0]["scan_history"], + self.data_generator.todo_note.scan_history.id, + ) + +class TestGPTAttackSuggestion(BaseTestCase): + """Tests for the GPT Attack Suggestion API.""" + + def setUp(self): + super().setUp() + self.data_generator.create_project_base() + + @patch("reNgine.gpt.GPTAttackSuggestionGenerator.get_attack_suggestion") + def test_get_attack_suggestion(self, mock_get_suggestion): + """Test getting an attack suggestion for a subdomain.""" + mock_get_suggestion.return_value = { + "status": True, + "description": "Test attack suggestion", + } + api_url = reverse("api:gpt_get_possible_attacks") + response = self.client.get( + api_url, {"subdomain_id": self.data_generator.subdomain.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertEqual(response.data["description"], "Test attack suggestion") diff --git a/web/api/tests/test_scan.py b/web/api/tests/test_scan.py new file mode 100644 index 000000000..a9e8994d7 --- /dev/null +++ b/web/api/tests/test_scan.py @@ -0,0 +1,306 @@ +""" +This file contains the test cases for the API views. 
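+Note: the Celery-backed endpoints (stop_scan, initiate_subscan) are exercised with +mocks below, so no worker needs to be running for these tests.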
+""" +import json +from unittest.mock import patch +from django.urls import reverse +from rest_framework import status +from utils.test_base import BaseTestCase + +__all__ = [ + 'TestScanStatus', + 'TestListScanHistory', + 'TestListActivityLogsViewSet', + 'TestListScanLogsViewSet', + 'TestStopScan', + 'TestInitiateSubTask', + 'TestListEngines', + 'TestVisualiseData', + 'TestListTechnology', + 'TestDirectoryViewSet', + 'TestListSubScans', + 'TestFetchSubscanResults', + 'TestListInterestingKeywords' +] + +class TestScanStatus(BaseTestCase): + """Test case for checking scan status.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + def test_scan_status(self): + """Test checking the status of a scan.""" + url = reverse("api:scan_status") + response = self.client.get(url, {"project": self.data_generator.project.slug}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("scans", response.data) + self.assertIn("tasks", response.data) + self.assertIsInstance(response.data["scans"], dict) + self.assertIsInstance(response.data["tasks"], dict) + if response.data["scans"]: + self.assertIn("id", response.data["scans"]["completed"][0]) + self.assertIn("scan_status", response.data["scans"]["completed"][0]) + +class TestListScanHistory(BaseTestCase): + """Test case for listing scan history.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + def test_list_scan_history(self): + """Test listing scan history for a project.""" + url = reverse("api:listScanHistory") + response = self.client.get(url, {"project": self.data_generator.project.slug}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data), 1) + self.assertEqual(response.data[0]["id"], self.data_generator.scan_history.id) + +class TestListActivityLogsViewSet(BaseTestCase): + """Tests for the ListActivityLogsViewSet.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + self.data_generator.create_scan_history() + self.data_generator.create_scan_activity() + self.data_generator.create_command() + + def test_get_queryset(self): + """Test retrieving activity logs.""" + url = reverse('api:activity-logs-list') + response = self.client.get(url, {'activity_id': self.data_generator.scan_activity.id}) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn('results', response.data) + self.assertGreaterEqual(len(response.data['results']), 1) + self.assertEqual(response.data['results'][0]['command'], self.data_generator.command.command) + + def test_get_queryset_no_logs(self): + """Test retrieving activity logs when there are none.""" + non_existent_activity_id = 9999 # An ID that doesn't exist + url = reverse('api:activity-logs-list') + response = self.client.get(url, {'activity_id': non_existent_activity_id}) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn('results', response.data) + self.assertEqual(len(response.data['results']), 0) + +class TestListScanLogsViewSet(BaseTestCase): + """Tests for the ListScanLogsViewSet class.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + def test_list_scan_logs(self): + """Test retrieving scan logs.""" + url = reverse("api:scan-logs-list") + response = self.client.get( + url, {"scan_id": 
self.data_generator.scan_history.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("results", response.data) + +class TestStopScan(BaseTestCase): + """Tests for the StopScan class.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + @patch("api.views.StopScan") + def test_stop_scan(self, mock_stop_scan): + """Test stopping a scan.""" + mock_stop_scan.return_value = True + url = reverse("api:stop_scan") + data = {"scan_id": self.data_generator.scan_history.id} + response = self.client.post(url, data) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + +class TestInitiateSubTask(BaseTestCase): + """Tests for the InitiateSubTask class.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + @patch("api.views.initiate_subscan") + def test_initiate_subtask(self, mock_initiate_subscan): + """Test initiating a subtask.""" + mock_initiate_subscan.return_value = True + url = reverse("api:initiate_subscan") + data = { + "subdomain_ids": [self.data_generator.subdomain.id,self.data_generator.subdomain.id], + "tasks": ['httpcrawl','osint'], + "engine_id": "1", + } + response = self.client.post(url, data=json.dumps(data), content_type='application/json') + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + +class TestListEngines(BaseTestCase): + """Test case for listing engines.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + def test_list_engines(self): + """Test listing all available engines.""" + url = reverse("api:listEngines") + response = self.client.get(url) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("engines", response.data) + self.assertGreaterEqual(len(response.data["engines"]), 1) + + + + +class TestVisualiseData(BaseTestCase): + """Test case for visualising scan data.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + def test_visualise_data(self): + """Test retrieving visualisation data for a scan.""" + url = reverse("api:queryAllScanResultVisualise") + response = self.client.get( + url, {"scan_id": self.data_generator.scan_history.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data), 1) + self.assertEqual(response.data["description"], self.data_generator.domain.name) + + +class TestListTechnology(BaseTestCase): + """Test case for listing technologies.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + def test_list_technology(self): + """Test listing technologies for a target.""" + url = reverse("api:listTechnologies") + response = self.client.get(url, {"target_id": self.data_generator.domain.id}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("technologies", response.data) + self.assertGreaterEqual(len(response.data["technologies"]), 1) + self.assertEqual( + response.data["technologies"][0]["name"], + self.data_generator.technology.name, + ) + +class TestDirectoryViewSet(BaseTestCase): + """Tests for the Directory ViewSet API.""" + + def setUp(self): + super().setUp() + self.data_generator.create_project_base() + self.data_generator.create_directory_scan() + 
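# The file created below is attached to the scan, and the scan to the subdomain, + # mirroring the relations a real directory fuzzing run would produce. +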
self.data_generator.create_directory_file() + self.data_generator.directory_scan.directory_files.add( + self.data_generator.directory_file + ) + self.data_generator.subdomain.directories.add( + self.data_generator.directory_scan + ) + + def test_get_directory_files(self): + """Test retrieving directory files.""" + api_url = reverse("api:directories-list") + response = self.client.get( + api_url, {"scan_history": self.data_generator.scan_history.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn('results', response.data) + self.assertGreaterEqual(len(response.data["results"]), 1) + self.assertEqual( + response.data["results"][0]["name"], self.data_generator.directory_file.name + ) + + def test_get_directory_files_by_subdomain(self): + """Test retrieving directory files by subdomain.""" + api_url = reverse("api:directories-list") + response = self.client.get( + api_url, {"subdomain_id": self.data_generator.subdomain.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn('results', response.data) + self.assertGreaterEqual(len(response.data["results"]), 1) + self.assertEqual( + response.data["results"][0]["name"], self.data_generator.directory_file.name + ) + +class TestListSubScans(BaseTestCase): + """Test case for listing subscans.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + self.subscans = self.data_generator.create_subscan() + self.subscans[-1].celery_ids = ["test_celery_id"] + self.subscans[-1].save() + + def test_list_subscans(self): + """Test listing all subscans.""" + api_url = reverse("api:listSubScans") + response = self.client.post( + api_url, {"scan_history_id": self.data_generator.scan_history.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn('results', response.data) + self.assertTrue(response.data["status"]) + self.assertGreaterEqual(len(response.data["results"]), 1) + + # Test if the created subscan is in the results + found_subscan = next((s for s in response.data["results"] if s["celery_ids"][0] == "test_celery_id"), None) + self.assertIsNotNone(found_subscan, "The created subscan was not found in the results") + self.assertEqual(found_subscan["id"], self.subscans[-1].id) + +class TestFetchSubscanResults(BaseTestCase): + """Test case for fetching subscan results.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + self.data_generator.create_subscan() + + def test_fetch_subscan_results(self): + """Test fetching results of a subscan.""" + api_url = reverse("api:fetch_subscan_results") + response = self.client.get( + api_url, {"subscan_id": self.data_generator.subscans[-1].id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("subscan", response.data) + self.assertIn("result", response.data) + +class TestListInterestingKeywords(BaseTestCase): + """Tests for listing interesting keywords.""" + + @patch("api.views.get_lookup_keywords") + def test_list_interesting_keywords(self, mock_get_keywords): + """Test listing interesting keywords.""" + mock_get_keywords.return_value = ["keyword1", "keyword2"] + api_url = reverse("api:listInterestingKeywords") + response = self.client.get(api_url) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data, ["keyword1", "keyword2"]) diff --git a/web/api/tests/test_search.py b/web/api/tests/test_search.py new file mode 100644 index
000000000..6ed841176 --- /dev/null +++ b/web/api/tests/test_search.py @@ -0,0 +1,73 @@ +""" +This file contains the test cases for the API views. +""" + +from django.urls import reverse +from rest_framework import status +from utils.test_base import BaseTestCase + +__all__ = [ + 'TestSearchHistoryView', + 'TestUniversalSearch' +] + +class TestSearchHistoryView(BaseTestCase): + """Tests for the Search History API.""" + + def setUp(self): + super().setUp() + self.data_generator.create_search_history() + + def test_get_search_history(self): + """Test retrieving search history.""" + api_url = reverse("api:search_history") + response = self.client.get(api_url) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertGreaterEqual(len(response.data["results"]), 1) + self.assertEqual( + response.data["results"][0]["query"], + self.data_generator.search_history.query, + ) + +class TestUniversalSearch(BaseTestCase): + """Test case for the Universal Search API.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + self.data_generator.create_endpoint() + self.data_generator.create_vulnerability() + + def test_universal_search(self): + """Test the universal search functionality.""" + api_url = reverse("api:search") + response = self.client.get(api_url, {"query": "admin"}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertIn( + "admin.example.com", + [sub["name"] for sub in response.data["results"]["subdomains"]], + ) + self.assertIn( + "https://admin.example.com/endpoint", + [ep["http_url"] for ep in response.data["results"]["endpoints"]], + ) + + def test_universal_search_no_query(self): + """Test the universal search with no query parameter.""" + api_url = reverse("api:search") + response = self.client.get(api_url) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertFalse(response.data["status"]) + self.assertEqual(response.data["message"], "No query parameter provided!") + + def test_universal_search_with_special_characters(self): + """Test the universal search functionality with special characters.""" + api_url = reverse("api:search") + special_query = "admin'; DROP TABLE users;--" + response = self.client.get(api_url, {"query": special_query}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertFalse(response.data["status"]) + self.assertNotIn("users", response.data["results"]) diff --git a/web/api/tests/test_subdomain.py b/web/api/tests/test_subdomain.py new file mode 100644 index 000000000..0ceebb9c4 --- /dev/null +++ b/web/api/tests/test_subdomain.py @@ -0,0 +1,222 @@ +""" +This file contains the test cases for the API views. 
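+Note: besides the query endpoints, the mutating views (delete_subdomain, +toggle_subdomain) are verified against the database, not just the response body.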
+""" + +from django.urls import reverse +from rest_framework import status +from startScan.models import Subdomain +from utils.test_base import BaseTestCase + +__all__ = [ + 'TestQueryInterestingSubdomains', + 'TestDeleteSubdomain', + 'TestListSubdomains', + 'TestSubdomainsViewSet', + 'TestSubdomainChangesViewSet', + 'TestToggleSubdomainImportantStatus', + 'TestSubdomainDatatableViewSet', + 'TestInterestingSubdomainViewSet' +] + +class TestQueryInterestingSubdomains(BaseTestCase): + """Tests for querying interesting subdomains.""" + + def setUp(self): + super().setUp() + self.data_generator.create_project_base() + self.data_generator.create_interesting_lookup_model() + + def test_query_interesting_subdomains(self): + """Test querying interesting subdomains for a given sca + n.""" + api_url = reverse("api:queryInterestingSubdomains") + response = self.client.get( + api_url, {"scan_id": self.data_generator.scan_history.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("admin.example.com", [sub["name"] for sub in response.data]) + +class TestDeleteSubdomain(BaseTestCase): + """Tests for deleting subdomains.""" + + def setUp(self): + super().setUp() + self.data_generator.create_project_base() + + def test_delete_subdomain(self): + """Test deleting a subdomain.""" + api_url = reverse("api:delete_subdomain") + data = {"subdomain_ids": [str(self.data_generator.subdomain.id)]} + response = self.client.post(api_url, data) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertFalse( + Subdomain.objects.filter(id=self.data_generator.subdomain.id).exists() + ) + + def test_delete_nonexistent_subdomain(self): + """Test deleting a non-existent subdomain.""" + api_url = reverse("api:delete_subdomain") + data = {"subdomain_ids": ["nonexistent_id"]} + response = self.client.post(api_url, data) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + +class TestListSubdomains(BaseTestCase): + """Test case for listing subdomains.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + def test_list_subdomains(self): + """Test listing subdomains for a target.""" + url = reverse("api:querySubdomains") + response = self.client.get(url, {"target_id": self.data_generator.domain.id}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("subdomains", response.data) + self.assertGreaterEqual(len(response.data["subdomains"]), 1) + self.assertEqual( + response.data["subdomains"][0]["name"], self.data_generator.subdomain.name + ) + +class TestSubdomainsViewSet(BaseTestCase): + """Test case for subdomains viewset.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + def test_subdomains_viewset(self): + """Test retrieving subdomains for a scan.""" + url = reverse("api:subdomains-list") + response = self.client.get( + url, {"scan_id": self.data_generator.scan_history.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data), 1) + self.assertEqual( + response.data["results"][0]["name"], self.data_generator.subdomain.name + ) + +class TestSubdomainChangesViewSet(BaseTestCase): + """Test case for subdomain changes viewset.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + self.data_generator.create_scan_history() + 
self.data_generator.create_subdomain("admin1.example.com") + + def test_subdomain_changes_viewset(self): + """Test retrieving subdomain changes for a scan.""" + url = reverse("api:subdomain-changes-list") + response = self.client.get( + url, {"scan_id": self.data_generator.scan_history.id, "changes": "added"} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data), 1) + self.assertEqual( + response.data["results"][0]["name"], self.data_generator.subdomain.name + ) + self.assertEqual(response.data["results"][0]["change"], "added") + +class TestToggleSubdomainImportantStatus(BaseTestCase): + """Test case for toggling subdomain important status.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + + def test_toggle_subdomain_important_status(self): + """Test toggling the important status of a subdomain.""" + api_url = reverse("api:toggle_subdomain") + initial_status = self.data_generator.subdomain.is_important + response = self.client.post( + api_url, {"subdomain_id": self.data_generator.subdomain.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.data_generator.subdomain.refresh_from_db() + self.assertNotEqual(initial_status, self.data_generator.subdomain.is_important) + +class TestSubdomainDatatableViewSet(BaseTestCase): + """Tests for the Subdomain Datatable ViewSet API.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + + def test_list_subdomains(self): + """Test listing subdomains.""" + api_url = reverse("api:subdomain-datatable-list") + response = self.client.get( + api_url, {"project": self.data_generator.project.slug} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data["results"]), 1) + self.assertEqual( + response.data["results"][0]["name"], self.data_generator.subdomain.name + ) + + def test_list_subdomains_by_domain(self): + """Test listing subdomains by domain.""" + api_url = reverse("api:subdomain-datatable-list") + response = self.client.get( + api_url, + { + "target_id": self.data_generator.domain.id, + "project": self.data_generator.project.slug, + }, + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data["results"]), 1) + self.assertEqual( + response.data["results"][0]["name"], self.data_generator.subdomain.name + ) + +class TestInterestingSubdomainViewSet(BaseTestCase): + """Test case for the Interesting Subdomain ViewSet API.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + self.data_generator.create_interesting_lookup_model() + + def test_list_interesting_subdomains(self): + """Test listing interesting subdomains.""" + api_url = reverse("api:interesting-subdomains-list") + response = self.client.get( + api_url, + { + "project": self.data_generator.project.slug, + "scan_id": self.data_generator.scan_history.id, + }, + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data["results"]), 1) + self.assertEqual( + response.data["results"][0]["name"], self.data_generator.subdomain.name + ) + + def test_list_interesting_subdomains_by_domain(self): + """Test listing interesting subdomains by domain.""" + api_url = reverse("api:interesting-subdomains-list") + response = self.client.get( + api_url, 
+ { + "target_id": self.data_generator.domain.id, + "project": self.data_generator.project.slug, + "scan_id": self.data_generator.scan_history.id, + }, + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data["results"]), 1) + self.assertEqual( + response.data["results"][0]["name"], self.data_generator.subdomain.name + ) diff --git a/web/api/tests/test_target.py b/web/api/tests/test_target.py new file mode 100644 index 000000000..ac9f9a267 --- /dev/null +++ b/web/api/tests/test_target.py @@ -0,0 +1,72 @@ +""" +This file contains the test cases for the API views. +""" + +from django.urls import reverse +from rest_framework import status +from targetApp.models import Domain +from utils.test_base import BaseTestCase + +__all__ = [ + 'TestAddTarget', + 'TestListTargetsDatatableViewSet' +] + +class TestAddTarget(BaseTestCase): + """Test case for adding a target.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + Domain.objects.all().delete() + + def test_add_target(self): + """Test adding a new target.""" + api_url = reverse("api:addTarget") + data = { + "domain_name": "example.com", + "h1_team_handle": "team_handle", + "description": "Test description", + "organization": "Test Org", + "slug": self.data_generator.project.slug, + } + response = self.client.post(api_url, data) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertEqual(response.data["domain_name"], self.data_generator.domain.name) + self.assertTrue( + Domain.objects.filter(name=self.data_generator.domain.name).exists() + ) + + # Test adding duplicate target + response = self.client.post(api_url, data) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertFalse(response.data["status"]) + +class TestListTargetsDatatableViewSet(BaseTestCase): + """Tests for the List Targets Datatable API.""" + + def setUp(self): + super().setUp() + self.data_generator.create_project_base() + + def test_list_targets(self): + """Test listing targets.""" + api_url = reverse("api:targets-list") + response = self.client.get(api_url) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data["results"]), 1) + self.assertEqual( + response.data["results"][0]["name"], self.data_generator.domain.name + ) + + def test_list_targets_with_slug(self): + """Test listing targets with project slug.""" + api_url = reverse("api:targets-list") + response = self.client.get(api_url, {"slug": "test-project"}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data["results"]), 1) + self.assertEqual( + response.data["results"][0]["name"], self.data_generator.domain.name + ) diff --git a/web/api/tests/test_tools.py b/web/api/tests/test_tools.py new file mode 100644 index 000000000..60bef6a10 --- /dev/null +++ b/web/api/tests/test_tools.py @@ -0,0 +1,285 @@ +""" +This file contains the test cases for the API views. 
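+Note: every external dependency (Ollama, wafw00f, cmseek, gf, GitHub) is patched, +so these tests make no network calls and execute no tools.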
+""" + +from unittest.mock import patch +from django.urls import reverse +from rest_framework import status +from startScan.models import SubScan +from utils.test_base import BaseTestCase + +__all__ = [ + 'TestOllamaManager', + 'TestWafDetector', + 'TestCMSDetector', + 'TestGfList', + 'TestUpdateTool', + 'TestUninstallTool', + 'TestGetExternalToolCurrentVersion', + 'TestRengineUpdateCheck', + 'TestGithubToolCheckGetLatestRelease', + 'TestGetFileContents', + 'TestDeleteMultipleRows' +] + +class TestOllamaManager(BaseTestCase): + """Tests for the OllamaManager API endpoints.""" + + @patch("requests.post") + def test_get_download_model(self, mock_post): + """Test downloading an Ollama model.""" + mock_post.return_value.json.return_value = {"status": "success"} + api_url = reverse("api:ollama_manager") + response = self.client.get(api_url, data={"model": "llama2"}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + + @patch("requests.post") + def test_get_download_model_failure(self, mock_post): + """Test failed downloading of an Ollama model.""" + mock_post.return_value.json.return_value = {"error": "pull model manifest: file does not exist"} + api_url = reverse("api:ollama_manager") + response = self.client.get(api_url, data={"model": "invalid-model"}) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual(response.data["message"], "pull model manifest: file does not exist") + self.assertFalse(response.data["status"]) + + @patch("requests.delete") + def test_delete_model(self, mock_delete): + """Test deleting an Ollama model.""" + mock_delete.return_value.json.return_value = {"status": "success"} + api_url = reverse("api:ollama_manager") + response = self.client.delete( + api_url, data={"model": "gpt-4"}, content_type="application/json" + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + + def test_put_update_model(self): + """Test updating the selected Ollama model.""" + api_url = reverse("api:ollama_manager") + response = self.client.put( + api_url, data={"model": "gpt-4"}, content_type="application/json" + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + +class TestWafDetector(BaseTestCase): + """Tests for the WAF Detector API.""" + + @patch("api.views.run_wafw00f") + def test_waf_detection_success(self, mock_run_wafw00f): + """Test successful WAF detection.""" + mock_run_wafw00f.delay.return_value.get.return_value = ( + "WAF Detected: CloudFlare" + ) + api_url = reverse("api:waf_detector") + response = self.client.get(api_url, {"url": "https://www.cloudflare.com"}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertEqual(response.data["results"], "WAF Detected: CloudFlare") + + @patch("api.views.run_wafw00f") + def test_waf_detection_no_waf(self, mock_run_wafw00f): + """Test WAF detection when no WAF is detected.""" + mock_run_wafw00f.delay.return_value.get.return_value = "No WAF detected" + api_url = reverse("api:waf_detector") + response = self.client.get(api_url, {"url": "https://www.cloudflare.com"}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertFalse(response.data["status"]) + self.assertEqual(response.data["message"], "Could not detect any WAF!") + + def test_waf_detection_missing_url(self): + """Test WAF detection with missing URL parameter.""" + api_url = reverse("api:waf_detector") + 
response = self.client.get(api_url) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertFalse(response.data["status"]) + self.assertEqual(response.data["message"], "URL parameter is missing") + +class TestCMSDetector(BaseTestCase): + """Test case for CMS detection functionality.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + + @patch("api.views.run_cmseek.delay") + def test_cms_detector(self, mock_run_cmseek): + """Test CMS detection for a given URL.""" + mock_run_cmseek.return_value.get.return_value = { + "status": True, + "cms": "WordPress", + } + url = reverse("api:cms_detector") + response = self.client.get(url, {"url": self.data_generator.domain.name}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertEqual(response.data["cms"], "WordPress") + +class TestGfList(BaseTestCase): + """Test case for retrieving GF patterns.""" + + @patch("api.views.run_gf_list.delay") + def test_gf_list(self, mock_run_gf_list): + """Test retrieving a list of GF patterns.""" + mock_run_gf_list.return_value.get.return_value = { + "status": True, + "output": ["pattern1", "pattern2"], + } + url = reverse("api:gf_list") + response = self.client.get(url) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data, ["pattern1", "pattern2"]) + +class TestUpdateTool(BaseTestCase): + """Test case for updating a tool.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_installed_external_tool() + + @patch("api.views.run_command") + def test_update_tool(self, mock_run_command): + """Test updating a tool.""" + api_url = reverse("api:update_tool") + response = self.client.get( + api_url, {"tool_id": self.data_generator.installed_external_tool.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + mock_run_command.assert_called() + mock_run_command.apply_async.assert_called_once() + +class TestUninstallTool(BaseTestCase): + """Tests for the UninstallTool class.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_installed_external_tool() + + @patch("api.views.UninstallTool") + def test_uninstall_tool(self, mock_uninstall_tool): + """Test uninstalling a tool.""" + mock_uninstall_tool.return_value = True + url = reverse("api:uninstall_tool") + data = {"tool_id": self.data_generator.installed_external_tool.id} + response = self.client.get(url, data) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + +class TestGetExternalToolCurrentVersion(BaseTestCase): + """Test case for getting the current version of an external tool.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.tool = self.data_generator.create_installed_external_tool() + self.tool.version_lookup_command = "echo 'v1.0.0'" + self.tool.version_match_regex = r"v\d+\.\d+\.\d+" + self.tool.save() + + @patch("api.views.run_command") + def test_get_external_tool_current_version(self, mock_run_command): + """Test getting the current version of an external tool.""" + mock_run_command.return_value = (None, "v1.0.0") + url = reverse("api:external_tool_get_current_release") + response = self.client.get(url, {"tool_id": self.tool.id}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + 
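# run_command is mocked, so the version asserted below comes from the stubbed + # (return_code, output) tuple matched against version_match_regex. +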
self.assertEqual(response.data["version_number"], "v1.0.0") + self.assertEqual(response.data["tool_name"], self.tool.name) + +class TestRengineUpdateCheck(BaseTestCase): + """Tests for checking reNgine updates.""" + + @patch("requests.get") + def test_rengine_update_check(self, mock_get): + """Test checking for reNgine updates.""" + mock_get.return_value.json.return_value = [ + {"name": "v2.0.0", "body": "Changelog"} + ] + api_url = reverse("api:check_rengine_update") + response = self.client.get(api_url) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertIn("latest_version", response.data) + self.assertIn("current_version", response.data) + self.assertIn("update_available", response.data) + +class TestGithubToolCheckGetLatestRelease(BaseTestCase): + """Test case for checking the latest release of a GitHub tool.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.tool = self.data_generator.create_installed_external_tool() + self.tool.github_url = "https://github.com/example/tool" + self.tool.save() + + @patch("api.views.requests.get") + def test_github_tool_check_get_latest_release(self, mock_get): + """Test checking the latest release of a GitHub tool.""" + mock_get.return_value.json.return_value = [ + { + "url": "https://api.github.com/repos/example/tool/releases/1", + "id": 1, + "name": "v1.0.0", + "body": "Release notes", + } + ] + url = reverse("api:github_tool_latest_release") + response = self.client.get(url, {"tool_id": self.tool.id}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertEqual(response.data["name"], "v1.0.0") + +class TestGetFileContents(BaseTestCase): + """Test case for retrieving file contents.""" + + @patch("api.views.os.path.exists") + @patch("api.views.run_command") + def test_get_file_contents(self, mock_run_command, mock_exists): + """Test retrieving contents of a file.""" + mock_exists.return_value = True + mock_run_command.return_value = (0, "test content") + url = reverse("api:getFileContents") + response = self.client.get(url, {"nuclei_config": True}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertGreaterEqual(len(response.data["content"]), 1) + +class TestDeleteMultipleRows(BaseTestCase): + """Test case for deleting multiple rows.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + self.data_generator.create_subscan() + self.data_generator.create_subscan() + + def test_delete_multiple_rows(self): + """Test deleting multiple rows.""" + api_url = reverse("api:delete_rows") + data = { + "type": "subscan", + "rows": [ + int(self.data_generator.subscans[0].id), + int(self.data_generator.subscans[1].id), + ], + } + response = self.client.post(api_url, data) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertFalse( + SubScan.objects.filter( + id__in=[ + self.data_generator.subscans[0].id, + self.data_generator.subscans[1].id, + ] + ).exists() + ) diff --git a/web/api/tests/test_vulnerability.py b/web/api/tests/test_vulnerability.py new file mode 100644 index 000000000..c3cf416a9 --- /dev/null +++ b/web/api/tests/test_vulnerability.py @@ -0,0 +1,231 @@ +""" +This file contains the test cases for the API views. 
+""" + +from unittest.mock import patch, MagicMock +from django.urls import reverse +from rest_framework import status +from startScan.models import Vulnerability +from utils.test_base import BaseTestCase + +__all__ = [ + 'TestVulnerabilityViewSet', + 'TestGPTVulnerabilityReportGenerator', + 'TestDeleteVulnerability', + 'TestVulnerabilityReport', + 'TestFetchMostCommonVulnerability', + 'TestCVEDetails', + 'TestFetchMostVulnerable' +] + +class TestVulnerabilityViewSet(BaseTestCase): + """Tests for the Vulnerability ViewSet API.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + self.data_generator.create_endpoint() + self.data_generator.create_vulnerability() + + def test_list_vulnerabilities(self): + """Test listing vulnerabilities.""" + api_url = reverse("api:vulnerabilities-list") + response = self.client.get(api_url) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn('count', response.data) + self.assertIn('next', response.data) + self.assertIn('previous', response.data) + self.assertIn('results', response.data) + self.assertIsInstance(response.data['results'][0], dict) + self.assertGreaterEqual(len(response.data["results"]), 1) + self.assertEqual( + response.data["results"][0]["name"], + self.data_generator.vulnerabilities[0].name, + ) + + def test_list_vulnerabilities_by_scan(self): + """Test listing vulnerabilities by scan history.""" + api_url = reverse("api:vulnerabilities-list") + response = self.client.get( + api_url, {"scan_history": self.data_generator.scan_history.id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data["results"]), 1) + self.assertEqual( + response.data["results"][0]["name"], + self.data_generator.vulnerabilities[0].name, + ) + + def test_list_vulnerabilities_by_domain(self): + """Test listing vulnerabilities by domain.""" + api_url = reverse("api:vulnerabilities-list") + response = self.client.get(api_url, {"domain": "example.com"}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data["results"]), 1) + self.assertEqual( + response.data["results"][0]["name"], + self.data_generator.vulnerabilities[0].name, + ) + + def test_list_vulnerabilities_by_severity(self): + """Test listing vulnerabilities by severity.""" + api_url = reverse("api:vulnerabilities-list") + response = self.client.get(api_url, {"severity": 1}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertGreaterEqual(len(response.data["results"]), 1) + self.assertEqual( + response.data["results"][0]["name"], + self.data_generator.vulnerabilities[0].name, + ) + +class TestGPTVulnerabilityReportGenerator(BaseTestCase): + """Tests for the GPT Vulnerability Report Generator API.""" + + def setUp(self): + super().setUp() + self.data_generator.create_project_base() + self.data_generator.create_endpoint() + self.data_generator.create_vulnerability() + + @patch("reNgine.tasks.gpt_vulnerability_description.apply_async") + def test_get_vulnerability_report(self, mock_apply_async): + """Test generating a vulnerability report.""" + mock_task = MagicMock() + mock_task.wait.return_value = { + "status": True, + "description": "Test vulnerability report", + } + mock_apply_async.return_value = mock_task + api_url = reverse("api:gpt_vulnerability_report_generator") + response = self.client.get( + api_url, {"id": self.data_generator.vulnerabilities[0].id} + ) + 
self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertEqual(response.data["description"], "Test vulnerability report") + +class TestDeleteVulnerability(BaseTestCase): + """Tests for deleting vulnerabilities.""" + + def setUp(self): + super().setUp() + self.data_generator.create_project_base() + self.data_generator.create_endpoint() + self.data_generator.create_vulnerability() + + def test_delete_vulnerability(self): + """Test deleting a vulnerability.""" + api_url = reverse("api:delete_vulnerability") + data = {"vulnerability_ids": [self.data_generator.vulnerabilities[0].id]} + response = self.client.post(api_url, data) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertFalse( + Vulnerability.objects.filter( + id=self.data_generator.vulnerabilities[0].id + ).exists() + ) + +class TestVulnerabilityReport(BaseTestCase): + """Test case for vulnerability reporting functionality.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + self.data_generator.create_endpoint() + self.data_generator.create_vulnerability() + + @patch("api.views.send_hackerone_report") + def test_vulnerability_report(self, mock_send_report): + """Test sending a vulnerability report.""" + mock_send_report.return_value = True + url = reverse("api:vulnerability_report") + response = self.client.get( + url, {"vulnerability_id": self.data_generator.vulnerabilities[0].id} + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + +class TestFetchMostCommonVulnerability(BaseTestCase): + """Test case for the Fetch Most Common Vulnerability API.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + self.data_generator.create_endpoint() + self.data_generator.create_vulnerability() + self.data_generator.create_vulnerability() + + def test_fetch_most_common_vulnerability(self): + """Test fetching the most common vulnerability.""" + api_url = reverse("api:fetch_most_common_vulnerability") + data = { + "target_id": int(self.data_generator.domain.id), + "scan_history_id": int(self.data_generator.scan_history.id), + "slug": self.data_generator.project.slug, + "limit": 10, + } + response = self.client.post(api_url, data) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertEqual( + response.data["result"][0]["name"], + self.data_generator.vulnerabilities[0].name, + ) + self.assertEqual(response.data["result"][0]["count"], 2) + +class TestCVEDetails(BaseTestCase): + """Test case for the CVE Details API.""" + + @patch("requests.get") + def test_get_cve_details(self, mock_get): + """Test getting CVE details.""" + mock_get.return_value.status_code = 200 + mock_get.return_value.json.return_value = { + "id": "CVE-2021-44228", + "summary": "Log4j vulnerability", + } + api_url = reverse("api:cve_details") + response = self.client.get(api_url, {"cve_id": "CVE-2021-44228"}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertEqual(response.data["result"]["id"], "CVE-2021-44228") + + def test_get_cve_details_missing_id(self): + """Test getting CVE details with missing ID.""" + api_url = reverse("api:cve_details") + response = self.client.get(api_url) + self.assertEqual(response.status_code, status.HTTP_200_OK) + 
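# A missing CVE ID is a soft failure: HTTP 200 with status False in the body. +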
self.assertFalse(response.data["status"]) + self.assertEqual(response.data["message"], "CVE ID not provided") + +class TestFetchMostVulnerable(BaseTestCase): + """Test case for the Fetch Most Vulnerable API.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_base() + self.data_generator.create_endpoint() + self.data_generator.create_vulnerability() + self.data_generator.create_vulnerability() + + def test_fetch_most_vulnerable(self): + """Test fetching the most vulnerable subdomains.""" + api_url = reverse("api:fetch_most_vulnerable") + data = { + "target_id": int(self.data_generator.domain.id), + "scan_history_id": int(self.data_generator.scan_history.id), + "slug": self.data_generator.project.slug, + "limit": 10, + } + response = self.client.post(api_url, data) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.data["status"]) + self.assertEqual( + response.data["result"][0]["name"], self.data_generator.subdomain.name + ) + self.assertEqual(response.data["result"][0]["vuln_count"], 2) diff --git a/web/api/urls.py b/web/api/urls.py index f3a09b54d..ec9e1e108 100644 --- a/web/api/urls.py +++ b/web/api/urls.py @@ -6,19 +6,19 @@ app_name = 'api' router = routers.DefaultRouter() -router.register(r'listDatatableSubdomain', SubdomainDatatableViewSet) -router.register(r'listTargets', ListTargetsDatatableViewSet) -router.register(r'listSubdomains', SubdomainsViewSet) -router.register(r'listEndpoints', EndPointViewSet) -router.register(r'listDirectories', DirectoryViewSet) -router.register(r'listVulnerability', VulnerabilityViewSet) -router.register(r'listInterestingSubdomains', InterestingSubdomainViewSet) -router.register(r'listInterestingEndpoints', InterestingEndpointViewSet) -router.register(r'listSubdomainChanges', SubdomainChangesViewSet) -router.register(r'listEndPointChanges', EndPointChangesViewSet) -router.register(r'listIps', IpAddressViewSet) -router.register(r'listActivityLogs', ListActivityLogsViewSet) -router.register(r'listScanLogs', ListScanLogsViewSet) +router.register(r'listDatatableSubdomain', SubdomainDatatableViewSet, basename='subdomain-datatable') +router.register(r'listTargets', ListTargetsDatatableViewSet, basename='targets') +router.register(r'listSubdomains', SubdomainsViewSet, basename='subdomains') +router.register(r'listEndpoints', EndPointViewSet, basename='endpoints') +router.register(r'listDirectories', DirectoryViewSet, basename='directories') +router.register(r'listVulnerability', VulnerabilityViewSet, basename='vulnerabilities') +router.register(r'listInterestingSubdomains', InterestingSubdomainViewSet, basename='interesting-subdomains') +router.register(r'listInterestingEndpoints', InterestingEndpointViewSet, basename='interesting-endpoints') +router.register(r'listSubdomainChanges', SubdomainChangesViewSet, basename='subdomain-changes') +router.register(r'listEndPointChanges', EndPointChangesViewSet, basename='endpoint-changes') +router.register(r'listIps', IpAddressViewSet, basename='ip-addresses') +router.register(r'listActivityLogs', ListActivityLogsViewSet, basename='activity-logs') +router.register(r'listScanLogs', ListScanLogsViewSet, basename='scan-logs') urlpatterns = [ url('^', include(router.urls)), @@ -154,6 +154,10 @@ 'tools/waf_detector/', WafDetector.as_view(), name='waf_detector'), + path( + 'tools/gf_list/', + GfList.as_view(), + name='gf_list'), path( 'tools/gpt_vulnerability_report/', GPTVulnerabilityReportGenerator.as_view(), @@ -178,6 +182,10 
@@ 'tool/uninstall/', UninstallTool.as_view(), name='uninstall_tool'), + path( + 'tool/ollama/', + OllamaManager.as_view(), + name='ollama_manager'), path( 'rengine/update/', RengineUpdateCheck.as_view(), diff --git a/web/api/views.py b/web/api/views.py index 4737612cb..66fe3a8f8 100644 --- a/web/api/views.py +++ b/web/api/views.py @@ -1,43 +1,186 @@ import logging import re +import os.path +from pathlib import Path import socket -import subprocess +from ipaddress import IPv4Network +from collections import defaultdict import requests import validators -from dashboard.models import * +from django.urls import reverse +from dashboard.models import OllamaSettings, Project, SearchHistory from django.db.models import CharField, Count, F, Q, Value from django.shortcuts import get_object_or_404 from django.utils import timezone from packaging import version from django.template.defaultfilters import slugify from rest_framework import viewsets +from rest_framework.permissions import IsAuthenticated +from rest_framework.exceptions import PermissionDenied from rest_framework.response import Response from rest_framework.views import APIView from rest_framework.status import HTTP_400_BAD_REQUEST +from rest_framework.parsers import JSONParser -from recon_note.models import * +from recon_note.models import TodoNote from reNgine.celery import app -from reNgine.common_func import * -from reNgine.definitions import ABORTED_TASK -from reNgine.settings import RENGINE_CURRENT_VERSION -from reNgine.tasks import * +from reNgine.common_func import ( + get_data_from_post_request, + get_interesting_endpoints, + get_interesting_subdomains, + get_lookup_keywords, + safe_int_cast +) +from reNgine.definitions import ( + ABORTED_TASK, + OLLAMA_INSTANCE, + NUCLEI_SEVERITY_MAP, + DEFAULT_GPT_MODELS, + RUNNING_TASK, + SUCCESS_TASK +) +from reNgine.settings import ( + RENGINE_CURRENT_VERSION, + RENGINE_TOOL_GITHUB_PATH +) +from reNgine.tasks import ( + create_scan_activity, + gpt_vulnerability_description, + initiate_subscan, + query_ip_history, + query_reverse_whois, + query_whois, + run_cmseek, + run_command, + run_gf_list, + run_wafw00f, + send_hackerone_report +) from reNgine.gpt import GPTAttackSuggestionGenerator -from reNgine.utilities import is_safe_path -from scanEngine.models import * -from startScan.models import * -from startScan.models import EndPoint -from targetApp.models import * - -from .serializers import * +from reNgine.utilities import is_safe_path, remove_lead_and_trail_slash +from scanEngine.models import EngineType, InstalledExternalTool +from startScan.models import ( + Command, + DirectoryFile, + DirectoryScan, + Dork, + Email, + Employee, + EndPoint, + IpAddress, + MetaFinderDocument, + Port, + ScanActivity, + ScanHistory, + Subdomain, + SubScan, + Technology, + Vulnerability, +) +from targetApp.models import Domain, Organization + +from .serializers import ( + CommandSerializer, + DirectoryFileSerializer, + DirectoryScanSerializer, + DomainSerializer, + DorkCountSerializer, + DorkSerializer, + EmailSerializer, + EmployeeSerializer, + EndpointOnlyURLsSerializer, + EndpointSerializer, + EndPointChangesSerializer, + EngineSerializer, + InterestingEndPointSerializer, + InterestingSubdomainSerializer, + IpSerializer, + IpSubdomainSerializer, + MetafinderDocumentSerializer, + MetafinderUserSerializer, + OnlySubdomainNameSerializer, + OrganizationSerializer, + OrganizationTargetsSerializer, + PortSerializer, + ProjectSerializer, + ReconNoteSerializer, + ScanHistorySerializer, + SearchHistorySerializer, 
+ SubdomainChangesSerializer, + SubdomainSerializer, + SubScanResultSerializer, + SubScanSerializer, + TechnologyCountSerializer, + VisualiseDataSerializer, + VulnerabilitySerializer +) logger = logging.getLogger(__name__) +class OllamaManager(APIView): + def get(self, request): + model_name = request.query_params.get('model') + if not model_name: + return Response({'status': False, 'message': 'Model name is required'}, status=400) + + try: + pull_model_api = f'{OLLAMA_INSTANCE}/api/pull' + _response = requests.post( + pull_model_api, + json={'name': model_name, 'stream': False} + ).json() + if _response.get('error'): + return Response({'status': False, 'message': _response.get('error')}, status=400) + return Response({'status': True}) + except Exception as e: + logger.error(f"Error in OllamaManager GET: {str(e)}") + return Response({'status': False, 'message': 'An error occurred while pulling the model.'}, status=500) + + def delete(self, request): + model_name = get_data_from_post_request(request, 'model') + if not model_name: + return Response({'status': False, 'message': 'Model name is required'}, status=400) + + try: + delete_model_api = f'{OLLAMA_INSTANCE}/api/delete' + _response = requests.delete( + delete_model_api, + json={'name': model_name} + ).json() + if _response.get('error'): + return Response({'status': False, 'message': _response.get('error')}, status=400) + return Response({'status': True}) + except Exception as e: + logger.error(f"Error in OllamaManager DELETE: {str(e)}") + return Response({'status': False, 'message': 'An error occurred while deleting the model.'}, status=500) + + def put(self, request): + model_name = request.data.get('model') + if not model_name: + return Response({'status': False, 'message': 'Model name is required'}, status=400) + + use_ollama = all(model['name'] != model_name for model in DEFAULT_GPT_MODELS) + + try: + OllamaSettings.objects.update_or_create( + id=1, + defaults={ + 'selected_model': model_name, + 'use_ollama': use_ollama, + 'selected': True + } + ) + return Response({'status': True}) + except Exception as e: + logger.error(f"Error in OllamaManager PUT: {str(e)}") + return Response({'status': False, 'message': 'An error occurred while updating Ollama settings.'}, status=500) + class GPTAttackSuggestion(APIView): def get(self, request): req = self.request - subdomain_id = req.query_params.get('subdomain_id') + subdomain_id = safe_int_cast(req.query_params.get('subdomain_id')) if not subdomain_id: return Response({ 'status': False, @@ -45,49 +188,49 @@ def get(self, request): }) try: subdomain = Subdomain.objects.get(id=subdomain_id) - except Exception as e: + except Subdomain.DoesNotExist: return Response({ 'status': False, - 'error': 'Subdomain not found with id ' + subdomain_id + 'error': f'Subdomain not found with id {subdomain_id}' }) + if subdomain.attack_surface: return Response({ 'status': True, 'subdomain_name': subdomain.name, 'description': subdomain.attack_surface }) + ip_addrs = subdomain.ip_addresses.all() - open_ports_str = '' - for ip in ip_addrs: - ports = ip.ports.all() - for port in ports: - open_ports_str += f'{port.number}/{port.service_name}, ' - tech_used = '' - for tech in subdomain.technologies.all(): - tech_used += f'{tech.name}, ' - input = f''' + open_ports = ', '.join(f'{port.number}/{port.service_name}' for ip in ip_addrs for port in ip.ports.all()) + tech_used = ', '.join(tech.name for tech in subdomain.technologies.all()) + + input_data = f''' Subdomain Name: {subdomain.name} Subdomain Page Title: 
{subdomain.page_title} - Open Ports: {open_ports_str} + Open Ports: {open_ports} HTTP Status: {subdomain.http_status} Technologies Used: {tech_used} Content type: {subdomain.content_type} Web Server: {subdomain.webserver} Page Content Length: {subdomain.content_length} ''' + gpt = GPTAttackSuggestionGenerator() - response = gpt.get_attack_suggestion(input) + response = gpt.get_attack_suggestion(input_data) response['subdomain_name'] = subdomain.name + if response.get('status'): subdomain.attack_surface = response.get('description') subdomain.save() + return Response(response) class GPTVulnerabilityReportGenerator(APIView): def get(self, request): req = self.request - vulnerability_id = req.query_params.get('id') + vulnerability_id = safe_int_cast(req.query_params.get('id')) if not vulnerability_id: return Response({ 'status': False, @@ -99,37 +242,27 @@ class CreateProjectApi(APIView): - def get(self, request): - req = self.request - project_name = req.query_params.get('name') - slug = slugify(project_name) - insert_date = timezone.now() - - try: - project = Project.objects.create( - name=project_name, - slug=slug, - insert_date =insert_date - ) - response = { - 'status': True, - 'project_name': project_name - } - return Response(response) - except Exception as e: - response = { - 'status': False, - 'error': str(e) - } - return Response(response, status=HTTP_400_BAD_REQUEST) - - + def get(self, request): + project_name = request.query_params.get('name') + # Validate before slugify: slugify(None) would silently create a bogus project + if not project_name: + return Response({'status': False, 'message': 'Project name is required.'}, status=HTTP_400_BAD_REQUEST) + slug = slugify(project_name) + insert_date = timezone.now() + + try: + Project.objects.create( + name=project_name, + slug=slug, + insert_date=insert_date + ) + return Response({'status': True, 'project_name': project_name}) + except Exception as e: + logger.error(f"Error in CreateProjectApi: {str(e)}") + return Response({'status': False, 'message': 'Failed to create project.'}, status=HTTP_400_BAD_REQUEST) class QueryInterestingSubdomains(APIView): def get(self, request): req = self.request - scan_id = req.query_params.get('scan_id') - domain_id = req.query_params.get('target_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) + domain_id = safe_int_cast(req.query_params.get('target_id')) if scan_id: queryset = get_interesting_subdomains(scan_history=scan_id) @@ -172,7 +305,7 @@ def filter_queryset(self, qs): if _order_direction == 'desc': - order_col = '-{}'.format(order_col) + order_col = f'-{order_col}' qs = self.queryset.filter( Q(name__icontains=search_value) | @@ -188,24 +321,35 @@ def filter_queryset(self, qs): class WafDetector(APIView): def get(self, request): req = self.request - url= req.query_params.get('url') - response = {} - response['status'] = False + url = req.query_params.get('url') + response = { + 'status': False, + 'message': '', + 'results': None + } - wafw00f_command = f'wafw00f {url}' - output = subprocess.check_output(wafw00f_command, shell=True) - # use regex to get the waf - regex = "behind \\\\x1b\[1;96m(.*)\\\\x1b" - group = re.search(regex, str(output)) + if not url: + response['message'] = 'URL parameter is missing' + return Response(response) - if group: - response['status'] = True - response['results'] = group.group(1) - else: - response['message'] = 'Could not detect any WAF!'
+ try: + logger.debug(f"Initiating WAF detection for URL: {url}") + result = run_wafw00f.delay(url).get(timeout=30) - return Response(response) + if result.startswith("Unexpected error"): + response['message'] = result + elif result != "No WAF detected": + response['status'] = True + response['results'] = result + else: + response['message'] = 'Could not detect any WAF!' + + logger.debug(f"WAF detection result: {response}") + except Exception as e: + logger.error(f"Error during WAF detection: {str(e)}") + response['message'] = "An unexpected error occurred. Please try again later." + return Response(response) class SearchHistoryView(APIView): def get(self, request): @@ -276,89 +420,52 @@ def get(self, request): class FetchMostCommonVulnerability(APIView): - def post(self, request): - req = self.request - data = req.data - - try: - limit = data.get('limit', 20) - project_slug = data.get('slug') - scan_history_id = data.get('scan_history_id') - target_id = data.get('target_id') - is_ignore_info = data.get('ignore_info', False) - - response = {} - response['status'] = False - - if project_slug: - project = Project.objects.get(slug=project_slug) - vulnerabilities = Vulnerability.objects.filter(target_domain__project=project) - else: - vulnerabilities = Vulnerability.objects.all() - - - if scan_history_id: - vuln_query = ( - vulnerabilities - .filter(scan_history__id=scan_history_id) - .values("name", "severity") - ) - if is_ignore_info: - most_common_vulnerabilities = ( - vuln_query - .exclude(severity=0) - .annotate(count=Count('name')) - .order_by("-count")[:limit] - ) - else: - most_common_vulnerabilities = ( - vuln_query - .annotate(count=Count('name')) - .order_by("-count")[:limit] - ) - - elif target_id: - vuln_query = vulnerabilities.filter(target_domain__id=target_id).values("name", "severity") - if is_ignore_info: - most_common_vulnerabilities = ( - vuln_query - .exclude(severity=0) - .annotate(count=Count('name')) - .order_by("-count")[:limit] - ) - else: - most_common_vulnerabilities = ( - vuln_query - .annotate(count=Count('name')) - .order_by("-count")[:limit] - ) - - else: - vuln_query = vulnerabilities.values("name", "severity") - if is_ignore_info: - most_common_vulnerabilities = ( - vuln_query.exclude(severity=0) - .annotate(count=Count('name')) - .order_by("-count")[:limit] - ) - else: - most_common_vulnerabilities = ( - vuln_query.annotate(count=Count('name')) - .order_by("-count")[:limit] - ) - - - most_common_vulnerabilities = [vuln for vuln in most_common_vulnerabilities] - - if most_common_vulnerabilities: - response['status'] = True - response['result'] = most_common_vulnerabilities - except Exception as e: - print(str(e)) - response = {} - - return Response(response) - + def post(self, request): + data = request.data + response = {'status': False} + + try: + limit = safe_int_cast(data.get('limit', 20)) + project_slug = data.get('slug') + scan_history_id = safe_int_cast(data.get('scan_history_id')) + target_id = safe_int_cast(data.get('target_id')) + is_ignore_info = data.get('ignore_info', False) + + vulnerabilities = ( + Vulnerability.objects.filter(target_domain__project__slug=project_slug) + if project_slug else Vulnerability.objects.all() + ) + + if scan_history_id: + vuln_query = vulnerabilities.filter(scan_history__id=scan_history_id).values("name", "severity") + elif target_id: + vuln_query = vulnerabilities.filter(target_domain__id=target_id).values("name", "severity") + else: + vuln_query = vulnerabilities.values("name", "severity") + + if is_ignore_info: + 
most_common_vulnerabilities = ( + vuln_query.exclude(severity=0) + .annotate(count=Count('name')) + .order_by("-count")[:limit] + ) + else: + most_common_vulnerabilities = ( + vuln_query.annotate(count=Count('name')) + .order_by("-count")[:limit] + ) + + most_common_vulnerabilities = list(most_common_vulnerabilities) + + if most_common_vulnerabilities: + response['status'] = True + response['result'] = most_common_vulnerabilities + + except Exception as e: + logger.error(f"Error in FetchMostCommonVulnerability: {str(e)}") + response['message'] = 'An error occurred while fetching vulnerabilities.' + + return Response(response) class FetchMostVulnerable(APIView): def post(self, request): @@ -366,9 +473,9 @@ def post(self, request): data = req.data project_slug = data.get('slug') - scan_history_id = data.get('scan_history_id') - target_id = data.get('target_id') - limit = data.get('limit', 20) + scan_history_id = safe_int_cast(data.get('scan_history_id')) + target_id = safe_int_cast(data.get('target_id')) + limit = safe_int_cast(data.get('limit', 20)) is_ignore_info = data.get('ignore_info', False) response = {} @@ -488,11 +595,17 @@ def post(self, request): req = self.request data = req.data - subdomain_id = data.get('subdomain_id') - scan_history_id = data.get('scan_history_id') + subdomain_id = safe_int_cast(data.get('subdomain_id')) + scan_history_id = safe_int_cast(data.get('scan_history_id')) title = data.get('title') description = data.get('description') project = data.get('project') + + if not title: + return Response({"status": False, "error": "Title is required."}, status=400) + if not project: + return Response({"status": False, "error": "Project is required."}, status=400) + try: project = Project.objects.get(slug=project) @@ -516,19 +629,17 @@ def post(self, request): note.project = project note.save() - response = {'status': True} + return Response({"status": True, "error": False, "id": note.id}, status=200) except Exception as e: - response = {'status': False, 'message': str(e)} - - return Response(response) - + logger.error(e) + return Response({"status": False, "error": "An error occurred."}, status=400) class ToggleSubdomainImportantStatus(APIView): def post(self, request): req = self.request data = req.data - subdomain_id = data.get('subdomain_id') + subdomain_id = safe_int_cast(data.get('subdomain_id')) response = {'status': False, 'message': 'No subdomain_id provided'} @@ -548,14 +659,19 @@ def post(self, request): h1_team_handle = data.get('h1_team_handle') description = data.get('description') domain_name = data.get('domain_name') + organization_name = data.get('organization') slug = data.get('slug') # Validate domain name if not validators.domain(domain_name): - return Response({'status': False, 'message': 'Invalid domain or IP'}) + return Response({'status': False, 'message': 'Invalid domain or IP'}, status=400) project = Project.objects.get(slug=slug) + # Check if the domain already exists + if Domain.objects.filter(name=domain_name, project=project).exists(): + return Response({'status': False, 'message': 'Domain already exists as a target!'}, status=400) + # Create domain object in DB domain, _ = Domain.objects.get_or_create(name=domain_name) domain.project = project @@ -564,11 +680,26 @@ def post(self, request): if not domain.insert_date: domain.insert_date = timezone.now() domain.save() + + # Create org object in DB + if organization_name: + organization_obj = None + organization_query = Organization.objects.filter(name=organization_name) + if 
organization_query.exists(): + organization_obj = organization_query[0] + else: + organization_obj = Organization.objects.create( + name=organization_name, + project=project, + insert_date=timezone.now()) + organization_obj.domains.add(domain) + return Response({ 'status': True, - 'message': 'Domain successfully added as target !', + 'message': 'Domain successfully added as target!', 'domain_name': domain_name, - 'domain_id': domain.id + 'domain_id': domain.id, + 'initiate_scan_url': reverse('start_scan', kwargs={'slug': slug, 'domain_id': domain.id}) }) @@ -576,7 +707,7 @@ class FetchSubscanResults(APIView): def get(self, request): req = self.request # data = req.data - subscan_id = req.query_params.get('subscan_id') + subscan_id = safe_int_cast(req.query_params.get('subscan_id')) subscan = SubScan.objects.filter(id=subscan_id) if not subscan.exists(): return Response({ @@ -615,16 +746,16 @@ def get(self, request): logger.info(subscan_data) logger.info(subscan_results) - return Response({'subscan': subscan_data, 'result': subscan_results}) + return Response({'subscan': subscan_data, 'result': subscan_results, 'endpoint_url': reverse('api:endpoints-list'), 'vulnerability_url': reverse('api:vulnerabilities-list')}) class ListSubScans(APIView): def post(self, request): req = self.request data = req.data - subdomain_id = data.get('subdomain_id', None) - scan_history = data.get('scan_history_id', None) - domain_id = data.get('domain_id', None) + subdomain_id = safe_int_cast(data.get('subdomain_id', None)) + scan_history = safe_int_cast(data.get('scan_history_id', None)) + domain_id = safe_int_cast(data.get('domain_id', None)) response = {} response['status'] = False @@ -669,24 +800,23 @@ class DeleteMultipleRows(APIView): def post(self, request): req = self.request data = req.data - + subscan_ids = get_data_from_post_request(request, 'rows') try: if data['type'] == 'subscan': - for row in data['rows']: - SubScan.objects.get(id=row).delete() - response = True + subscan_ids = [int(id) for id in subscan_ids] + SubScan.objects.filter(id__in=subscan_ids).delete() + return Response({'status': True}) + except ValueError: + return Response({'status': False, 'message': 'Invalid subscan ID provided'}, status=400) except Exception as e: - response = False - - return Response({'status': response}) - + # logger.debug() returns None, so it must not be used as the response message + logger.error(f"Error in DeleteMultipleRows: {str(e)}") + return Response({'status': False, 'message': 'An error occurred while deleting subscans.'}, status=500) class StopScan(APIView): def post(self, request): req = self.request data = req.data - scan_id = data.get('scan_id') - subscan_id = data.get('subscan_id') + scan_id = safe_int_cast(data.get('scan_id')) + subscan_id = safe_int_cast(data.get('subscan_id')) response = {} task_ids = [] scan = None @@ -713,6 +843,7 @@ def post(self, request): task_ids = scan.celery_ids scan.scan_status = ABORTED_TASK scan.stop_scan_date = timezone.now() + scan.aborted_by = request.user scan.save() create_scan_activity( scan.id, @@ -746,12 +877,21 @@ def post(self, request): class InitiateSubTask(APIView): + parser_classes = [JSONParser] + def post(self, request): - req = self.request - data = req.data - engine_id = data.get('engine_id') - scan_types = data['tasks'] - for subdomain_id in data['subdomain_ids']: + data = request.data + engine_id = safe_int_cast(data.get('engine_id')) + scan_types = data.get('tasks', []) + subdomain_ids = safe_int_cast(data.get('subdomain_ids', [])) + + if not scan_types or not subdomain_ids: + return Response({'status': False, 'error': 'Missing tasks or subdomain_ids'}, status=400) + + if isinstance(subdomain_ids, int): + 
subdomain_ids = [subdomain_ids] + + for subdomain_id in subdomain_ids: logger.info(f'Running subscans {scan_types} on subdomain "{subdomain_id}" ...') for stype in scan_types: ctx = { @@ -766,19 +906,32 @@ def post(self, request): class DeleteSubdomain(APIView): def post(self, request): - req = self.request - for id in req.data['subdomain_ids']: - Subdomain.objects.get(id=id).delete() - return Response({'status': True}) - + subdomain_ids = get_data_from_post_request(request, 'subdomain_ids') + try: + subdomain_ids = [int(id) for id in subdomain_ids] + Subdomain.objects.filter(id__in=subdomain_ids).delete() + return Response({'status': True}) + except ValueError: + return Response({'status': False, 'message': 'Invalid subdomain ID provided'}, status=400) + except Exception as e: + # logger.debug() returns None, so log the error and return a plain message instead + logger.error(f"Error in DeleteSubdomain: {str(e)}") + return Response({'status': False, 'message': 'An error occurred while deleting subdomains.'}, status=500) class DeleteVulnerability(APIView): def post(self, request): - req = self.request - for id in req.data['vulnerability_ids']: - Vulnerability.objects.get(id=id).delete() - return Response({'status': True}) + vulnerability_ids = get_data_from_post_request(request, 'vulnerability_ids') + # Check if vulnerability_ids is iterable + if not isinstance(vulnerability_ids, (list, tuple)): + return Response({'status': False, 'message': 'vulnerability_ids must be a list or tuple'}, status=400) + + try: + # Convert to integers + vulnerability_ids = [int(id) for id in vulnerability_ids] + # Delete vulnerabilities + Vulnerability.objects.filter(id__in=vulnerability_ids).delete() + return Response({'status': True}) + except ValueError: + return Response({'status': False, 'message': 'Invalid vulnerability ID provided'}, status=400) class ListInterestingKeywords(APIView): def get(self, request, format=None): @@ -804,8 +957,8 @@ def get(self, request): # for consistency remove v from both if exists latest_version = re.search(r'v(\d+\.)?(\d+\.)?(\*|\d+)', - ((response[0]['name' - ])[1:] if response[0]['name'][0] == 'v' + ((response[0]['name' + ])[1:] if response[0]['name'][0] == 'v' else response[0]['name'])) latest_version = latest_version.group(0) if latest_version else None @@ -831,7 +984,7 @@ def get(self, request): class UninstallTool(APIView): def get(self, request): req = self.request - tool_id = req.query_params.get('tool_id') + tool_id = safe_int_cast(req.query_params.get('tool_id')) tool_name = req.query_params.get('name') if tool_id: @@ -869,7 +1022,7 @@ def get(self, request): class UpdateTool(APIView): def get(self, request): req = self.request - tool_id = req.query_params.get('tool_id') + tool_id = safe_int_cast(req.query_params.get('tool_id')) tool_name = req.query_params.get('name') if tool_id: @@ -887,7 +1040,7 @@ def get(self, request): elif update_command == 'git pull': tool_name = tool.install_command[:-1] if tool.install_command[-1] == '/' else tool.install_command tool_name = tool_name.split('/')[-1] - update_command = 'cd /usr/src/github/' + tool_name + ' && git pull && cd -' + update_command = 'cd ' + str(Path(RENGINE_TOOL_GITHUB_PATH) / tool_name) + ' && git pull && cd -' run_command(update_command) run_command.apply_async(args=(update_command,)) @@ -898,7 +1051,7 @@ class GetExternalToolCurrentVersion(APIView): def get(self, request): req = self.request # toolname is also the command - tool_id = req.query_params.get('tool_id') + tool_id = safe_int_cast(req.query_params.get('tool_id')) tool_name = req.query_params.get('name') # can supply either tool id or tool_name @@ -930,7 +1083,7 @@ class GithubToolCheckGetLatestRelease(APIView): 
def get(self, request): req = self.request - tool_id = req.query_params.get('tool_id') + tool_id = safe_int_cast(req.query_params.get('tool_id')) tool_name = req.query_params.get('name') if not InstalledExternalTool.objects.filter(id=tool_id).exists(): @@ -947,7 +1100,7 @@ def get(self, request): # if tool_github_url has https://github.com/ remove and also remove trailing / tool_github_url = tool.github_url.replace('http://github.com/', '').replace('https://github.com/', '') tool_github_url = remove_lead_and_trail_slash(tool_github_url) - github_api = 'https://api.github.com/repos/{}/releases'.format(tool_github_url) + github_api = f'https://api.github.com/repos/{tool_github_url}/releases' response = requests.get(github_api).json() # check if api rate limit exceeded if 'message' in response and response['message'] == 'RateLimited': @@ -956,7 +1109,7 @@ def get(self, request): return Response({'status': False, 'message': 'Not Found'}) elif not response: return Response({'status': False, 'message': 'Not Found'}) - + # only send latest release response = response[0] @@ -1060,59 +1213,27 @@ def get(self, request): class CMSDetector(APIView): def get(self, request): - req = self.request - url = req.query_params.get('url') - #save_db = True if 'save_db' in req.query_params else False - response = {'status': False} + url = request.query_params.get('url') + if not url: + return Response({'status': False, 'message': 'URL parameter is missing'}) + try: - # response = get_cms_details(url) - response = {} - cms_detector_command = f'python3 /usr/src/github/CMSeeK/cmseek.py' - cms_detector_command += ' --random-agent --batch --follow-redirect' - cms_detector_command += f' -u {url}' - - _, output = run_command(cms_detector_command, remove_ansi_sequence=True) - - response['message'] = 'Could not detect CMS!' 
- - parsed_url = urlparse(url) - - domain_name = parsed_url.hostname - port = parsed_url.port - - find_dir = domain_name - - if port: - find_dir += '_{}'.format(port) - # look for result path in output - path_regex = r"Result: (\/usr\/src[^\"\s]*)" - match = re.search(path_regex, output) - if match: - cms_json_path = match.group(1) - if os.path.isfile(cms_json_path): - cms_file_content = json.loads(open(cms_json_path, 'r').read()) - if not cms_file_content.get('cms_id'): - return response - response = {} - response = cms_file_content - response['status'] = True - try: - # remove results - cms_dir_path = os.path.dirname(cms_json_path) - shutil.rmtree(cms_dir_path) - except Exception as e: - logger.error(e) - return Response(response) - return Response(response) - except Exception as e: - response = {'status': False, 'message': str(e)} - return Response(response) + task = run_cmseek.delay(url) + result = task.get(timeout=300) # 5 minutes timeout + if result['status']: + return Response(result) + else: + return Response({'status': False, 'message': 'Could not detect CMS!'}) + except Exception as e: + logger.error(f"Error in CMSDetector: {str(e)}") + return Response({'status': False, 'message': 'An unexpected error occurred.'}, status=500) class IPToDomain(APIView): def get(self, request): req = self.request ip_address = req.query_params.get('ip_address') + response = {} if not ip_address: return Response({ 'status': False, @@ -1120,27 +1241,29 @@ def get(self, request): }) try: logger.info(f'Resolving IP address {ip_address} ...') - domain, domains, ips = socket.gethostbyaddr(ip_address) - response = { - 'status': True, - 'ip_address': ip_address, - 'domains': domains or [domain], - 'resolves_to': domain - } - except socket.herror: # ip does not have a PTR record - logger.info(f'No PTR record for {ip_address}') + resolved_ips = [] + for ip in IPv4Network(ip_address, False): + domains = [] + ips = [] + try: + (domain, domains, ips) = socket.gethostbyaddr(str(ip)) + except socket.herror: + logger.info(f'No PTR record for {ip_address}') + domain = str(ip) + if domain not in domains: + domains.append(domain) + resolved_ips.append({'ip': str(ip),'domain': domain, 'domains': domains, 'ips': ips}) response = { 'status': True, - 'ip_address': ip_address, - 'domains': [ip_address], - 'resolves_to': ip_address + 'orig': ip_address, + 'ip_address': resolved_ips, } except Exception as e: logger.exception(e) response = { 'status': False, 'ip_address': ip_address, - 'message': 'Exception {}'.format(e) + 'message': f'Exception {e}' } finally: return Response(response) @@ -1149,7 +1272,7 @@ def get(self, request): class VulnerabilityReport(APIView): def get(self, request): req = self.request - vulnerability_id = req.query_params.get('vulnerability_id') + vulnerability_id = safe_int_cast(req.query_params.get('vulnerability_id')) return Response({"status": send_hackerone_report(vulnerability_id)}) @@ -1162,60 +1285,61 @@ def get(self, request, format=None): response['status'] = False if 'nuclei_config' in req.query_params: - path = "/root/.config/nuclei/config.yaml" + path = str(Path.home() / ".config" / "nuclei" / "config.yaml") if not os.path.exists(path): run_command(f'touch {path}') response['message'] = 'File Created!' 
- f = open(path, "r") - response['status'] = True - response['content'] = f.read() + with open(path, "r") as f: + response['status'] = True + response['content'] = f.read() return Response(response) if 'subfinder_config' in req.query_params: - path = "/root/.config/subfinder/config.yaml" + path = str(Path.home() / ".config" / "subfinder" / "config.yaml") if not os.path.exists(path): run_command(f'touch {path}') response['message'] = 'File Created!' - f = open(path, "r") - response['status'] = True - response['content'] = f.read() + with open(path, "r") as f: + response['status'] = True + response['content'] = f.read() return Response(response) if 'naabu_config' in req.query_params: - path = "/root/.config/naabu/config.yaml" + path = str(Path.home() / ".config" / "naabu" / "config.yaml") if not os.path.exists(path): run_command(f'touch {path}') response['message'] = 'File Created!' - f = open(path, "r") - response['status'] = True - response['content'] = f.read() + with open(path, "r") as f: + response['status'] = True + response['content'] = f.read() return Response(response) if 'theharvester_config' in req.query_params: - path = "/usr/src/github/theHarvester/api-keys.yaml" + path = str(Path.home() / ".config" / 'theHarvester' / 'api-keys.yaml') if not os.path.exists(path): run_command(f'touch {path}') response['message'] = 'File Created!' - f = open(path, "r") - response['status'] = True - response['content'] = f.read() + with open(path, "r") as f: + response['status'] = True + response['content'] = f.read() return Response(response) if 'amass_config' in req.query_params: - path = "/root/.config/amass.ini" + path = str(Path.home() / ".config" / "amass.ini") if not os.path.exists(path): run_command(f'touch {path}') response['message'] = 'File Created!' - f = open(path, "r") - response['status'] = True - response['content'] = f.read() + with open(path, "r") as f: + response['status'] = True + response['content'] = f.read() return Response(response) if 'gf_pattern' in req.query_params: - basedir = '/root/.gf' - path = f'/root/.gf/{name}.json' + basedir = str(Path.home() / '.gf') + path = str(Path.home() / '.gf' / f'{name}.json') if is_safe_path(basedir, path) and os.path.exists(path): - content = open(path, "r").read() + with open(path, "r") as f: + content = f.read() response['status'] = True response['content'] = content else: @@ -1225,10 +1349,11 @@ def get(self, request, format=None): if 'nuclei_template' in req.query_params: - safe_dir = '/root/nuclei-templates' - path = f'/root/nuclei-templates/{name}' + safe_dir = str(Path.home() / 'nuclei-templates') + path = str(Path.home() / 'nuclei-templates' / f'{name}') if is_safe_path(safe_dir, path) and os.path.exists(path): - content = open(path.format(name), "r").read() + with open(path.format(name), "r") as f: + content = f.read() response['status'] = True response['content'] = content else: @@ -1236,21 +1361,44 @@ def get(self, request, format=None): response['status'] = False return Response(response) + if 'gau_config' in req.query_params: + path = str(Path.home() / ".config" / '.gau.toml') + if not os.path.exists(path): + run_command(f'touch {path}') + response['message'] = 'File Created!' 
+ with open(path, "r") as f: + response['status'] = True + response['content'] = f.read() + return Response(response) + response['message'] = 'Invalid Query Params' return Response(response) +class GfList(APIView): + def get(self, request): + try: + task = run_gf_list.delay() + result = task.get(timeout=30) # 30 seconds timeout + + if result['status']: + return Response(result['output']) + else: + return Response({'error': result['message']}, status=500) + except Exception as e: + logger.error(f"Error in GfList: {str(e)}") # Log the exception for internal tracking + return Response({'error': 'An unexpected error occurred. Please try again later.'}, status=500) class ListTodoNotes(APIView): def get(self, request, format=None): req = self.request notes = TodoNote.objects.all().order_by('-id') - scan_id = req.query_params.get('scan_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) project = req.query_params.get('project') if project: notes = notes.filter(project__slug=project) - target_id = req.query_params.get('target_id') + target_id = safe_int_cast(req.query_params.get('target_id')) todo_id = req.query_params.get('todo_id') - subdomain_id = req.query_params.get('subdomain_id') + subdomain_id = safe_int_cast(req.query_params.get('subdomain_id')) if target_id: notes = notes.filter(scan_history__in=ScanHistory.objects.filter(domain__id=target_id)) elif scan_id: @@ -1284,7 +1432,6 @@ def get(self, request, format=None): class ListOrganizations(APIView): def get(self, request, format=None): - req = self.request organizations = Organization.objects.all() organization_serializer = OrganizationSerializer(organizations, many=True) return Response({'organizations': organization_serializer.data}) @@ -1293,7 +1440,7 @@ def get(self, request, format=None): class ListTargetsInOrganization(APIView): def get(self, request, format=None): req = self.request - organization_id = req.query_params.get('organization_id') + organization_id = safe_int_cast(req.query_params.get('organization_id')) organization = Organization.objects.filter(id=organization_id) targets = Domain.objects.filter(domains__in=organization) organization_serializer = OrganizationSerializer(organization, many=True) @@ -1312,47 +1459,71 @@ def get(self, request, format=None): class VisualiseData(APIView): def get(self, request, format=None): req = self.request - scan_id = req.query_params.get('scan_id') - if scan_id: + if scan_id := safe_int_cast(req.query_params.get('scan_id')): mitch_data = ScanHistory.objects.filter(id=scan_id) serializer = VisualiseDataSerializer(mitch_data, many=True) - return Response(serializer.data) + + # Data processing to remove duplicates + processed_data = self.process_visualisation_data(serializer.data) + + return Response(processed_data) else: return Response() + def process_visualisation_data(self, data): + if not data: + return [] + + processed_data = data[0] # Assuming there's only one element in data + subdomains = processed_data.get('subdomains', []) + + # Use a dictionary to group vulnerabilities by subdomain + vuln_by_subdomain = defaultdict(list) + + for subdomain in subdomains: + subdomain_name = subdomain['name'] + vulnerabilities = subdomain.get('vulnerabilities', []) + + # Group unique vulnerabilities + unique_vulns = {} + for vuln in vulnerabilities: + vuln_key = (vuln['name'], vuln['severity']) + if vuln_key not in unique_vulns: + unique_vulns[vuln_key] = vuln + + vuln_by_subdomain[subdomain_name].extend(unique_vulns.values()) + + # Update subdomains with unique vulnerabilities + for 
subdomain in subdomains: + subdomain['vulnerabilities'] = vuln_by_subdomain[subdomain['name']] + + return processed_data class ListTechnology(APIView): def get(self, request, format=None): req = self.request - scan_id = req.query_params.get('scan_id') - target_id = req.query_params.get('target_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) - if target_id: - tech = Technology.objects.filter( - technologies__in=Subdomain.objects.filter( - target_domain__id=target_id)).annotate( - count=Count('name')).order_by('-count') - serializer = TechnologyCountSerializer(tech, many=True) - return Response({"technologies": serializer.data}) + # Determine the queryset based on the presence of target_id or scan_id + if target_id := safe_int_cast(req.query_params.get('target_id')): + subdomain_filter = Subdomain.objects.filter(target_domain__id=target_id) elif scan_id: - tech = Technology.objects.filter( - technologies__in=Subdomain.objects.filter( - scan_history__id=scan_id)).annotate( - count=Count('name')).order_by('-count') - serializer = TechnologyCountSerializer(tech, many=True) - return Response({"technologies": serializer.data}) + subdomain_filter = Subdomain.objects.filter(scan_history__id=scan_id) else: - tech = Technology.objects.filter( - technologies__in=Subdomain.objects.all()).annotate( - count=Count('name')).order_by('-count') - serializer = TechnologyCountSerializer(tech, many=True) - return Response({"technologies": serializer.data}) + subdomain_filter = Subdomain.objects.all() + + # Fetch technologies and serialize the results + tech = Technology.objects.filter(technologies__in=subdomain_filter).annotate( + count=Count('name')).order_by('-count') + serializer = TechnologyCountSerializer(tech, many=True) + + return Response({"technologies": serializer.data}) class ListDorkTypes(APIView): def get(self, request, format=None): req = self.request - scan_id = req.query_params.get('scan_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) if scan_id: dork = Dork.objects.filter( dorks__in=ScanHistory.objects.filter(id=scan_id) @@ -1370,7 +1541,7 @@ def get(self, request, format=None): class ListEmails(APIView): def get(self, request, format=None): req = self.request - scan_id = req.query_params.get('scan_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) if scan_id: email = Email.objects.filter( emails__in=ScanHistory.objects.filter(id=scan_id)).order_by('password') @@ -1381,7 +1552,7 @@ def get(self, request, format=None): class ListDorks(APIView): def get(self, request, format=None): req = self.request - scan_id = req.query_params.get('scan_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) type = req.query_params.get('type') if scan_id: dork = Dork.objects.filter( @@ -1404,7 +1575,7 @@ def get(self, request, format=None): class ListEmployees(APIView): def get(self, request, format=None): req = self.request - scan_id = req.query_params.get('scan_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) if scan_id: employee = Employee.objects.filter( employees__in=ScanHistory.objects.filter(id=scan_id)) @@ -1415,8 +1586,8 @@ def get(self, request, format=None): class ListPorts(APIView): def get(self, request, format=None): req = self.request - scan_id = req.query_params.get('scan_id') - target_id = req.query_params.get('target_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) + target_id = safe_int_cast(req.query_params.get('target_id')) ip_address = req.query_params.get('ip_address') if target_id: @@ -1444,9 +1615,9 @@ 
def get(self, request, format=None): class ListSubdomains(APIView): def get(self, request, format=None): req = self.request - scan_id = req.query_params.get('scan_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) project = req.query_params.get('project') - target_id = req.query_params.get('target_id') + target_id = safe_int_cast(req.query_params.get('target_id')) ip_address = req.query_params.get('ip_address') port = req.query_params.get('port') tech = req.query_params.get('tech') @@ -1503,7 +1674,7 @@ def post(self, req): class ListOsintUsers(APIView): def get(self, request, format=None): req = self.request - scan_id = req.query_params.get('scan_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) if scan_id: documents = MetaFinderDocument.objects.filter(scan_history__id=scan_id).exclude(author__isnull=True).values('author').distinct() serializer = MetafinderUserSerializer(documents, many=True) @@ -1513,7 +1684,7 @@ def get(self, request, format=None): class ListMetadata(APIView): def get(self, request, format=None): req = self.request - scan_id = req.query_params.get('scan_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) if scan_id: documents = MetaFinderDocument.objects.filter(scan_history__id=scan_id).distinct() serializer = MetafinderDocumentSerializer(documents, many=True) @@ -1523,8 +1694,8 @@ def get(self, request, format=None): class ListIPs(APIView): def get(self, request, format=None): req = self.request - scan_id = req.query_params.get('scan_id') - target_id = req.query_params.get('target_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) + target_id = safe_int_cast(req.query_params.get('target_id')) port = req.query_params.get('port') @@ -1553,10 +1724,11 @@ def get(self, request, format=None): class IpAddressViewSet(viewsets.ModelViewSet): queryset = Subdomain.objects.none() serializer_class = IpSubdomainSerializer + ordering = ('name',) def get_queryset(self): req = self.request - scan_id = req.query_params.get('scan_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) if scan_id: self.queryset = Subdomain.objects.filter( @@ -1571,16 +1743,17 @@ def paginate_queryset(self, queryset, view=None): if 'no_page' in self.request.query_params: return None return self.paginator.paginate_queryset( - queryset, self.request, view=self) + queryset.order_by(*self.ordering), self.request, view=self) class SubdomainsViewSet(viewsets.ModelViewSet): queryset = Subdomain.objects.none() serializer_class = SubdomainSerializer + ordering = ('name',) def get_queryset(self): req = self.request - scan_id = req.query_params.get('scan_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) if scan_id: if 'only_screenshot' in self.request.query_params: return ( @@ -1593,7 +1766,7 @@ def paginate_queryset(self, queryset, view=None): if 'no_page' in self.request.query_params: return None return self.paginator.paginate_queryset( - queryset, self.request, view=self) + queryset.order_by(*self.ordering), self.request, view=self) class SubdomainChangesViewSet(viewsets.ModelViewSet): @@ -1605,12 +1778,13 @@ class SubdomainChangesViewSet(viewsets.ModelViewSet): ''' queryset = Subdomain.objects.none() serializer_class = SubdomainChangesSerializer + ordering = ('name',) def get_queryset(self): req = self.request - scan_id = req.query_params.get('scan_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) changes = req.query_params.get('changes') - domain_id = ScanHistory.objects.filter(id=scan_id)[0].domain.id + domain_id = 
safe_int_cast(ScanHistory.objects.filter(id=safe_int_cast(scan_id)).first().domain.id) scan_history_query = ( ScanHistory.objects .filter(domain=domain_id) @@ -1675,7 +1849,7 @@ def paginate_queryset(self, queryset, view=None): if 'no_page' in self.request.query_params: return None return self.paginator.paginate_queryset( - queryset, self.request, view=self) + queryset.order_by(*self.ordering), self.request, view=self) class EndPointChangesViewSet(viewsets.ModelViewSet): @@ -1684,12 +1858,13 @@ class EndPointChangesViewSet(viewsets.ModelViewSet): ''' queryset = EndPoint.objects.none() serializer_class = EndPointChangesSerializer + ordering = ('http_url',) def get_queryset(self): req = self.request - scan_id = req.query_params.get('scan_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) changes = req.query_params.get('changes') - domain_id = ScanHistory.objects.filter(id=scan_id).first().domain.id + domain_id = safe_int_cast(ScanHistory.objects.filter(id=scan_id).first().domain.id) scan_history = ( ScanHistory.objects .filter(domain=domain_id) @@ -1746,17 +1921,18 @@ def paginate_queryset(self, queryset, view=None): if 'no_page' in self.request.query_params: return None return self.paginator.paginate_queryset( - queryset, self.request, view=self) + queryset.order_by(*self.ordering), self.request, view=self) class InterestingSubdomainViewSet(viewsets.ModelViewSet): queryset = Subdomain.objects.none() serializer_class = SubdomainSerializer + ordering = ('name',) def get_queryset(self): req = self.request - scan_id = req.query_params.get('scan_id') - domain_id = req.query_params.get('target_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) + domain_id = safe_int_cast(req.query_params.get('target_id')) if 'only_subdomains' in self.request.query_params: self.serializer_class = InterestingSubdomainSerializer @@ -1786,7 +1962,7 @@ def filter_queryset(self, qs): order_col = 'content_length' if _order_direction == 'desc': - order_col = '-{}'.format(order_col) + order_col = f'-{order_col}' if search_value: qs = self.queryset.filter( @@ -1800,17 +1976,19 @@ def paginate_queryset(self, queryset, view=None): if 'no_page' in self.request.query_params: return None return self.paginator.paginate_queryset( - queryset, self.request, view=self) + queryset.order_by(*self.ordering), self.request, view=self) class InterestingEndpointViewSet(viewsets.ModelViewSet): queryset = EndPoint.objects.none() serializer_class = EndpointSerializer + ordering = ('http_url',) def get_queryset(self): req = self.request - scan_id = req.query_params.get('scan_id') - target_id = req.query_params.get('target_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) + target_id = safe_int_cast(req.query_params.get('target_id')) + if 'only_endpoints' in self.request.query_params: self.serializer_class = InterestingEndPointSerializer if scan_id: @@ -1824,7 +2002,7 @@ def paginate_queryset(self, queryset, view=None): if 'no_page' in self.request.query_params: return None return self.paginator.paginate_queryset( - queryset, self.request, view=self) + queryset.order_by(*self.ordering), self.request, view=self) class SubdomainDatatableViewSet(viewsets.ModelViewSet): @@ -1833,8 +2011,8 @@ class SubdomainDatatableViewSet(viewsets.ModelViewSet): def get_queryset(self): req = self.request - scan_id = req.query_params.get('scan_id') - target_id = req.query_params.get('target_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) + target_id = safe_int_cast(req.query_params.get('target_id')) url_query 
= req.query_params.get('query_param') ip_address = req.query_params.get('ip_address') name = req.query_params.get('name') @@ -1842,6 +2020,9 @@ def get_queryset(self): subdomains = Subdomain.objects.filter(target_domain__project__slug=project) + if 'is_important' in req.query_params: + subdomains = subdomains.filter(is_important=True) + if target_id: self.queryset = ( subdomains @@ -1893,7 +2074,7 @@ def filter_queryset(self, qs): elif _order_col == '10': order_col = 'response_time' if _order_direction == 'desc': - order_col = '-{}'.format(order_col) + order_col = f'-{order_col}' # if the search query is separated by = means, it is a specific lookup # divide the search query into two half and lookup if search_value: @@ -2086,8 +2267,8 @@ class ListActivityLogsViewSet(viewsets.ModelViewSet): queryset = Command.objects.none() def get_queryset(self): req = self.request - activity_id = req.query_params.get('activity_id') - self.queryset = Command.objects.filter(activity__id=activity_id) + activity_id = safe_int_cast(req.query_params.get('activity_id')) + self.queryset = Command.objects.filter(activity__id=activity_id).order_by('id') return self.queryset @@ -2096,8 +2277,8 @@ class ListScanLogsViewSet(viewsets.ModelViewSet): queryset = Command.objects.none() def get_queryset(self): req = self.request - scan_id = req.query_params.get('scan_id') - self.queryset = Command.objects.filter(scan_history__id=scan_id) + scan_id = safe_int_cast(req.query_params.get('scan_id')) + self.queryset = Command.objects.filter(scan_history__id=scan_id).order_by('id') return self.queryset @@ -2105,8 +2286,8 @@ class ListEndpoints(APIView): def get(self, request, format=None): req = self.request - scan_id = req.query_params.get('scan_id') - target_id = req.query_params.get('target_id') + scan_id = safe_int_cast(req.query_params.get('scan_id')) + target_id = safe_int_cast(req.query_params.get('target_id')) subdomain_name = req.query_params.get('subdomain_name') pattern = req.query_params.get('pattern') @@ -2146,10 +2327,10 @@ class EndPointViewSet(viewsets.ModelViewSet): def get_queryset(self): req = self.request - scan_id = req.query_params.get('scan_history') - target_id = req.query_params.get('target_id') + scan_id = safe_int_cast(req.query_params.get('scan_history')) + target_id = safe_int_cast(req.query_params.get('target_id')) url_query = req.query_params.get('query_param') - subdomain_id = req.query_params.get('subdomain_id') + subdomain_id = safe_int_cast(req.query_params.get('subdomain_id')) project = req.query_params.get('project') endpoints_obj = EndPoint.objects.filter(scan_history__domain__project__slug=project) @@ -2162,15 +2343,17 @@ def get_queryset(self): endpoints_obj .filter(scan_history__id=scan_id) .distinct() + .order_by('id') ) else: - endpoints = endpoints_obj.distinct() + endpoints = endpoints_obj.distinct().order_by('id') if url_query: endpoints = ( endpoints .filter(Q(target_domain__name=url_query)) .distinct() + .order_by('id') ) if gf_tag: @@ -2223,7 +2406,7 @@ def filter_queryset(self, qs): elif _order_col == '9': order_col = 'response_time' if _order_direction == 'desc': - order_col = '-{}'.format(order_col) + order_col = f'-{order_col}' # if the search query is separated by = means, it is a specific lookup # divide the search query into two half and lookup if '=' in search_value or '&' in search_value or '|' in search_value or '>' in search_value or '<' in search_value or '!' 
in search_value: @@ -2248,13 +2431,13 @@ def filter_queryset(self, qs): def general_lookup(self, search_value): return \ self.queryset.filter(Q(http_url__icontains=search_value) | - Q(page_title__icontains=search_value) | - Q(http_status__icontains=search_value) | - Q(content_type__icontains=search_value) | - Q(webserver__icontains=search_value) | - Q(techs__name__icontains=search_value) | - Q(content_type__icontains=search_value) | - Q(matched_gf_patterns__icontains=search_value)) + Q(page_title__icontains=search_value) | + Q(http_status__icontains=search_value) | + Q(content_type__icontains=search_value) | + Q(webserver__icontains=search_value) | + Q(techs__name__icontains=search_value) | + Q(content_type__icontains=search_value) | + Q(matched_gf_patterns__icontains=search_value)) def special_lookup(self, search_value): qs = self.queryset.filter() @@ -2381,34 +2564,43 @@ def special_lookup(self, search_value): print(e) return qs - class DirectoryViewSet(viewsets.ModelViewSet): - queryset = DirectoryFile.objects.none() - serializer_class = DirectoryFileSerializer + queryset = DirectoryFile.objects.none() + serializer_class = DirectoryFileSerializer + + def get_queryset(self): + req = self.request + scan_id = safe_int_cast(req.query_params.get('scan_history')) + subdomain_id = safe_int_cast(req.query_params.get('subdomain_id')) + + if not (scan_id or subdomain_id): + # get_queryset must return a queryset, not a Response; missing params yield an empty result + return DirectoryFile.objects.none() + + subdomains = Subdomain.objects.filter(scan_history__id=scan_id) if scan_id else \ + Subdomain.objects.filter(id=subdomain_id) + dirs_scans = DirectoryScan.objects.filter(directories__in=subdomains) + + return DirectoryFile.objects.filter(directory_files__in=dirs_scans) \ + .distinct() \ + .order_by('id') + +class ProjectViewSet(viewsets.ModelViewSet): + serializer_class = ProjectSerializer + permission_classes = [IsAuthenticated] def get_queryset(self): - req = self.request - scan_id = req.query_params.get('scan_history') - subdomain_id = req.query_params.get('subdomain_id') - subdomains = None - if not (scan_id or subdomain_id): - return Response({ - 'status': False, - 'message': 'Scan id or subdomain id must be provided.'
- }) - elif scan_id: - subdomains = Subdomain.objects.filter(scan_history__id=scan_id) - elif subdomain_id: - subdomains = Subdomain.objects.filter(id=subdomain_id) - dirs_scans = DirectoryScan.objects.filter(directories__in=subdomains) - qs = ( - DirectoryFile.objects - .filter(directory_files__in=dirs_scans) - .distinct() - ) - self.queryset = qs - return self.queryset + return Project.objects.filter(user=self.request.user) + + def perform_create(self, serializer): + serializer.save(user=self.request.user) + def perform_update(self, serializer): + if serializer.instance.user != self.request.user: + raise PermissionDenied("You don't have permission to modify this project.") + serializer.save() class VulnerabilityViewSet(viewsets.ModelViewSet): queryset = Vulnerability.objects.none() @@ -2416,11 +2608,11 @@ class VulnerabilityViewSet(viewsets.ModelViewSet): def get_queryset(self): req = self.request - scan_id = req.query_params.get('scan_history') - target_id = req.query_params.get('target_id') + scan_id = safe_int_cast(req.query_params.get('scan_history')) + target_id = safe_int_cast(req.query_params.get('target_id')) domain = req.query_params.get('domain') severity = req.query_params.get('severity') - subdomain_id = req.query_params.get('subdomain_id') + subdomain_id = safe_int_cast(req.query_params.get('subdomain_id')) subdomain_name = req.query_params.get('subdomain') vulnerability_name = req.query_params.get('vulnerability_name') slug = self.request.GET.get('project', None) @@ -2673,8 +2865,8 @@ def special_lookup(self, search_value): qs = ( self.queryset .exclude(Q(description__icontains=lookup_content) | - Q(template__icontains=lookup_content) | - Q(extracted_results__icontains=lookup_content)) + Q(template__icontains=lookup_content) | + Q(extracted_results__icontains=lookup_content)) ) elif '>' in search_value: diff --git a/web/beat-entrypoint-dev.sh b/web/beat-entrypoint-dev.sh deleted file mode 100755 index 9aa31406d..000000000 --- a/web/beat-entrypoint-dev.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/bash - -if [ "$CELERY_DEBUG" == "1" ]; then - # Django debug toolbar - pip install django-debug-toolbar==4.3.0 - python3 manage.py collectstatic --noinput -fi - -# Check if remote debugging is enabled and set concurrency to 1 for easier debug -if [ "$CELERY_REMOTE_DEBUG" == "1" ]; then - # Live debug - pip install debugpy - - # To debug opened port with netstat - apt install net-tools -y -fi - -./beat-entrypoint.sh \ No newline at end of file diff --git a/web/beat-entrypoint.sh b/web/beat-entrypoint.sh deleted file mode 100755 index a7fbe7738..000000000 --- a/web/beat-entrypoint.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash - -python3 manage.py migrate - -exec "$@" diff --git a/web/celery-entrypoint.sh b/web/celery-entrypoint.sh deleted file mode 100755 index 223c5cf7c..000000000 --- a/web/celery-entrypoint.sh +++ /dev/null @@ -1,217 +0,0 @@ -#!/bin/bash - -python3 manage.py makemigrations -python3 manage.py migrate -python3 manage.py collectstatic --no-input --clear - -# Load default engines, keywords, and external tools -python3 manage.py loaddata fixtures/default_scan_engines.yaml --app scanEngine.EngineType -python3 manage.py loaddata fixtures/default_keywords.yaml --app scanEngine.InterestingLookupModel -python3 manage.py loaddata fixtures/external_tools.yaml --app scanEngine.InstalledExternalTool - -# install firefox https://askubuntu.com/a/1404401 -echo ' -Package: * -Pin: release o=LP-PPA-mozillateam -Pin-Priority: 1001 - -Package: firefox -Pin: version 1:1snap1-0ubuntu2 
-Pin-Priority: -1 -' | tee /etc/apt/preferences.d/mozilla-firefox -apt update -apt install firefox -y - -# Temporary fix for whatportis bug - See https://github.com/yogeshojha/rengine/issues/984 -sed -i 's/purge()/truncate()/g' /usr/local/lib/python3.10/dist-packages/whatportis/cli.py - -# update whatportis -yes | whatportis --update - -# clone dirsearch default wordlist -if [ ! -d "/usr/src/wordlist" ] -then - echo "Making Wordlist directory" - mkdir /usr/src/wordlist -fi - -if [ ! -f "/usr/src/wordlist/" ] -then - echo "Downloading Default Directory Bruteforce Wordlist" - wget https://raw.githubusercontent.com/maurosoria/dirsearch/master/db/dicc.txt -O /usr/src/wordlist/dicc.txt -fi - -# check if default wordlist for amass exists -if [ ! -f /usr/src/wordlist/deepmagic.com-prefixes-top50000.txt ]; -then - echo "Downloading Deepmagic top 50000 Wordlist" - wget https://raw.githubusercontent.com/danielmiessler/SecLists/master/Discovery/DNS/deepmagic.com-prefixes-top50000.txt -O /usr/src/wordlist/deepmagic.com-prefixes-top50000.txt -fi - -# clone Sublist3r -if [ ! -d "/usr/src/github/Sublist3r" ] -then - echo "Cloning Sublist3r" - git clone https://github.com/aboul3la/Sublist3r /usr/src/github/Sublist3r --quiet -else - echo "Updating Sublist3r" - git -C /usr/src/github/Sublist3r/ pull --quiet -fi -echo "Installing dependencies..." -python3 -m pip -q install -r /usr/src/github/Sublist3r/requirements.txt - -# clone OneForAll -if [ ! -d "/usr/src/github/OneForAll" ] -then - echo "Cloning OneForAll" - git clone https://github.com/shmilylty/OneForAll /usr/src/github/OneForAll --quiet -else - echo "Updating OneForAll" - git -C /usr/src/github/OneForAll/ pull --quiet -fi -echo "Installing dependencies..." -python3 -m pip -q install -r /usr/src/github/OneForAll/requirements.txt - -# clone eyewitness -if [ ! -d "/usr/src/github/EyeWitness" ] -then - echo "Cloning EyeWitness" - git clone https://github.com/FortyNorthSecurity/EyeWitness /usr/src/github/EyeWitness --quiet -else - echo "Updating EyeWitness" - git -C /usr/src/github/EyeWitness/ pull --quiet -fi - -# clone theHarvester -if [ ! -d "/usr/src/github/theHarvester" ] -then - echo "Cloning theHarvester" - git clone https://github.com/laramies/theHarvester /usr/src/github/theHarvester --quiet -else - echo "Updating theHarvester" - git -C /usr/src/github/theHarvester/ pull --quiet -fi -echo "Installing dependencies..." -python3 -m pip -q install -r /usr/src/github/theHarvester/requirements/base.txt - -# clone vulscan -if [ ! -d "/usr/src/github/scipag_vulscan" ] -then - echo "Cloning Nmap Vulscan script" - git clone https://github.com/scipag/vulscan /usr/src/github/scipag_vulscan --quiet - echo "Symlinking to nmap script dir" - ln -s /usr/src/github/scipag_vulscan /usr/share/nmap/scripts/vulscan -else - echo "Updating Nmap Vulscan script" - git -C /usr/src/github/scipag_vulscan/ pull --quiet -fi -echo "Usage in reNgine, set vulscan/vulscan.nse in nmap_script scanEngine port_scan config parameter" - -# install h8mail -echo "Installing dependencies..." -python3 -m pip -q install h8mail - -# install gf patterns -if [ ! -d "/root/Gf-Patterns" ]; -then - echo "Installing GF Patterns" - mkdir -p ~/.gf - cp -r $GOPATH/pkg/mod/github.com/tomnomnom/gf*/examples/*.json ~/.gf - git clone https://github.com/1ndianl33t/Gf-Patterns ~/Gf-Patterns --quiet -else - echo "Updating GF Patterns" - git -C /root/Gf-Patterns/ pull --quiet -fi -mv ~/Gf-Patterns/*.json ~/.gf - -# store scan_results -if [ ! 
-d "/usr/src/scan_results" ] -then - mkdir /usr/src/scan_results -fi - -# test tools, required for configuration -naabu;subfinder;amass;nuclei - -if [ ! -d "/root/nuclei-templates/geeknik_nuclei_templates" ]; -then - echo "Installing Geeknik Nuclei templates" - git clone https://github.com/geeknik/the-nuclei-templates.git ~/nuclei-templates/geeknik_nuclei_templates --quiet -else - echo "Removing old Geeknik Nuclei templates and updating new one" - rm -rf ~/nuclei-templates/geeknik_nuclei_templates - git clone https://github.com/geeknik/the-nuclei-templates.git ~/nuclei-templates/geeknik_nuclei_templates --quiet -fi - -echo "Downloading ssrf_nagli for Nuclei" -wget https://raw.githubusercontent.com/NagliNagli/BountyTricks/main/ssrf.yaml -O ~/nuclei-templates/ssrf_nagli.yaml - -if [ ! -d "/usr/src/github/CMSeeK" ] -then - echo "Cloning CMSeeK" - git clone https://github.com/Tuhinshubhra/CMSeeK /usr/src/github/CMSeeK --quiet -else - echo "Updating CMSeeK" - git -C /usr/src/github/CMSeeK/ pull --quiet -fi -echo "Installing dependencies..." -python3 -m pip -q install -r /usr/src/github/CMSeeK/requirements.txt - -# clone ctfr -if [ ! -d "/usr/src/github/ctfr" ] -then - echo "Cloning CTFR" - git clone https://github.com/UnaPibaGeek/ctfr /usr/src/github/ctfr --quiet -else - echo "Updating CTFR" - git -C /usr/src/github/ctfr/ pull --quiet -fi - -# clone gooFuzz -if [ ! -d "/usr/src/github/goofuzz" ] -then - echo "Cloning GooFuzz" - git clone https://github.com/m3n0sd0n4ld/GooFuzz.git /usr/src/github/goofuzz --quiet -else - echo "Updating GooFuzz" - git -C /usr/src/github/goofuzz/ pull --quiet -fi -chmod +x /usr/src/github/goofuzz/GooFuzz - -exec "$@" - -# httpx seems to have issue, use alias instead!!! -echo 'alias httpx="/go/bin/httpx"' >> ~/.bashrc - -# TEMPORARY FIX, httpcore is causing issues with celery, removing it as temp fix -python3 -m pip -q uninstall -y httpcore - -if [ ! "$CELERY_LOGLEVEL" ]; then - export CELERY_LOGLEVEL='info' -fi - -# watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --autoscale=10,0 -l INFO -Q scan_queue & -echo "Starting Workers..." 
-echo "Starting Main Scan Worker with Concurrency: $MAX_CONCURRENCY,$MIN_CONCURRENCY" -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --loglevel=$CELERY_LOGLEVEL --autoscale=$MAX_CONCURRENCY,$MIN_CONCURRENCY -Q main_scan_queue & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=30 --loglevel=$CELERY_LOGLEVEL -Q initiate_scan_queue -n initiate_scan_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=30 --loglevel=$CELERY_LOGLEVEL -Q subscan_queue -n subscan_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=20 --loglevel=$CELERY_LOGLEVEL -Q report_queue -n report_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$CELERY_LOGLEVEL -Q send_notif_queue -n send_notif_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$CELERY_LOGLEVEL -Q send_scan_notif_queue -n send_scan_notif_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$CELERY_LOGLEVEL -Q send_task_notif_queue -n send_task_notif_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=5 --loglevel=$CELERY_LOGLEVEL -Q send_file_to_discord_queue -n send_file_to_discord_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=5 --loglevel=$CELERY_LOGLEVEL -Q send_hackerone_report_queue -n send_hackerone_report_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$CELERY_LOGLEVEL -Q parse_nmap_results_queue -n parse_nmap_results_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=20 --loglevel=$CELERY_LOGLEVEL -Q geo_localize_queue -n geo_localize_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$CELERY_LOGLEVEL -Q query_whois_queue -n query_whois_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=30 --loglevel=$CELERY_LOGLEVEL -Q remove_duplicate_endpoints_queue -n remove_duplicate_endpoints_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=50 --loglevel=$CELERY_LOGLEVEL -Q run_command_queue -n run_command_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$CELERY_LOGLEVEL -Q query_reverse_whois_queue -n 
query_reverse_whois_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$CELERY_LOGLEVEL -Q query_ip_history_queue -n query_ip_history_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=30 --loglevel=$CELERY_LOGLEVEL -Q gpt_queue -n gpt_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$CELERY_LOGLEVEL -Q dorking_queue -n dorking_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$CELERY_LOGLEVEL -Q osint_discovery_queue -n osint_discovery_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$CELERY_LOGLEVEL -Q h8mail_queue -n h8mail_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$CELERY_LOGLEVEL -Q theHarvester_queue -n theHarvester_worker -exec "$@" diff --git a/web/scanEngine/templatetags/__init__.py b/web/commonFilters/__init__.py similarity index 100% rename from web/scanEngine/templatetags/__init__.py rename to web/commonFilters/__init__.py diff --git a/web/commonFilters/apps.py b/web/commonFilters/apps.py new file mode 100644 index 000000000..e6d8c370a --- /dev/null +++ b/web/commonFilters/apps.py @@ -0,0 +1,6 @@ +from django.apps import AppConfig + + +class CommonfiltersConfig(AppConfig): + default_auto_field = 'django.db.models.BigAutoField' + name = 'commonFilters' diff --git a/web/startScan/templatetags/__init__.py b/web/commonFilters/templatetags/__init__.py similarity index 100% rename from web/startScan/templatetags/__init__.py rename to web/commonFilters/templatetags/__init__.py diff --git a/web/startScan/templatetags/custom_tags.py b/web/commonFilters/templatetags/custom_filters.py similarity index 71% rename from web/startScan/templatetags/custom_tags.py rename to web/commonFilters/templatetags/custom_filters.py index 7e3474371..a1a6c201d 100644 --- a/web/startScan/templatetags/custom_tags.py +++ b/web/commonFilters/templatetags/custom_filters.py @@ -9,6 +9,9 @@ def split(value, key): return [x.strip() for x in value.split(key)] +@register.filter(name='map') +def map_filter(value, arg): + return [getattr(item, arg) for item in value] @register.filter(name='count') def count(value): @@ -19,7 +22,7 @@ def count(value): def getpath(value): parsed_url = urlparse(value) if parsed_url.query: - return parsed_url.path + '?' 
+ parsed_url.query + return f"{parsed_url.path}?{parsed_url.query}" else: return parsed_url.path @@ -51,3 +54,14 @@ def previous(some_list, current_index): return some_list[int(current_index) - 1] # access the previous element except: return '' # return empty string in case of exception + +@register.filter(name='get_user_role') +def get_user_role(user): + if user.groups.filter(name='sys_admin').exists(): + return 'sys_admin' + elif user.groups.filter(name='auditor').exists(): + return 'auditor' + elif user.groups.filter(name='penetration_tester').exists(): + return 'penetration_tester' + else: + return 'unknown' diff --git a/default_yaml_config.yaml b/web/config/default_yaml_config.yaml similarity index 94% rename from default_yaml_config.yaml rename to web/config/default_yaml_config.yaml index 058d7c61d..4e31bce86 100644 --- a/default_yaml_config.yaml +++ b/web/config/default_yaml_config.yaml @@ -81,15 +81,16 @@ dir_file_fuzz: { 'auto_calibration': true, 'enable_http_crawl': true, 'rate_limit': 150, - 'extensions': ['html', 'php','git','yaml','conf','cnf','config','gz','env','log','db','mysql','bak','asp','aspx','txt','conf','sql','json','yml','pdf'], + 'extensions': [], + # 'extensions': ['html', 'php','git','yaml','conf','cnf','config','gz','env','log','db','mysql','bak','asp','aspx','txt','conf','sql','json','yml','pdf'], 'follow_redirect': false, 'max_time': 0, 'match_http_status': [200, 204], - 'recursive_level': 2, + 'recursive_level': 0, 'stop_on_error': false, 'timeout': 5, 'threads': 30, - 'wordlist_name': 'dicc', + 'wordlist_name': 'default', # fuzz-Bo0oM } fetch_url: { # 'custom_header': { diff --git a/web/dashboard/context_processors.py b/web/dashboard/context_processors.py new file mode 100644 index 000000000..901a9bb1d --- /dev/null +++ b/web/dashboard/context_processors.py @@ -0,0 +1,14 @@ +from dashboard.utils import get_user_projects # Assuming this function exists + +def project_context(request): + current_project = getattr(request, 'current_project', None) # Get the project from the request + projects = get_user_projects(request.user) if request.user.is_authenticated else [] + + # If project is None, take the first project from the projects list + if current_project is None and projects: + current_project = projects[0] # Get the first project from the projects list + + return { + 'current_project': current_project, # Add the current project to the context + 'projects': projects, # Add user projects to the context if needed + } diff --git a/web/dashboard/fixtures/dashboard.json b/web/dashboard/fixtures/dashboard.json new file mode 100644 index 000000000..75dde2cf9 --- /dev/null +++ b/web/dashboard/fixtures/dashboard.json @@ -0,0 +1,37 @@ +[ +{ + "model": "dashboard.project", + "pk": 1, + "fields": { + "name": "Default", + "slug": "default", + "insert_date": "2024-09-03T21:23:21.459Z" + } +}, +{ + "model": "dashboard.project", + "pk": 2, + "fields": { + "name": "My Project", + "slug": "my-project", + "insert_date": "2024-09-04T00:32:08.839Z" + } +}, +{ + "model": "dashboard.project", + "pk": 3, + "fields": { + "name": "My Other Project", + "slug": "my-other-project", + "insert_date": "2024-09-04T00:32:31.475Z" + } +}, +{ + "model": "dashboard.ollamasettings", + "pk": 1, + "fields": { + "selected_model": "gpt-3", + "use_ollama": false + } +} +] diff --git a/web/dashboard/forms.py b/web/dashboard/forms.py new file mode 100644 index 000000000..6764a4b62 --- /dev/null +++ b/web/dashboard/forms.py @@ -0,0 +1,20 @@ +from django import forms +from django.contrib.auth.models 
import User + +from .models import Project + + +class ProjectForm(forms.ModelForm): + users = forms.ModelMultipleChoiceField( + queryset=User.objects.all(), + widget=forms.CheckboxSelectMultiple, + required=False + ) + description = forms.CharField( + widget=forms.Textarea(attrs={'rows': 4}), + required=False + ) + + class Meta: + model = Project + fields = ['name', 'description', 'users'] \ No newline at end of file diff --git a/web/dashboard/middleware.py b/web/dashboard/middleware.py new file mode 100644 index 000000000..a4b671f5b --- /dev/null +++ b/web/dashboard/middleware.py @@ -0,0 +1,59 @@ +from django.urls import resolve +from dashboard.utils import get_user_projects +from django.shortcuts import redirect +from django.urls import reverse +from dashboard.models import Project +from django.utils.deprecation import MiddlewareMixin +from django.shortcuts import get_object_or_404 +from .models import Project + +class ProjectAccessMiddleware: + def __init__(self, get_response): + self.get_response = get_response + + def __call__(self, request): + resolved = resolve(request.path_info) + if 'slug' in resolved.kwargs: + slug = resolved.kwargs['slug'] + project = Project.objects.filter(slug=slug).first() + + # Check if the user is authenticated + if not request.user.is_authenticated: + return redirect(reverse('permission_denied')) + + # If the project exists and the user has access + if project and project in get_user_projects(request.user): + return self.get_response(request) + + # If the project does not exist or the user does not have access + if project: + return redirect(reverse('page_not_found')) + else: + return redirect(reverse('permission_denied')) + + return self.get_response(request) + + +class SlugMiddleware(MiddlewareMixin): + def process_request(self, request): + request.current_project = None + request.slug = None + + # Try to get the project ID from the cookie + if project_id := request.COOKIES.get('currentProjectId'): + request.current_project = get_object_or_404(Project, id=project_id) + request.slug = request.current_project.slug + elif request.resolver_match and 'slug' in request.resolver_match.kwargs: + slug = request.resolver_match.kwargs['slug'] + request.slug = slug + request.current_project = get_object_or_404(Project, slug=slug) + + # If no project is found, use the first project of the user + if request.current_project is None and request.user.is_authenticated: + request.current_project = Project.objects.filter(users=request.user).first() + if request.current_project: + request.slug = request.current_project.slug + + # Update the session with the current project ID + if request.current_project: + request.session['current_project_id'] = request.current_project.id diff --git a/web/dashboard/migrations/0006_project_insert_date.py b/web/dashboard/migrations/0006_project_insert_date.py index 6d51f4e22..8f6e116f6 100644 --- a/web/dashboard/migrations/0006_project_insert_date.py +++ b/web/dashboard/migrations/0006_project_insert_date.py @@ -1,7 +1,7 @@ # Generated by Django 3.2.4 on 2023-07-06 09:08 from django.db import migrations, models - +from django.utils import timezone class Migration(migrations.Migration): @@ -13,7 +13,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='project', name='insert_date', - field=models.DateTimeField(default='2023-06-06'), + field=models.DateTimeField(default=timezone.now), preserve_default=False, ), - ] + ] \ No newline at end of file diff --git a/web/dashboard/migrations/0010_ollamasettings.py 
b/web/dashboard/migrations/0010_ollamasettings.py new file mode 100644 index 000000000..1e47e5d0a --- /dev/null +++ b/web/dashboard/migrations/0010_ollamasettings.py @@ -0,0 +1,20 @@ +# Generated by Django 3.2.4 on 2024-04-21 04:35 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('dashboard', '0009_delete_openaikeys'), + ] + + operations = [ + migrations.CreateModel( + name='OllamaSettings', + fields=[ + ('id', models.AutoField(primary_key=True, serialize=False)), + ('selected_model', models.CharField(max_length=500)), + ], + ), + ] diff --git a/web/dashboard/migrations/0011_ollamasettings_is_ollama.py b/web/dashboard/migrations/0011_ollamasettings_is_ollama.py new file mode 100644 index 000000000..aefdef37c --- /dev/null +++ b/web/dashboard/migrations/0011_ollamasettings_is_ollama.py @@ -0,0 +1,18 @@ +# Generated by Django 3.2.4 on 2024-04-21 05:06 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('dashboard', '0010_ollamasettings'), + ] + + operations = [ + migrations.AddField( + model_name='ollamasettings', + name='is_ollama', + field=models.BooleanField(default=False), + ), + ] diff --git a/web/dashboard/migrations/0012_rename_is_ollama_ollamasettings_is_openai.py b/web/dashboard/migrations/0012_rename_is_ollama_ollamasettings_is_openai.py new file mode 100644 index 000000000..52bdf6ff8 --- /dev/null +++ b/web/dashboard/migrations/0012_rename_is_ollama_ollamasettings_is_openai.py @@ -0,0 +1,18 @@ +# Generated by Django 3.2.4 on 2024-04-21 05:06 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('dashboard', '0011_ollamasettings_is_ollama'), + ] + + operations = [ + migrations.RenameField( + model_name='ollamasettings', + old_name='is_ollama', + new_name='is_openai', + ), + ] diff --git a/web/dashboard/migrations/0013_auto_20240421_0507.py b/web/dashboard/migrations/0013_auto_20240421_0507.py new file mode 100644 index 000000000..11ab6594a --- /dev/null +++ b/web/dashboard/migrations/0013_auto_20240421_0507.py @@ -0,0 +1,22 @@ +# Generated by Django 3.2.4 on 2024-04-21 05:07 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('dashboard', '0012_rename_is_ollama_ollamasettings_is_openai'), + ] + + operations = [ + migrations.RemoveField( + model_name='ollamasettings', + name='is_openai', + ), + migrations.AddField( + model_name='ollamasettings', + name='is_ollama', + field=models.BooleanField(default=True), + ), + ] diff --git a/web/dashboard/migrations/0014_rename_is_ollama_ollamasettings_use_ollama.py b/web/dashboard/migrations/0014_rename_is_ollama_ollamasettings_use_ollama.py new file mode 100644 index 000000000..a201df3ad --- /dev/null +++ b/web/dashboard/migrations/0014_rename_is_ollama_ollamasettings_use_ollama.py @@ -0,0 +1,18 @@ +# Generated by Django 3.2.4 on 2024-04-21 05:08 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('dashboard', '0013_auto_20240421_0507'), + ] + + operations = [ + migrations.RenameField( + model_name='ollamasettings', + old_name='is_ollama', + new_name='use_ollama', + ), + ] diff --git a/web/dashboard/migrations/0015_project_users.py b/web/dashboard/migrations/0015_project_users.py new file mode 100644 index 000000000..46e6b94d2 --- /dev/null +++ b/web/dashboard/migrations/0015_project_users.py @@ -0,0 +1,20 @@ +# Generated by Django 3.2.25 on 2024-08-21 00:46 + 
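+# Note: swappable_dependency orders this migration after the initial migration of
+# whatever model AUTH_USER_MODEL points to, so the users many-to-many table is only
+# created once the user table exists.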
+from django.conf import settings +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ('dashboard', '0014_rename_is_ollama_ollamasettings_use_ollama'), + ] + + operations = [ + migrations.AddField( + model_name='project', + name='users', + field=models.ManyToManyField(related_name='projects', to=settings.AUTH_USER_MODEL), + ), + ] diff --git a/web/dashboard/migrations/0016_project_description.py b/web/dashboard/migrations/0016_project_description.py new file mode 100644 index 000000000..d0b3c7f47 --- /dev/null +++ b/web/dashboard/migrations/0016_project_description.py @@ -0,0 +1,18 @@ +# Generated by Django 3.2.25 on 2024-08-21 11:06 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('dashboard', '0015_project_users'), + ] + + operations = [ + migrations.AddField( + model_name='project', + name='description', + field=models.TextField(blank=True, null=True), + ), + ] diff --git a/web/dashboard/models.py b/web/dashboard/models.py index 3d6001745..15a821e2e 100644 --- a/web/dashboard/models.py +++ b/web/dashboard/models.py @@ -1,5 +1,5 @@ from django.db import models - +from django.contrib.auth.models import User class SearchHistory(models.Model): query = models.CharField(max_length=1000) @@ -9,13 +9,18 @@ def __str__(self): class Project(models.Model): - id = models.AutoField(primary_key=True) - name = models.CharField(max_length=500) - slug = models.SlugField(unique=True) - insert_date = models.DateTimeField() + id = models.AutoField(primary_key=True) + name = models.CharField(max_length=500) + description = models.TextField(blank=True, null=True) + slug = models.SlugField(unique=True) + insert_date = models.DateTimeField() + users = models.ManyToManyField(User, related_name='projects') - def __str__(self): - return self.slug + def __str__(self): + return self.slug + + def is_user_authorized(self, user): + return user.is_superuser or self.users.filter(id=user.id).exists() class OpenAiAPIKey(models.Model): @@ -24,6 +29,15 @@ class OpenAiAPIKey(models.Model): def __str__(self): return self.key + + +class OllamaSettings(models.Model): + id = models.AutoField(primary_key=True) + selected_model = models.CharField(max_length=500) + use_ollama = models.BooleanField(default=True) + + def __str__(self): + return self.selected_model class NetlasAPIKey(models.Model): @@ -31,4 +45,4 @@ class NetlasAPIKey(models.Model): key = models.CharField(max_length=500) def __str__(self): - return self.key + return self.key \ No newline at end of file diff --git a/web/dashboard/templates/dashboard/admin.html b/web/dashboard/templates/dashboard/admin.html index 31c754a88..dfe8d6d94 100644 --- a/web/dashboard/templates/dashboard/admin.html +++ b/web/dashboard/templates/dashboard/admin.html @@ -2,6 +2,7 @@ {% load humanize %} {% load permission_tags %} {% load static %} +{% load custom_filters %} {% block title %} Profile @@ -15,7 +16,7 @@ {% endblock page_title %} {% block breadcrumb_title %} - + {% endblock breadcrumb_title %} @@ -99,16 +100,33 @@


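For orientation: the user rows in this hunk consume the new commonFilters template tags. A minimal Python sketch (illustrative stand-in data, not repo code) of what the filter chain muser.projects.all|map:'id'|join:',' evaluates to:

class Project:  # stand-in for a dashboard.models.Project row
    def __init__(self, id):
        self.id = id

projects = [Project(1), Project(2)]                # muser.projects.all
ids = [getattr(item, 'id') for item in projects]   # |map:'id' (the map_filter added above)
print(','.join(str(i) for i in ids))               # |join:',' -> "1,2"

get_user_role(muser) resolves to the first matching group among sys_admin, auditor and penetration_tester (else 'unknown'); both values evidently end up as arguments to update_user_modal defined further down.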
{% if user != muser %} {% if muser.is_active %} - + + + {% else %} - + + + {% endif %} {% else %}   {% endif %} - + {% with user_role=muser|get_user_role %} + {% with user_projects=muser.projects.all|map:'id'|join:',' %} + + + + {% endwith %} + {% endwith %} {% if user != muser %} - + + + {% endif %} @@ -159,29 +177,37 @@


}]) } -function update_user_modal(user_id, permission){ - var html_content = ''; - - html_content += ` +function update_user_modal(user_id, permission, user_projects) { + var html_content = `
[update-user modal markup lost in extraction: a role select (user_role), a password input (password) with a show-password checkbox, and a project multi-select (user_projects); for sys_admin the projects block is replaced by the notice "This user has access to all projects."]
- `; Swal.fire({ @@ -192,79 +218,91 @@


confirmButtonText: 'Update', showLoaderOnConfirm: true, preConfirm: function () { - return new Promise(function (resolve) { - var role_selected = document.getElementById('user_role').value; - var change_password = document.getElementById('password').value; - const data = { - 'role': role_selected, - 'change_password': change_password - }; - fetch('./update?mode=update&user=' + user_id, { - method: 'POST', - credentials: "same-origin", - headers: { - "X-CSRFToken": getCookie("csrftoken") - }, - body: JSON.stringify(data) - }) - .then(function (response) { - return response.json(); - }) - .then(function(data) { - return location.reload(); - }) - .catch(function() { - swal.insertQueueStep({ - type: 'error', - title: 'Oops! Unable to updte user!' - }) + return new Promise(function (resolve) { + var role_selected = document.getElementById('user_role').value; + var change_password = document.getElementById('password').value; + var selected_projects = permission === 'sys_admin' ? [] : Array.from(document.getElementById('user_projects').selectedOptions).map(option => option.value); + const data = { + 'role': role_selected, + 'change_password': change_password, + 'projects': selected_projects + }; + fetch('./update?mode=update&user=' + user_id, { + method: 'POST', + credentials: "same-origin", + headers: { + "X-CSRFToken": getCookie("csrftoken") + }, + body: JSON.stringify(data) + }) + .then(function (response) { + return response.json(); + }) + .then(function(data) { + return location.reload(); + }) + .catch(function() { + swal.insertQueueStep({ + type: 'error', + title: 'Oops! Unable to update user!' + }) + }) + resolve() }) - resolve() - }) } }); + // If the user is a Sys Admin, no need to select projects + if (permission !== 'sys_admin') { + user_projects.forEach(function(project_id) { + document.querySelector(`#user_projects option[value="${project_id}"]`).selected = true; + }); + } + const passwordInput = document.getElementById('password'); const showPasswordCheckbox = document.getElementById('show-password'); - showPasswordCheckbox.addEventListener('change', function() { - if (showPasswordCheckbox.checked) { - passwordInput.type = 'text'; - } else { - passwordInput.type = 'password'; - } + if (showPasswordCheckbox.checked) { + passwordInput.type = 'text'; + } else { + passwordInput.type = 'password'; + } }); - } -function create_user_modal(){ - var html_content = ''; - - html_content += ` +function create_user_modal() { + var html_content = `
[create-user modal markup lost in extraction: a username input (create_username), a role select (create_user_role), a password input (create_password) with a show-password checkbox, and a projects_container block with a project multi-select (create_user_projects)]
- `; Swal.fire({ @@ -275,69 +313,92 @@


confirmButtonText: 'Create User', showLoaderOnConfirm: true, preConfirm: function () { - return new Promise(function (resolve) { - var create_username = document.getElementById('create_username').value; - var role_selected = document.getElementById('create_user_role').value; - var create_password = document.getElementById('create_password').value; - if (!create_password) { - Swal.fire({ - title: "Oops! Passwords can't be empty!", - icon: 'error', - }) - return - } - const data = { - 'username': create_username, - 'role': role_selected, - 'password': create_password - }; - fetch('./update?mode=create', { - method: 'POST', - credentials: "same-origin", - headers: { - "X-CSRFToken": getCookie("csrftoken") - }, - body: JSON.stringify(data) - }) - .then(function (response) { - return response.json(); - }) - .then(function(data) { - if (data.status) { - return location.reload(); - } - else{ - Swal.fire({ - title: "Oops! Can't create user!", - icon: 'error', - text: 'Error: ' + data.error, + return new Promise(function (resolve) { + var create_username = document.getElementById('create_username').value; + var role_selected = document.getElementById('create_user_role').value; + var create_password = document.getElementById('create_password').value; + var selected_projects = role_selected === 'sys_admin' ? [] : Array.from(document.getElementById('create_user_projects').selectedOptions).map(option => option.value); + + if (!create_password) { + Swal.fire({ + title: "Oops! Passwords can't be empty!", + icon: 'error', + }); + return; + } + + const data = { + 'username': create_username, + 'role': role_selected, + 'password': create_password, + 'projects': selected_projects + }; + + fetch('./update?mode=create', { + method: 'POST', + credentials: "same-origin", + headers: { + "X-CSRFToken": getCookie("csrftoken") + }, + body: JSON.stringify(data) }) - } - - }) - .catch(function() { - swal.insertQueueStep({ - type: 'error', - title: 'Oops! Unable to updte user!' - }) - }) - resolve() - }) + .then(function (response) { + return response.json(); + }) + .then(function(data) { + if (data.status) { + return location.reload(); + } else { + Swal.fire({ + title: "Oops! Can't create user!", + icon: 'error', + text: 'Error: ' + data.error, + }); + } + }) + .catch(function() { + swal.insertQueueStep({ + type: 'error', + title: 'Oops! Unable to create user!' + }); + }); + resolve(); + }); } }); const passwordInput = document.getElementById('create_password'); const showPasswordCheckbox = document.getElementById('show-password'); - showPasswordCheckbox.addEventListener('change', function() { - if (showPasswordCheckbox.checked) { - passwordInput.type = 'text'; - } else { - passwordInput.type = 'password'; - } + if (showPasswordCheckbox.checked) { + passwordInput.type = 'text'; + } else { + passwordInput.type = 'password'; + } + }); + + // Handle the role change to show/hide project selection + const roleSelect = document.getElementById('create_user_role'); + roleSelect.addEventListener('change', function() { + var role = this.value; + var projectsContainer = document.getElementById('projects_container'); + if (role === 'sys_admin') { + projectsContainer.innerHTML = '

This user has access to all projects.

'; + } else { + projectsContainer.innerHTML = ` + + + `; + } }); + // Trigger the change event on load to set the correct state + roleSelect.dispatchEvent(new Event('change')); } diff --git a/web/dashboard/templates/dashboard/edit_project.html b/web/dashboard/templates/dashboard/edit_project.html new file mode 100644 index 000000000..017280ba5 --- /dev/null +++ b/web/dashboard/templates/dashboard/edit_project.html @@ -0,0 +1,63 @@ +{% extends 'base/base.html' %} +{% load humanize %} +{% load static %} + +{% block title %}Edit Project{% endblock title %} + +{% block custom_js_css_link %} + +{% endblock custom_js_css_link %} + +{% block page_title %}Edit Project: {{ edit_project.name }}{% endblock page_title %} + +{% block breadcrumb_title %} + + + +{% endblock breadcrumb_title %} + +{% block main_content %} +
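+{# The form below posts the ProjectForm fields declared in web/dashboard/forms.py (name, description, users); edit_project is presumably supplied by the corresponding dashboard view. #}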
+ [edit-project form markup lost in extraction: a card titled "Edit Project Details" containing a POST form with {% csrf_token %} and fields matching ProjectForm (name, description, users)]
+{% endblock main_content %} + +{% block page_level_script %} + + +{% endblock page_level_script %} diff --git a/web/dashboard/templates/dashboard/index.html b/web/dashboard/templates/dashboard/index.html index d4ead887f..f3f778763 100644 --- a/web/dashboard/templates/dashboard/index.html +++ b/web/dashboard/templates/dashboard/index.html @@ -150,7 +150,7 @@
text-info {% endif %}">
{% endblock page_level_script %} diff --git a/web/recon_note/tests.py b/web/recon_note/tests.py deleted file mode 100644 index 7ce503c2d..000000000 --- a/web/recon_note/tests.py +++ /dev/null @@ -1,3 +0,0 @@ -from django.test import TestCase - -# Create your tests here. diff --git a/web/recon_note/tests/__init__.py b/web/recon_note/tests/__init__.py new file mode 100644 index 000000000..e6c6fbbc8 --- /dev/null +++ b/web/recon_note/tests/__init__.py @@ -0,0 +1,2 @@ +from utils.test_base import * +from .test_recon_note import * diff --git a/web/recon_note/tests/test_recon_note.py b/web/recon_note/tests/test_recon_note.py new file mode 100644 index 000000000..ba5985977 --- /dev/null +++ b/web/recon_note/tests/test_recon_note.py @@ -0,0 +1,81 @@ +""" +TestScanReconNoteViews contains unit tests for the scan recon note functionality within the application. +It verifies the behavior of the API endpoints related to adding, listing, and deleting recon notes. + +Methods: + setUp: Initializes the test environment by creating a base project and a test TodoNote. + test_add_recon_note_success: Tests the successful addition of a recon note. + test_add_recon_note_missing_data: Tests the addition of a recon note with missing required data. + test_list_recon_notes: Tests the retrieval of all recon notes associated with a project. + test_delete_recon_note_success: Tests the successful deletion of a recon note. + test_delete_recon_note_not_found: Tests the deletion of a recon note that does not exist. +""" + + +from django.urls import reverse +from rest_framework import status +from utils.test_base import BaseTestCase +from recon_note.models import TodoNote + +__all__ = [ + 'TestScanReconNoteViews', +] + +class TestScanReconNoteViews(BaseTestCase): + """Test case for the Scan Recon Note views.""" + + def setUp(self): + """Set up the test environment.""" + super().setUp() + self.data_generator.create_project_base() # Create a base project + self.todo_note = self.data_generator.create_todo_note() # Create a test TodoNote + + def test_add_recon_note_success(self): + """Test adding a recon note successfully.""" + api_url = reverse("api:addReconNote") + data = { + "subdomain_id": self.data_generator.subdomain.id, + "scan_history_id": self.data_generator.scan_history.id, + "title": "New Recon Note", + "description": "This is a new recon note", + "project": self.data_generator.project.slug, + } + response = self.client.post(api_url, data, content_type='application/json') + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.json()["status"]) + + def test_add_recon_note_missing_data(self): + """Test adding a recon note with missing data.""" + api_url = reverse("api:addReconNote") + data = { + "title": "Incomplete Note", + "slug": self.data_generator.project.slug, + } + response = self.client.post(api_url, data, content_type='application/json') + self.assertIn(response.status_code, [status.HTTP_400_BAD_REQUEST]) + self.assertFalse(response.json()["status"]) + self.assertIn("error", response.json()) + self.assertEqual(response.json()["error"], "Project is required.") + + def test_list_recon_notes(self): + """Test listing all recon notes.""" + api_url = reverse("list_note", kwargs={'slug': self.data_generator.project.slug}) + response = self.client.get(api_url, {"project": self.data_generator.project.slug}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + + def test_delete_recon_note_success(self): + """Test deleting a recon note successfully.""" + api_url = 
reverse("delete_note") + data = {"id": self.todo_note.id} + response = self.client.post(api_url, data, content_type='application/json') + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(response.json()["status"]) + self.assertFalse(TodoNote.objects.filter(id=self.todo_note.id).exists()) + + def test_delete_recon_note_not_found(self): + """Test deleting a recon note that does not exist.""" + api_url = reverse("delete_note") + data = {"id": 99999} # Non-existent ID + response = self.client.post(api_url, data, content_type='application/json') + self.assertIn(response.status_code, [status.HTTP_404_NOT_FOUND]) + self.assertFalse(response.json()["status"]) diff --git a/web/recon_note/views.py b/web/recon_note/views.py index 562ae933d..19f0082f5 100644 --- a/web/recon_note/views.py +++ b/web/recon_note/views.py @@ -1,44 +1,144 @@ +""" +Views for the recon_note app. + +This module contains the views for the recon_note app, which handles +the management of todo notesand related operations. +""" import json +import logging from django.http import JsonResponse from django.shortcuts import render -from recon_note.models import * -from startScan.models import * - +from recon_note.models import TodoNote def list_note(request, slug): - context = {} - context['recon_note_active'] = 'active' + """ + list_note renders the list view for recon notes associated with a specific project. + It prepares the context for the template and returns the rendered HTML response. + + Args: + request (HttpRequest): The HTTP request object containing metadata about the request. + slug (str): The slug of the project for which the recon notes are being listed. + + Returns: + HttpResponse: The rendered HTML response for the note list view. + """ + context = {'recon_note_active': 'active'} return render(request, 'note/index.html', context) def flip_todo_status(request): - if request.method == "POST": + """ + flip_todo_status toggles the completion status of a todo note based on the provided request data. + It processes a POST request, validates the input, and updates the note's status, + returning a JSON response indicating the result. + + Args: + request (HttpRequest): The HTTP request object containing the note ID and the request method. + + Returns: + JsonResponse: A JSON response indicating the success or failure of the operation, + along with the updated completion status if successful. + + Raises: + JsonDecodeError: If the request body contains invalid JSON. + Http404: If the specified todo note does not exist. 
+ """ + if request.method != "POST": + return JsonResponse({'status': False, 'error': 'Invalid request method.'}, status=400) + + try: body_unicode = request.body.decode('utf-8') body = json.loads(body_unicode) + except json.JSONDecodeError as e: + logging.error('JSON decode error: %s', e) + return JsonResponse({'status': False, 'error': 'Invalid JSON.'}, status=400) - note = TodoNote.objects.get(id=body['id']) - note.is_done = not note.is_done - note.save() + note_id = body.get('id') + if note_id is None: + return JsonResponse({'status': False, 'error': 'ID is required.'}, status=400) - return JsonResponse({'status': True}) + try: + note = TodoNote.objects.get(id=note_id) + except TodoNote.DoesNotExist: + return JsonResponse({'status': False, 'error': 'Note not found.'}, status=404) + + note.is_done = not note.is_done + note.save() + return JsonResponse({'status': True, 'error': False, 'is_done': note.is_done}, status=200) def flip_important_status(request): - if request.method == "POST": + """ + flip_important_status toggles the importance status of a todo note based on the provided request data. + It processes a POST request, validates the input, and updates the note's status, + returning a JSON response indicating the result. + + Args: + request (HttpRequest): The HTTP request object containing the note ID and the request method. + + Returns: + JsonResponse: A JSON response indicating the success or failure of the operation, + along with the updated importance status if successful. + + Raises: + JsonDecodeError: If the request body contains invalid JSON. + Http404: If the specified todo note does not exist. + """ + if request.method != "POST": + return JsonResponse({'status': False, 'error': 'Invalid request method.'}, status=400) + + try: body_unicode = request.body.decode('utf-8') body = json.loads(body_unicode) + except json.JSONDecodeError as e: + logging.error('JSON decode error: %s', e) + return JsonResponse({'status': False, 'error': 'Invalid JSON.'}, status=400) - note = TodoNote.objects.get(id=body['id']) - note.is_important = not note.is_important - note.save() + note_id = body.get('id') + if note_id is None: + return JsonResponse({'status': False, 'error': 'ID is required.'}, status=400) - return JsonResponse({'status': True}) + try: + note = TodoNote.objects.get(id=note_id) + except TodoNote.DoesNotExist: + return JsonResponse({'status': False, 'error': 'Note not found.'}, status=404) + + note.is_important = not note.is_important + note.save() + return JsonResponse({'status': True, 'error': False, 'is_important': note.is_important}, status=200) def delete_note(request): - if request.method == "POST": + """ + delete_note handles the deletion of a todo note based on the provided request data. + It processes a POST request, validates the input, and removes the specified note, + returning a JSON response indicating the result. + + Args: + request (HttpRequest): The HTTP request object containing the note ID and the request method. + + Returns: + JsonResponse: A JSON response indicating the success or failure of the deletion operation. + + Raises: + JsonDecodeError: If the request body contains invalid JSON. + Http404: If the specified todo note does not exist. 
+ """ + if request.method != "POST": + return JsonResponse({'status': False, 'error': 'Invalid request method.'}, status=400) + + try: body_unicode = request.body.decode('utf-8') body = json.loads(body_unicode) + except json.JSONDecodeError as e: + logging.error('JSON decode error: %s', e) + return JsonResponse({'status': False, 'error': 'Invalid JSON.'}, status=400) + + note_id = body.get('id') + if note_id is None: + return JsonResponse({'status': False, 'error': 'ID is required.'}, status=400) - TodoNote.objects.filter(id=body['id']).delete() + if not TodoNote.objects.filter(id=note_id).exists(): + return JsonResponse({'status': False, 'error': 'Note not found.'}, status=404) - return JsonResponse({'status': True}) + TodoNote.objects.filter(id=note_id).delete() + return JsonResponse({'status': True, 'error': False, 'deleted': True}, status=200) diff --git a/web/requirements.txt b/web/requirements.txt deleted file mode 100644 index 82fda1a19..000000000 --- a/web/requirements.txt +++ /dev/null @@ -1,39 +0,0 @@ -aiodns==3.0.0 -argh==0.26.2 -beautifulsoup4==4.9.3 -celery==5.4.0 -discord-webhook==1.3.0 -Django==3.2.25 -django-ace==1.32.4 -django-celery-beat==2.6.0 -django-login-required-middleware==0.9.0 -django-role-permissions==3.2.0 -django-mathfilters==1.0.0 -django-timezone-field==6.1.0 -djangorestframework==3.14.0 -djangorestframework-datatables==0.7.2 -dotted-dict==1.1.3 -drf-yasg==1.21.5 -gunicorn==23.0.0 -gevent==24.2.1 -humanize==4.3.0 -Markdown==3.3.4 -metafinder==1.2 -netaddr==0.8.0 -netlas==0.4.1 -openai==0.28.0 -PyYAML==6.0.1 -PySocks==1.7.1 -psycopg2==2.9.7 -pycvesearch==1.0 -redis==5.0.3 -requests==2.32.2 -scapy==2.4.3 -tldextract==3.5.0 -uro==1.0.0 -validators==0.18.2 -watchdog==4.0.0 -whatportis -weasyprint==53.3 -wafw00f==2.2.0 -xmltodict==0.13.0 diff --git a/web/scanEngine/fixtures/scanEngine.json b/web/scanEngine/fixtures/scanEngine.json new file mode 100644 index 000000000..030b7b148 --- /dev/null +++ b/web/scanEngine/fixtures/scanEngine.json @@ -0,0 +1,436 @@ +[ +{ + "model": "scanEngine.enginetype", + "pk": 1, + "fields": { + "engine_name": "Full Scan", + "yaml_configuration": "subdomain_discovery: {\r\n 'uses_tools': ['subfinder', 'ctfr', 'sublist3r', 'tlsx', 'oneforall', 'netlas'],\r\n 'enable_http_crawl': true,\r\n 'threads': 30,\r\n 'timeout': 5,\r\n}\r\nhttp_crawl: {}\r\nport_scan: {\r\n 'enable_http_crawl': true,\r\n 'timeout': 5,\r\n # 'exclude_ports': [],\r\n # 'exclude_subdomains': [],\r\n 'ports': ['top-100'],\r\n 'rate_limit': 150,\r\n 'threads': 30,\r\n 'passive': false,\r\n # 'use_naabu_config': false,\r\n # 'enable_nmap': true,\r\n # 'nmap_cmd': '',\r\n # 'nmap_script': '',\r\n # 'nmap_script_args': ''\r\n}\r\nosint: {\r\n 'discover': [\r\n 'emails',\r\n 'metainfo',\r\n 'employees'\r\n ],\r\n 'dorks': [\r\n 'login_pages',\r\n 'admin_panels',\r\n 'dashboard_pages',\r\n 'stackoverflow',\r\n 'social_media',\r\n 'project_management',\r\n 'code_sharing',\r\n 'config_files',\r\n 'jenkins',\r\n 'wordpress_files',\r\n 'php_error',\r\n 'exposed_documents',\r\n 'db_files',\r\n 'git_exposed'\r\n ],\r\n 'intensity': 'normal',\r\n 'documents_limit': 50\r\n}\r\ndir_file_fuzz: {\r\n 'auto_calibration': true,\r\n 'enable_http_crawl': true,\r\n 'rate_limit': 150,\r\n 'extensions': [],\r\n 'follow_redirect': false,\r\n 'max_time': 0,\r\n 'match_http_status': [200, 204],\r\n 'recursive_level': 0,\r\n 'stop_on_error': false,\r\n 'timeout': 5,\r\n 'threads': 30,\r\n 'wordlist_name': 'default', # fuzz-Bo0oM\r\n}\r\nfetch_url: {\r\n 'uses_tools': ['gospider', 'hakrawler', 
'waybackurls', 'katana', 'gau'],\r\n 'remove_duplicate_endpoints': true,\r\n 'duplicate_fields': ['content_length', 'page_title'],\r\n 'follow_redirect': false,\r\n 'enable_http_crawl': true,\r\n 'gf_patterns': ['debug_logic', 'idor', 'interestingEXT', 'interestingparams', 'interestingsubs', 'lfi', 'rce', 'redirect', 'sqli', 'ssrf', 'ssti', 'xss'],\r\n 'ignore_file_extensions': ['png', 'jpg', 'jpeg', 'gif', 'mp4', 'mpeg', 'mp3'],\r\n 'threads': 30\r\n}\r\nvulnerability_scan: {\r\n 'run_nuclei': true,\r\n 'run_dalfox': true,\r\n 'run_crlfuzz': true,\r\n 'enable_http_crawl': true,\r\n 'concurrency': 50,\r\n 'intensity': 'normal',\r\n 'rate_limit': 150,\r\n 'retries': 1,\r\n 'timeout': 5,\r\n 'fetch_gpt_report': true,\r\n 'nuclei': {\r\n 'use_nuclei_config': false,\r\n 'severities': ['unknown', 'info', 'low', 'medium', 'high', 'critical']\r\n }\r\n}\r\nwaf_detection: {\r\n\r\n}\r\nscreenshot: {\r\n 'enable_http_crawl': true,\r\n 'intensity': 'normal',\r\n 'timeout': 10,\r\n 'threads': 40\r\n}\r\n\r\n# custom_header: \"Cookie: Test\"", + "default_engine": true + } +}, +{ + "model": "scanEngine.enginetype", + "pk": 2, + "fields": { + "engine_name": "Subdomain Scan", + "yaml_configuration": "subdomain_discovery: {\r\n 'uses_tools': [\r\n 'subfinder', \r\n 'ctfr', \r\n 'sublist3r', \r\n 'tlsx', \r\n 'oneforall', \r\n 'netlas'\r\n ],\r\n 'enable_http_crawl': true,\r\n 'threads': 30,\r\n 'timeout': 5,\r\n}\r\nhttp_crawl: {}", + "default_engine": true + } +}, +{ + "model": "scanEngine.enginetype", + "pk": 3, + "fields": { + "engine_name": "OSINT", + "yaml_configuration": "osint: {\r\n 'discover': [\r\n 'emails',\r\n 'metainfo',\r\n 'employees'\r\n ],\r\n 'dorks': [\r\n 'login_pages',\r\n 'admin_panels',\r\n 'dashboard_pages',\r\n 'stackoverflow',\r\n 'social_media',\r\n 'project_management',\r\n 'code_sharing',\r\n 'config_files',\r\n 'jenkins',\r\n 'wordpress_files',\r\n 'php_error',\r\n 'exposed_documents',\r\n 'db_files',\r\n 'git_exposed'\r\n ],\r\n 'custom_dorks': [\r\n {'lookup_site': '_target_', 'lookup_extensions': 'php'}\r\n ],\r\n 'intensity': 'normal',\r\n 'documents_limit': 50\r\n}", + "default_engine": true + } +}, +{ + "model": "scanEngine.enginetype", + "pk": 4, + "fields": { + "engine_name": "Vulnerability Scan", + "yaml_configuration": "subdomain_discovery: {\r\n 'uses_tools': ['subfinder', 'ctfr', 'sublist3r', 'tlsx', 'oneforall', 'netlas'],\r\n 'enable_http_crawl': true,\r\n 'threads': 30,\r\n 'timeout': 5,\r\n}\r\nhttp_crawl: {}\r\nosint: {\r\n 'discover': [\r\n 'emails',\r\n 'metainfo',\r\n 'employees'\r\n ],\r\n 'dorks': [\r\n 'login_pages',\r\n 'admin_panels',\r\n 'dashboard_pages',\r\n 'stackoverflow',\r\n 'social_media',\r\n 'project_management',\r\n 'code_sharing',\r\n 'config_files',\r\n 'jenkins',\r\n 'wordpress_files',\r\n 'php_error',\r\n 'exposed_documents',\r\n 'db_files',\r\n 'git_exposed'\r\n ],\r\n 'intensity': 'normal',\r\n 'documents_limit': 50\r\n}\r\nvulnerability_scan: {\r\n 'run_nuclei': true,\r\n 'run_dalfox': true,\r\n 'run_crlfuzz': true,\r\n 'enable_http_crawl': true,\r\n 'concurrency': 50,\r\n 'intensity': 'normal',\r\n 'rate_limit': 150,\r\n 'retries': 1,\r\n 'timeout': 5,\r\n 'fetch_gpt_report': true,\r\n 'nuclei': {\r\n 'use_nuclei_config': false,\r\n 'severities': ['unknown', 'info', 'low', 'medium', 'high', 'critical']\r\n }\r\n}", + "default_engine": true + } +}, +{ + "model": "scanEngine.enginetype", + "pk": 5, + "fields": { + "engine_name": "Port Scan", + "yaml_configuration": "http_crawl: {}\r\nport_scan: {\r\n 'enable_http_crawl': true,\r\n 
'timeout': 5,\r\n # 'exclude_ports': [],\r\n # 'exclude_subdomains': [],\r\n 'ports': ['top-100'],\r\n 'rate_limit': 150,\r\n 'threads': 30,\r\n 'passive': false,\r\n # 'use_naabu_config': false,\r\n # 'enable_nmap': true,\r\n # 'nmap_cmd': '',\r\n # 'nmap_script': '',\r\n # 'nmap_script_args': ''\r\n}", + "default_engine": true + } +}, +{ + "model": "scanEngine.enginetype", + "pk": 6, + "fields": { + "engine_name": "reNgine Recommended", + "yaml_configuration": "subdomain_discovery: {\r\n 'uses_tools': ['subfinder', 'ctfr', 'sublist3r', 'tlsx', 'oneforall', 'netlas'],\r\n 'enable_http_crawl': true,\r\n 'threads': 30,\r\n 'timeout': 5,\r\n}\r\nhttp_crawl: {}\r\nosint: {\r\n 'discover': [\r\n 'emails',\r\n 'metainfo'\r\n ],\r\n 'dorks': [\r\n 'login_pages',\r\n 'admin_panels',\r\n 'dashboard_pages',\r\n 'config_files',\r\n 'exposed_documents',\r\n ],\r\n 'intensity': 'normal',\r\n 'documents_limit': 50\r\n}\r\nvulnerability_scan: {\r\n 'run_nuclei': true,\r\n 'run_dalfox': true,\r\n 'run_crlfuzz': true,\r\n 'enable_http_crawl': false,\r\n 'concurrency': 50,\r\n 'intensity': 'normal',\r\n 'rate_limit': 150,\r\n 'retries': 1,\r\n 'timeout': 5,\r\n 'fetch_gpt_report': true,\r\n 'nuclei': {\r\n 'use_nuclei_config': false,\r\n 'severities': ['low', 'medium', 'high', 'critical']\r\n }\r\n}", + "default_engine": true + } +}, +{ + "model": "scanEngine.enginetype", + "pk": 7, + "fields": { + "engine_name": "Full (perso)", + "yaml_configuration": "# Global vars for all tools\r\n#\r\n# Custom header - FFUF, Nuclei, Dalfox, CRL Fuzz, HTTPx, Fetch URL (Hakrawler, Katana, Gospider)\r\n# custom_header: {\r\n# 'Cookie':'Test',\r\n# 'User-Agent': 'Mozilla/5.0',\r\n# 'Custom-Header': 'My custom header'\r\n# }\r\n# 'user_agent': '' # Dalfox only\r\n# 'enable_http_crawl': true # All tools\r\n# 'timeout': 10 # Subdomain discovery, Screenshot, Port scan, FFUF, Nuclei \r\n# 'threads': 30 # All tools\r\n# 'rate_limit': 150 # Port scan, FFUF, Nuclei\r\n# 'intensity': 'normal' # Screenshot (grab only the root endpoints of each subdomain), Nuclei (reduce number of endpoints to scan), OSINT (not implemented yet)\r\n# 'retries': 1 # Nuclei\r\n\r\nsubdomain_discovery: {\r\n 'uses_tools': ['subfinder', 'ctfr', 'sublist3r', 'tlsx', 'oneforall', 'netlas'], # amass-passive, amass-active, All\r\n 'enable_http_crawl': true,\r\n 'threads': 30,\r\n 'timeout': 5,\r\n # 'use_subfinder_config': false,\r\n # 'use_amass_config': false,\r\n # 'amass_wordlist': 'deepmagic.com-prefixes-top50000'\r\n}\r\nhttp_crawl: {\r\n # 'custom_header': {\r\n # 'Cookie':'Test',\r\n # 'User-Agent': 'Mozilla/5.0'\r\n # },\r\n # 'threads': 30,\r\n # 'follow_redirect': false\r\n}\r\nport_scan: {\r\n 'enable_http_crawl': true,\r\n 'timeout': 5,\r\n # 'exclude_ports': [],\r\n # 'exclude_subdomains': [],\r\n 'ports': ['top-100'],\r\n 'rate_limit': 150,\r\n 'threads': 30,\r\n 'passive': false,\r\n # 'use_naabu_config': false,\r\n # 'enable_nmap': true,\r\n # 'nmap_cmd': '',\r\n # 'nmap_script': '',\r\n # 'nmap_script_args': ''\r\n}\r\nosint: {\r\n 'discover': [\r\n 'emails',\r\n 'metainfo',\r\n 'employees'\r\n ],\r\n 'dorks': [\r\n 'login_pages',\r\n 'admin_panels',\r\n 'dashboard_pages',\r\n 'stackoverflow',\r\n 'social_media',\r\n 'project_management',\r\n 'code_sharing',\r\n 'config_files',\r\n 'jenkins',\r\n 'wordpress_files',\r\n 'php_error',\r\n 'exposed_documents',\r\n 'db_files',\r\n 'git_exposed'\r\n ],\r\n # 'custom_dorks': [],\r\n 'intensity': 'normal',\r\n 'documents_limit': 50\r\n}\r\ndir_file_fuzz: {\r\n # 'custom_header': {\r\n # 
'Cookie':'Test',\r\n # 'User-Agent': 'Mozilla/5.0',\r\n # 'Custom-Header': 'My custom header'\r\n # },\r\n 'auto_calibration': true,\r\n 'enable_http_crawl': true,\r\n 'rate_limit': 150,\r\n 'extensions': [],\r\n 'follow_redirect': false,\r\n 'max_time': 0,\r\n 'match_http_status': [200, 204],\r\n 'recursive_level': 0,\r\n 'stop_on_error': false,\r\n 'timeout': 5,\r\n 'threads': 30,\r\n 'wordlist_name': 'default', # fuzz-Bo0oM,\r\n}\r\nfetch_url: {\r\n # 'custom_header': {\r\n # 'Cookie':'Test',\r\n # 'User-Agent': 'Mozilla/5.0',\r\n # 'Custom-Header': 'My custom header'\r\n # },\r\n 'uses_tools': ['gospider', 'hakrawler', 'waybackurls', 'katana', 'gau'],\r\n 'remove_duplicate_endpoints': true,\r\n 'duplicate_fields': ['content_length', 'page_title'],\r\n 'follow_redirect': false,\r\n 'enable_http_crawl': true,\r\n 'gf_patterns': ['debug_logic', 'idor', 'interestingEXT', 'interestingparams', 'interestingsubs', 'lfi', 'rce', 'redirect', 'sqli', 'ssrf', 'ssti', 'xss'],\r\n 'ignore_file_extensions': ['png', 'jpg', 'jpeg', 'gif', 'mp4', 'mpeg', 'mp3'],\r\n 'threads': 30,\r\n # 'exclude_subdomains': false\r\n}\r\nvulnerability_scan: {\r\n # 'custom_header': {\r\n # 'Cookie':'Test',\r\n # 'User-Agent': 'Mozilla/5.0',\r\n # 'Custom-Header': 'My custom header'\r\n # },\r\n 'run_nuclei': true,\r\n 'run_dalfox': false,\r\n 'run_crlfuzz': false,\r\n 'run_s3scanner': false,\r\n 'enable_http_crawl': true,\r\n 'concurrency': 50,\r\n 'intensity': 'normal',\r\n 'rate_limit': 150,\r\n 'retries': 1,\r\n 'timeout': 5,\r\n 'fetch_gpt_report': true,\r\n 'nuclei': {\r\n 'use_nuclei_config': false,\r\n 'severities': ['unknown', 'info', 'low', 'medium', 'high', 'critical'],\r\n # 'tags': [], # Nuclei tags (https://github.com/projectdiscovery/nuclei-templates)\r\n # 'templates': [], # Nuclei templates (https://github.com/projectdiscovery/nuclei-templates)\r\n # 'custom_templates': [] # Nuclei custom templates uploaded in reNgine\r\n }\r\n}\r\nwaf_detection: {\r\n 'enable_http_crawl': true\r\n}\r\nscreenshot: {\r\n 'enable_http_crawl': true,\r\n 'intensity': 'normal',\r\n 'timeout': 10,\r\n 'threads': 40\r\n}", + "default_engine": false + } +}, +{ + "model": "scanEngine.interestinglookupmodel", + "pk": 1, + "fields": { + "keywords": "admin, ftp, cpanel, dashboard", + "custom_type": false, + "title_lookup": true, + "url_lookup": true, + "condition_200_http_lookup": false + } +}, +{ + "model": "scanEngine.installedexternaltool", + "pk": 1, + "fields": { + "logo_url": "https://raw.githubusercontent.com/projectdiscovery/subfinder/master/static/subfinder-logo.png", + "name": "subfinder", + "description": "Subfinder is a subdomain discovery tool that discovers valid subdomains for websites by using passive online sources.", + "github_url": "https://github.com/projectdiscovery/subfinder", + "license_url": "https://github.com/projectdiscovery/subfinder/blob/master/LICENSE.md", + "version_lookup_command": "subfinder -version", + "update_command": "go install -v github.com/projectdiscovery/subfinder/v2/cmd/subfinder@latest", + "install_command": "go install -v github.com/projectdiscovery/subfinder/v2/cmd/subfinder@latest", + "version_match_regex": "v(\\d+\\.)?(\\d+\\.)?(\\*|\\d+)", + "is_default": true, + "is_subdomain_gathering": true, + "is_github_cloned": false, + "github_clone_path": null, + "subdomain_gathering_command": null + } +}, +{ + "model": "scanEngine.installedexternaltool", + "pk": 2, + "fields": { + "logo_url": "https://raw.githubusercontent.com/projectdiscovery/nuclei/master/static/nuclei-logo.png", + "name": 
"Nuclei", + "description": "Nuclei is used to send requests across targets based on a template leading to zero false positives and providing fast scanning on large number of hosts. Nuclei offers scanning for a variety of protocols including TCP, DNS, HTTP, File, etc. With powerful and flexible templating, all kinds of security checks can be modelled with Nuclei.", + "github_url": "https://github.com/projectdiscovery/nuclei", + "license_url": "https://github.com/projectdiscovery/nuclei/blob/master/LICENSE.md", + "version_lookup_command": "nuclei -version", + "update_command": "nuclei -update", + "install_command": "go install -v github.com/projectdiscovery/nuclei/v2/cmd/nuclei@latest", + "version_match_regex": "v(\\d+\\.)?(\\d+\\.)?(\\*|\\d+)", + "is_default": true, + "is_subdomain_gathering": false, + "is_github_cloned": false, + "github_clone_path": null, + "subdomain_gathering_command": null + } +}, +{ + "model": "scanEngine.installedexternaltool", + "pk": 3, + "fields": { + "logo_url": "https://raw.githubusercontent.com/projectdiscovery/httpx/master/static/httpx-logo.png", + "name": "httpx", + "description": "httpx is a fast and multi-purpose HTTP toolkit allow to run multiple probers using retryablehttp library, it is designed to maintain the result reliability with increased threads.", + "github_url": "https://github.com/projectdiscovery/httpx", + "license_url": "https://github.com/projectdiscovery/httpx/blob/master/LICENSE.md", + "version_lookup_command": "/go/bin/httpx -version", + "update_command": "go install -v github.com/projectdiscovery/httpx/cmd/httpx@latest", + "install_command": "go install -v github.com/projectdiscovery/httpx/cmd/httpx@latest", + "version_match_regex": "v(\\d+\\.)?(\\d+\\.)?(\\*|\\d+)", + "is_default": true, + "is_subdomain_gathering": false, + "is_github_cloned": false, + "github_clone_path": null, + "subdomain_gathering_command": null + } +}, +{ + "model": "scanEngine.installedexternaltool", + "pk": 4, + "fields": { + "logo_url": "https://raw.githubusercontent.com/projectdiscovery/naabu/master/static/naabu-logo.png", + "name": "naabu", + "description": "Naabu is a port scanning tool written in Go that allows you to enumerate valid ports for hosts in a fast and reliable manner. 
It is a really simple tool that does fast SYN/CONNECT scans on the host/list of hosts and lists all ports that return a reply.", + "github_url": "https://github.com/projectdiscovery/naabu", + "license_url": "https://github.com/projectdiscovery/naabu/blob/main/LICENSE.md", + "version_lookup_command": "naabu -version", + "update_command": "go install -v github.com/projectdiscovery/naabu/v2/cmd/naabu@latest", + "install_command": "go install -v github.com/projectdiscovery/naabu/v2/cmd/naabu@latest", + "version_match_regex": "(\\b\\d+\\.\\d+\\.\\d+\\b)", + "is_default": true, + "is_subdomain_gathering": false, + "is_github_cloned": false, + "github_clone_path": null, + "subdomain_gathering_command": null + } +}, +{ + "model": "scanEngine.installedexternaltool", + "pk": 5, + "fields": { + "logo_url": "https://raw.githubusercontent.com/osmedeus/assets/main/logo-transparent.png", + "name": "gospider", + "description": "Fast web spider written in Go", + "github_url": "https://github.com/jaeles-project/gospider", + "license_url": "https://github.com/jaeles-project/gospider/blob/master/LICENSE", + "version_lookup_command": "gospider --version", + "update_command": "go install -v github.com/jaeles-project/gospider@latest", + "install_command": "go install -v github.com/jaeles-project/gospider@latest", + "version_match_regex": "v(\\d+\\.)?(\\d+\\.)?(\\*|\\d+)", + "is_default": true, + "is_subdomain_gathering": false, + "is_github_cloned": false, + "github_clone_path": null, + "subdomain_gathering_command": null + } +}, +{ + "model": "scanEngine.installedexternaltool", + "pk": 6, + "fields": { + "logo_url": "https://raw.githubusercontent.com/owasp-amass/amass/master/images/amass_logo.png", + "name": "amass", + "description": "The OWASP Amass Project performs network mapping of attack surfaces and external asset discovery using open source information gathering and active reconnaissance techniques.", + "github_url": "https://github.com/owasp-amass/amass", + "license_url": "https://github.com/owasp-amass/amass/blob/master/LICENSE", + "version_lookup_command": "amass -version", + "update_command": "go install -v github.com/owasp-amass/amass/v4/...@latest", + "install_command": "go install -v github.com/owasp-amass/amass/v4/...@latest", + "version_match_regex": "v(\\d+\\.)?(\\d+\\.)?(\\*|\\d+)", + "is_default": true, + "is_subdomain_gathering": true, + "is_github_cloned": false, + "github_clone_path": null, + "subdomain_gathering_command": null + } +}, +{ + "model": "scanEngine.installedexternaltool", + "pk": 7, + "fields": { + "logo_url": "https://github.com/ffuf/ffuf/raw/master/_img/ffuf_run_logo_600.png", + "name": "ffuf", + "description": "A fast web fuzzer written in Go.", + "github_url": "https://github.com/ffuf/ffuf", + "license_url": "https://github.com/ffuf/ffuf/blob/master/LICENSE", + "version_lookup_command": "ffuf -V", + "update_command": "go install github.com/ffuf/ffuf/v2@latest", + "install_command": "", + "version_match_regex": "(\\d+\\.)?(\\d+\\.)?(\\*|\\d+)", + "is_default": true, + "is_subdomain_gathering": false, + "is_github_cloned": false, + "github_clone_path": null, + "subdomain_gathering_command": null + } +}, +{ + "model": "scanEngine.installedexternaltool", + "pk": 8, + "fields": { + "logo_url": null, + "name": "sublist3r", + "description": "Sublist3r is a python tool designed to enumerate subdomains of websites using OSINT. It helps penetration testers and bug hunters collect and gather subdomains for the domain they are targeting. 
Sublist3r enumerates subdomains using many search engines such as Google, Yahoo, Bing, Baidu and Ask. Sublist3r also enumerates subdomains using Netcraft, Virustotal, ThreatCrowd, DNSdumpster, and ReverseDNS.", + "github_url": "https://github.com/aboul3la/Sublist3r", + "license_url": "https://github.com/aboul3la/Sublist3r/blob/master/LICENSE", + "version_lookup_command": null, + "update_command": "git pull", + "install_command": "git clone https://github.com/aboul3la/Sublist3r.git", + "version_match_regex": "[vV]*(\\d+\\.)?(\\d+\\.)?(\\*|\\d+)", + "is_default": true, + "is_subdomain_gathering": true, + "is_github_cloned": true, + "github_clone_path": "/home/rengine/tools/.github/Sublist3r", + "subdomain_gathering_command": null + } +}, +{ + "model": "scanEngine.installedexternaltool", + "pk": 9, + "fields": { + "logo_url": null, + "name": "hakrawler", + "description": "Fast golang web crawler for gathering URLs and JavaScript file locations. This is basically a simple implementation of the awesome Gocolly library.", + "github_url": "https://github.com/hakluke/hakrawler", + "license_url": "https://github.com/hakluke/hakrawler/blob/master/LICENSE", + "version_lookup_command": null, + "update_command": "go install -v github.com/hakluke/hakrawler@latest", + "install_command": "go install -v github.com/hakluke/hakrawler@latest", + "version_match_regex": "[vV]*(\\d+\\.)?(\\d+\\.)?(\\*|\\d+)", + "is_default": true, + "is_subdomain_gathering": false, + "is_github_cloned": false, + "github_clone_path": null, + "subdomain_gathering_command": null + } +}, +{ + "model": "scanEngine.installedexternaltool", + "pk": 10, + "fields": { + "logo_url": null, + "name": "OneForAll", + "description": "A powerful subdomain integration tool.", + "github_url": "https://github.com/shmilylty/OneForAll", + "license_url": "https://github.com/shmilylty/OneForAll/blob/master/LICENSE", + "version_lookup_command": "cat /home/rengine/tools/.github/OneForAll/oneforall.py", + "update_command": "git pull", + "install_command": "git clone https://github.com/shmilylty/OneForAll", + "version_match_regex": "v\\d+\\.\\d+\\.\\d+\\b", + "is_default": true, + "is_subdomain_gathering": true, + "is_github_cloned": true, + "github_clone_path": "/home/rengine/tools/.github/OneForAll", + "subdomain_gathering_command": null + } +}, +{ + "model": "scanEngine.installedexternaltool", + "pk": 11, + "fields": { + "logo_url": "https://raw.githubusercontent.com/laramies/theHarvester/master/theHarvester-logo.png", + "name": "theHarvester", + "description": "theHarvester is a very simple to use, yet powerful and effective tool designed to be used in the early stages of a penetration test or red team engagement. Use it for open source intelligence (OSINT) gathering to help determine a company's external threat landscape on the internet.
The tool gathers emails, names, subdomains, IPs and URLs using multiple public data sources.", + "github_url": "https://github.com/laramies/theHarvester", + "license_url": "https://github.com/laramies/theHarvester/blob/master/README/LICENSES", + "version_lookup_command": "cat /home/rengine/tools/.github/theHarvester/theHarvester/lib/version.py", + "update_command": "git pull", + "install_command": "git clone https://github.com/laramies/theHarvester", + "version_match_regex": "\\b\\d+\\.\\d+\\.\\d+\\b", + "is_default": true, + "is_subdomain_gathering": false, + "is_github_cloned": true, + "github_clone_path": "/home/rengine/tools/.github/theHarvester", + "subdomain_gathering_command": null + } +}, +{ + "model": "scanEngine.installedexternaltool", + "pk": 12, + "fields": { + "logo_url": null, + "name": "ctfr", + "description": "This tool allows to get the subdomains from a HTTPS website in a few seconds. CTFR does not use neither dictionary attack nor brute-force, it just abuses of Certificate Transparency logs.", + "github_url": "https://github.com/UnaPibaGeek/ctfr", + "license_url": "https://github.com/UnaPibaGeek/ctfr/blob/master/LICENSE", + "version_lookup_command": "python3 /home/rengine/tools/.github/ctfr/ctfr.py --help", + "update_command": "git pull", + "install_command": "git clone https://github.com/UnaPibaGeek/ctfr/", + "version_match_regex": "(\\d+\\.)?(\\d+\\.).", + "is_default": true, + "is_subdomain_gathering": true, + "is_github_cloned": true, + "github_clone_path": "/home/rengine/tools/.github/ctfr", + "subdomain_gathering_command": null + } +}, +{ + "model": "scanEngine.installedexternaltool", + "pk": 13, + "fields": { + "logo_url": "https://user-images.githubusercontent.com/8293321/174841003-01a62bad-2ecf-4874-89c4-efa53dd56884.png", + "name": "tlsx", + "description": "A fast and configurable TLS grabber focused on TLS based data collection and analysis.", + "github_url": "https://github.com/projectdiscovery/tlsx", + "license_url": "https://github.com/projectdiscovery/tlsx/blob/main/LICENSE", + "version_lookup_command": "tlsx -version", + "update_command": "tlsx -update", + "install_command": "go install -v github.com/projectdiscovery/tlsx/cmd/tlsx@latest", + "version_match_regex": "(\\d+\\.)?(\\d+\\.)?(\\*|\\d+)", + "is_default": true, + "is_subdomain_gathering": true, + "is_github_cloned": false, + "github_clone_path": null, + "subdomain_gathering_command": null + } +}, +{ + "model": "scanEngine.installedexternaltool", + "pk": 14, + "fields": { + "logo_url": "https://avatars.githubusercontent.com/u/79084675?v=4", + "name": "netlas", + "description": "Non-intrusive Internet Scanner.", + "github_url": "https://github.com/netlas-io/netlas-python", + "license_url": "https://github.com/netlas-io/netlas-python/blob/master/LICENSE", + "version_lookup_command": "pip3 show netlas", + "update_command": "pip3 install netlas --upgrade", + "install_command": "pip3 install netlas", + "version_match_regex": "(\\d+\\.)?(\\d+\\.)?(\\*|\\d+)", + "is_default": true, + "is_subdomain_gathering": true, + "is_github_cloned": false, + "github_clone_path": null, + "subdomain_gathering_command": null + } +}, +{ + "model": "scanEngine.installedexternaltool", + "pk": 15, + "fields": { + "logo_url": "https://github.githubassets.com/images/icons/emoji/unicode/1f98a.png", + "name": "dalfox", + "description": "DalFox is a powerful open-source tool that focuses on automation, making it ideal for quickly scanning for XSS flaws and analyzing parameters.
Its advanced testing engine and niche features are designed to streamline the process of detecting and verifying vulnerabilities. As for the name, Dal(달) is the Korean word for \"moon,\" while \"Fox\" stands for \"Finder Of XSS\".", + "github_url": "https://github.com/hahwul/dalfox", + "license_url": "https://github.com/hahwul/dalfox/blob/main/LICENSE.txt", + "version_lookup_command": "dalfox version", + "update_command": "go install -v github.com/hahwul/dalfox/v2@latest", + "install_command": "go install -v github.com/hahwul/dalfox/v2@latest", + "version_match_regex": "v(\\d+\\.)?(\\d+\\.)?(\\*|\\d+)", + "is_default": true, + "is_subdomain_gathering": false, + "is_github_cloned": false, + "github_clone_path": null, + "subdomain_gathering_command": null + } +}, +{ + "model": "scanEngine.installedexternaltool", + "pk": 16, + "fields": { + "logo_url": "https://user-images.githubusercontent.com/8293321/196779266-421c79d4-643a-4f73-9b54-3da379bbac09.png", + "name": "katana", + "description": "A next-generation crawling and spidering framework.", + "github_url": "https://github.com/projectdiscovery/katana", + "license_url": "https://github.com/projectdiscovery/katana/blob/main/LICENSE.md", + "version_lookup_command": "katana -version", + "update_command": "go install -v github.com/projectdiscovery/katana/cmd/katana@latest", + "install_command": "go install -v github.com/projectdiscovery/katana/cmd/katana@latest", + "version_match_regex": "v(\\d+\\.)?(\\d+\\.)?(\\*|\\d+)", + "is_default": true, + "is_subdomain_gathering": false, + "is_github_cloned": false, + "github_clone_path": null, + "subdomain_gathering_command": null + } +}, +{ + "model": "scanEngine.installedexternaltool", + "pk": 17, + "fields": { + "logo_url": "https://user-images.githubusercontent.com/25837540/90128972-fc3bdf00-dd91-11ea-8c3b-0d6f4e8c6ba3.png", + "name": "crlfuzz", + "description": "A fast tool to scan CRLF vulnerability written in Go", + "github_url": "https://github.com/dwisiswant0/crlfuzz", + "license_url": "https://github.com/dwisiswant0/crlfuzz/blob/master/README.md", + "version_lookup_command": "crlfuzz -V", + "update_command": "go install -v github.com/dwisiswant0/crlfuzz/cmd/crlfuzz@latest", + "install_command": "go install -v github.com/dwisiswant0/crlfuzz/cmd/crlfuzz@latest", + "version_match_regex": "(\\d+\\.)?(\\d+\\.)?(\\*|\\d+)", + "is_default": true, + "is_subdomain_gathering": false, + "is_github_cloned": false, + "github_clone_path": null, + "subdomain_gathering_command": null + } +}, +{ + "model": "scanEngine.installedexternaltool", + "pk": 18, + "fields": { + "logo_url": null, + "name": "gau", + "description": "Get all URLs", + "github_url": "https://github.com/lc/gau", + "license_url": "https://github.com/lc/gau/blob/main/LICENSE", + "version_lookup_command": "gau --version", + "update_command": "go install github.com/lc/gau/v2/cmd/gau@latest", + "install_command": "go install github.com/lc/gau/v2/cmd/gau@latest", + "version_match_regex": "[vV]*(\\d+\\.)?(\\d+\\.)?(\\*|\\d+)", + "is_default": true, + "is_subdomain_gathering": false, + "is_github_cloned": false, + "github_clone_path": null, + "subdomain_gathering_command": null + } +} +] diff --git a/web/scanEngine/forms.py b/web/scanEngine/forms.py index cdf71a0f7..40d47cc40 100644 --- a/web/scanEngine/forms.py +++ b/web/scanEngine/forms.py @@ -183,6 +183,23 @@ class Meta: "placeholder": "https://hooks.slack.com/services/T00000000/B00000000/XXXXXXXXXXXXXXXXXXXXXXXX", })) + send_to_lark = forms.BooleanField( + required=False, + 
widget=forms.CheckboxInput( + attrs={ + "class": "form-check-input", + "id": "lark_checkbox", + })) + + lark_hook_url = forms.CharField( + required=False, + widget=forms.TextInput( + attrs={ + "class": "form-control", + "id": "lark_hook_url", + "placeholder": "https://open.larksuite.com/open-apis/bot/v2/hook/XXXXXXXXXXXXXXXXXXXXXXXX", + })) + send_to_discord = forms.BooleanField( required=False, widget=forms.CheckboxInput( @@ -280,10 +297,12 @@ class Meta: def set_value(self, key): self.initial['send_to_slack'] = key.send_to_slack + self.initial['send_to_lark'] = key.send_to_lark self.initial['send_to_discord'] = key.send_to_discord self.initial['send_to_telegram'] = key.send_to_telegram self.initial['slack_hook_url'] = key.slack_hook_url + self.initial['lark_hook_url'] = key.lark_hook_url self.initial['discord_hook_url'] = key.discord_hook_url self.initial['telegram_bot_token'] = key.telegram_bot_token self.initial['telegram_bot_chat_id'] = key.telegram_bot_chat_id @@ -298,6 +317,8 @@ def set_value(self, key): if not key.send_to_slack: self.fields['slack_hook_url'].widget.attrs['readonly'] = True + if not key.send_to_lark: + self.fields['lark_hook_url'].widget.attrs['readonly'] = True if not key.send_to_discord: self.fields['discord_hook_url'].widget.attrs['readonly'] = True if not key.send_to_telegram: @@ -307,10 +328,12 @@ def set_value(self, key): def set_initial(self): self.initial['send_to_slack'] = False + self.initial['send_to_lark'] = False self.initial['send_to_discord'] = False self.initial['send_to_telegram'] = False self.fields['slack_hook_url'].widget.attrs['readonly'] = True + self.fields['lark_hook_url'].widget.attrs['readonly'] = True self.fields['discord_hook_url'].widget.attrs['readonly'] = True self.fields['telegram_bot_token'].widget.attrs['readonly'] = True self.fields['telegram_bot_chat_id'].widget.attrs['readonly'] = True diff --git a/web/scanEngine/management/commands/dumpcustomengines.py b/web/scanEngine/management/commands/dumpcustomengines.py index 2fc63549d..6a957c998 100644 --- a/web/scanEngine/management/commands/dumpcustomengines.py +++ b/web/scanEngine/management/commands/dumpcustomengines.py @@ -1,9 +1,10 @@ from django.core.management.base import BaseCommand from reNgine.common_func import dump_custom_scan_engines +from reNgine.settings import RENGINE_CUSTOM_ENGINES class Command(BaseCommand): help = 'Dumps custom engines into YAMLs in custom_engines/ folder' def handle(self, *args, **kwargs): - return dump_custom_scan_engines('/usr/src/app/custom_engines') \ No newline at end of file + return dump_custom_scan_engines(RENGINE_CUSTOM_ENGINES) \ No newline at end of file diff --git a/web/scanEngine/management/commands/loadcustomengines.py b/web/scanEngine/management/commands/loadcustomengines.py index fbe2050a2..7ae76e8c8 100644 --- a/web/scanEngine/management/commands/loadcustomengines.py +++ b/web/scanEngine/management/commands/loadcustomengines.py @@ -1,11 +1,12 @@ from django.core.management.base import BaseCommand from reNgine.common_func import load_custom_scan_engines +from reNgine.settings import RENGINE_CUSTOM_ENGINES class Command(BaseCommand): help = 'Loads custom engines from YAMLs in custom_engines/ folder into database' def handle(self, *args, **kwargs): - return load_custom_scan_engines('/usr/src/app/custom_engines') + return load_custom_scan_engines(RENGINE_CUSTOM_ENGINES) diff --git a/web/scanEngine/migrations/0007_lark_notification.py b/web/scanEngine/migrations/0007_lark_notification.py new file mode 100644 index 000000000..1a96411f1 --- 
/dev/null +++ b/web/scanEngine/migrations/0007_lark_notification.py @@ -0,0 +1,23 @@ +# Generated by Django 3.2.25 on 2024-09-25 11:36 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('scanEngine', '0006_alter_installedexternaltool_version_lookup_command'), + ] + + operations = [ + migrations.AddField( + model_name='notification', + name='lark_hook_url', + field=models.CharField(blank=True, max_length=200, null=True), + ), + migrations.AddField( + model_name='notification', + name='send_to_lark', + field=models.BooleanField(default=False), + ), + ] diff --git a/web/scanEngine/models.py b/web/scanEngine/models.py index 89a7cbb31..3e5c6d07f 100644 --- a/web/scanEngine/models.py +++ b/web/scanEngine/models.py @@ -70,10 +70,12 @@ class InterestingLookupModel(models.Model): class Notification(models.Model): id = models.AutoField(primary_key=True) send_to_slack = models.BooleanField(default=False) + send_to_lark = models.BooleanField(default=False) send_to_discord = models.BooleanField(default=False) send_to_telegram = models.BooleanField(default=False) slack_hook_url = models.CharField(max_length=200, null=True, blank=True) + lark_hook_url = models.CharField(max_length=200, null=True, blank=True) discord_hook_url = models.CharField(max_length=200, null=True, blank=True) telegram_bot_token = models.CharField(max_length=100, null=True, blank=True) telegram_bot_chat_id = models.CharField(max_length=100, null=True, blank=True) diff --git a/web/scanEngine/static/scanEngine/js/custom_scan_engine.js b/web/scanEngine/static/scanEngine/js/custom_scan_engine.js index 17c122670..2e1e4cb34 100644 --- a/web/scanEngine/static/scanEngine/js/custom_scan_engine.js +++ b/web/scanEngine/static/scanEngine/js/custom_scan_engine.js @@ -20,11 +20,10 @@ $(document).ready(function() { } ); -function delete_api(id, item) -{ - var delAPI = 'delete/'+id; +function delete_api(id, item, type) { + const delAPI = type === 'wordlist' ? `wordlist/delete/${id}` : `delete/${id}`; swal.queue([{ - title: 'Are you sure you want to delete this scan engine?', + title: `Are you sure you want to delete this ${item}?`, text: "You won't be able to revert this!", type: 'warning', showCancelButton: true, @@ -32,8 +31,8 @@ function delete_api(id, item) padding: '2em', showLoaderOnConfirm: true, preConfirm: function() { - return fetch(delAPI, { - method: 'POST', + return fetch(delAPI, { + method: 'POST', credentials: "same-origin", headers: { "X-CSRFToken": getCookie("csrftoken") @@ -44,14 +43,14 @@ function delete_api(id, item) }) .then(function(data) { // TODO Look for better way - return location.reload(); + return location.reload(); }) .catch(function() { - swal.insertQueueStep({ - type: 'error', - title: 'Oops! Unable to delete' - }) - }) + swal.insertQueueStep({ + type: 'error', + title: 'Oops! 
Unable to delete' + }); + }); } - }]) + }]); } diff --git a/web/scanEngine/static/scanEngine/js/custom_tools.js b/web/scanEngine/static/scanEngine/js/custom_tools.js index 68af0ab5f..4d8cef2a6 100644 --- a/web/scanEngine/static/scanEngine/js/custom_tools.js +++ b/web/scanEngine/static/scanEngine/js/custom_tools.js @@ -7,13 +7,13 @@ function load_gf_template(pattern_name){ $.getJSON(`/api/getFileContents?gf_pattern&name=${pattern_name}&format=json`, function(response) { swal.close(); if (response.status) { - $('#modal_title').empty(); - $('#modal-content').empty(); - $("#modal-footer").empty(); + $('#modal_dialog .modal-title').empty(); + $('#modal_dialog .modal-text').empty(); + $("#modal_dialog .modal-footer").empty(); - $('#modal_title').html(`GF Pattern ` + htmlEncode(pattern_name)); + $('#modal_dialog .modal-title').html(`GF Pattern ` + htmlEncode(pattern_name)); - $('#modal-content').append(`
${htmlEncode(response['content'])}
`); + $('#modal_dialog .modal-text').append(`
${htmlEncode(response['content'])}
`); $('#modal_dialog').modal('show'); } else{ @@ -38,13 +38,13 @@ function load_nuclei_template(pattern_name){ $.getJSON(`/api/getFileContents?nuclei_template&name=${pattern_name}&format=json`, function(response) { swal.close(); if (response.status) { - $('#modal_title').empty(); - $('#modal-content').empty(); - $("#modal-footer").empty(); + $('#modal_dialog .modal-title').empty(); + $('#modal_dialog .modal-text').empty(); + $("#modal_dialog .modal-footer").empty(); - $('#modal_title').html(`Nuclei Template: ` + htmlEncode(pattern_name)); + $('#modal_dialog .modal-title').html(`Nuclei Template: ` + htmlEncode(pattern_name)); - $('#modal-content').append(`
${htmlEncode(response['content'])}
`); + $('#modal_dialog .modal-text').append(`
${htmlEncode(response['content'])}
`); $('#modal_dialog').modal('show'); } else{ @@ -146,3 +146,19 @@ $("#theharvester_config_text_area").dblclick(function() { $("#theharvester-config-form").append(''); } }); + +$.getJSON(`/api/getFileContents?gau_config&format=json`, function(data) { + $("#gau_config_text_area").attr("rows", 14); + $("textarea#gau_config_text_area").html(htmlEncode(data['content'])); +}).fail(function(){ + $("#gau_config_text_area").removeAttr("readonly"); + $("textarea#gau_config_text_area").html(`# Your GAU configuration here.`); + $("#gau-config-form").append(''); +}); + +$("#gau_config_text_area").dblclick(function() { + if (!document.getElementById('gau-config-submit')) { + $("#gau_config_text_area").removeAttr("readonly"); + $("#gau-config-form").append(''); + } +}); diff --git a/web/scanEngine/templates/scanEngine/_items/form_engine.html b/web/scanEngine/templates/scanEngine/_items/form_engine.html index b060acacc..96792fd4a 100644 --- a/web/scanEngine/templates/scanEngine/_items/form_engine.html +++ b/web/scanEngine/templates/scanEngine/_items/form_engine.html @@ -122,7 +122,7 @@
reNgine YAML Documentation

Please do not modify the configuration unless you know what you are doing.
- If default YAML configuration doesn't automatically load,
download default configuration from here and paste it. (Firefox may have issues loading default YAML configuration.) + If the default YAML configuration doesn't load automatically, download the default configuration from here and paste it. (Firefox may have issues loading the default YAML configuration.)

{{form.yaml_configuration}} diff --git a/web/scanEngine/templates/scanEngine/add_engine.html b/web/scanEngine/templates/scanEngine/add_engine.html index 568db8214..4c00f5ec8 100644 --- a/web/scanEngine/templates/scanEngine/add_engine.html +++ b/web/scanEngine/templates/scanEngine/add_engine.html @@ -23,7 +23,7 @@ {% endblock custom_js_css_link %} {% block breadcrumb_title %} - + {% endblock breadcrumb_title %} @@ -57,148 +57,6 @@

Scan Engines

{% endblock page_level_script %} diff --git a/web/scanEngine/templates/scanEngine/index.html b/web/scanEngine/templates/scanEngine/index.html index ce065dc7f..300c2a213 100644 --- a/web/scanEngine/templates/scanEngine/index.html +++ b/web/scanEngine/templates/scanEngine/index.html @@ -10,7 +10,7 @@ {% endblock custom_js_css_link %} {% block breadcrumb_title %} - + {% endblock breadcrumb_title %} @@ -28,7 +28,7 @@
{% if user|can:'add_modify_scan_settings' %} - Add Scan Engine + Add Scan Engine {% endif %}
@@ -126,12 +126,12 @@ {% if user|can:'add_modify_scan_settings' %} - + - + diff --git a/web/scanEngine/templates/scanEngine/lookup.html b/web/scanEngine/templates/scanEngine/lookup.html index 050a2bf7b..2294a9ee8 100644 --- a/web/scanEngine/templates/scanEngine/lookup.html +++ b/web/scanEngine/templates/scanEngine/lookup.html @@ -1,6 +1,6 @@ {% extends 'base/base.html' %} {% load static %} -{% load custom_tags %} +{% load custom_filters %} {% block title %} Interesting entries Lookup {% endblock title %} @@ -10,7 +10,7 @@ {% endblock custom_js_css_link %} {% block breadcrumb_title %} - + {% endblock breadcrumb_title %} diff --git a/web/scanEngine/templates/scanEngine/settings/add_tool.html b/web/scanEngine/templates/scanEngine/settings/add_tool.html index f7fd2f8e0..795b13f40 100644 --- a/web/scanEngine/templates/scanEngine/settings/add_tool.html +++ b/web/scanEngine/templates/scanEngine/settings/add_tool.html @@ -7,7 +7,7 @@ {% endblock custom_js_css_link %} {% block breadcrumb_title %} - + {% endblock breadcrumb_title %} {% block page_title %} diff --git a/web/scanEngine/templates/scanEngine/settings/api.html b/web/scanEngine/templates/scanEngine/settings/api.html index 37337f7e5..2eb528f41 100644 --- a/web/scanEngine/templates/scanEngine/settings/api.html +++ b/web/scanEngine/templates/scanEngine/settings/api.html @@ -1,6 +1,6 @@ {% extends 'base/base.html' %} {% load static %} -{% load custom_tags %} +{% load custom_filters %} {% block title %} reNgine-ng Settings {% endblock title %} @@ -26,26 +26,19 @@ {% csrf_token %}
+ {% for apiKey in apiKeys %}
- -

OpenAI keys will be used to generate vulnerability description, remediation, impact and vulnerability report writing using ChatGPT.

- {% if openai_key %} - - {% else %} - - {% endif %} - This is optional but recommended. -
-
- -

Netlas keys will be used to get whois information and other OSINT data.

- {% if netlas_key %} - - {% else %} - - {% endif %} - This is optional + +

{{ apiKey.text }}

+
+ + +
+ {% if apiKey.optional %}{% endif %}
+ {% endfor %}
@@ -60,4 +53,65 @@ {% block page_level_script %} + {% endblock page_level_script %} diff --git a/web/scanEngine/templates/scanEngine/settings/hackerone.html b/web/scanEngine/templates/scanEngine/settings/hackerone.html index 0b54c43f4..ec4e9fc16 100644 --- a/web/scanEngine/templates/scanEngine/settings/hackerone.html +++ b/web/scanEngine/templates/scanEngine/settings/hackerone.html @@ -1,7 +1,7 @@ {# DO NOT AUTO INDENT #} {% extends 'base/base.html' %} {% load static %} -{% load custom_tags %} +{% load custom_filters %} {% block title %} Hackerone Settings {% endblock title %} @@ -116,7 +116,7 @@

Vulnerability Report Template

} } else{ - const hackerone_api = 'testHackerone/'; + const hackerone_api = '{% url 'testHackerone' %}'; var username = $("#username").val(); var api_key = $("#api_key").val(); swal.queue([{ diff --git a/web/scanEngine/templates/scanEngine/settings/llm_toolkit.html b/web/scanEngine/templates/scanEngine/settings/llm_toolkit.html new file mode 100644 index 000000000..cf5434590 --- /dev/null +++ b/web/scanEngine/templates/scanEngine/settings/llm_toolkit.html @@ -0,0 +1,287 @@ +{% extends 'base/base.html' %} +{% load static %} +{% load humanize %} +{% block title %} +LLM Toolkit +{% endblock title %} + +{% block custom_js_css_link %} +{% endblock custom_js_css_link %} + +{% block breadcrumb_title %} + + +{% endblock breadcrumb_title %} + +{% block page_title %} +LLM Toolkit (Beta) +{% endblock page_title %} + +{% block main_content %} +
+
+
+
+ LLM Toolkit lets you download new LLMs, view the available models, delete models you no longer need, and choose between the various models. +
+

reNgine makes use of various LLMs to enhance the reporting process. Using these LLM models, penetration testers can generate detailed, insightful penetration testing reports. +
+ If you are using custom LLM models, expect much slower response times on CPU; we recommend using a GPU for better performance. Models such as llama2 or llama3 require significant computation and effectively require a GPU, so running on CPU only will result in slow response times. +
+ OpenAI GPT models do not run locally, so a GPU is not required for them. +

+
+
+
+ +
{{installed_models|length}} available Models
+ {% if openai_key_error %} + + {% endif %} +
+ {% for model in installed_models %} +
+
+
+ +

+ {{model.name}} {% if model.selected %}Selected Model{% endif %} +

+

+

+ + + Modified {% if model.modified_at %}{{model.modified_at|naturaltime}} {% else %} NA{% endif %} + +
+ + + {% if model.is_local %} + Locally installed model + {% else %} + Open AI Model + {% endif %} + +
+ + + {{model.details.parameter_size}} Parameters + + + + {{model.details.family}} Family + +

+
+
+
+ {% empty %} + + {% endfor %} +
+
+{% endblock main_content %} + + +{% block page_level_script %} + +{% endblock page_level_script %} diff --git a/web/scanEngine/templates/scanEngine/settings/notification.html b/web/scanEngine/templates/scanEngine/settings/notification.html index 8259884e7..9c58f9749 100644 --- a/web/scanEngine/templates/scanEngine/settings/notification.html +++ b/web/scanEngine/templates/scanEngine/settings/notification.html @@ -1,12 +1,12 @@ {% extends 'base/base.html' %} {% load static %} -{% load custom_tags %} +{% load custom_filters %} {% block title %} Notification Settings {% endblock title %} {% block custom_js_css_link %} - + {% endblock custom_js_css_link %} @@ -51,6 +51,29 @@

Send Notifications to:

Slack Webhook API Documentation
+
+ +
+
+
+ +
+ {{form.send_to_lark}} +
+
+
+ {{form.lark_hook_url}} +
+ + + +
+
+
+ +
@@ -183,6 +206,16 @@

Upload S } }); + var lark_checkbox = document.getElementById("lark_checkbox"); + lark_checkbox.addEventListener('change', function() { + if (this.checked) { + document.getElementById("lark_hook_url").readOnly = false; + } + else{ + document.getElementById("lark_hook_url").readOnly = true; + } + }); + var discord_checkbox = document.getElementById("discord_checkbox"); discord_checkbox.addEventListener('change', function() { if (this.checked) { diff --git a/web/scanEngine/templates/scanEngine/settings/proxy.html b/web/scanEngine/templates/scanEngine/settings/proxy.html index 8eff8f7c1..93b3d580f 100644 --- a/web/scanEngine/templates/scanEngine/settings/proxy.html +++ b/web/scanEngine/templates/scanEngine/settings/proxy.html @@ -1,6 +1,6 @@ {% extends 'base/base.html' %} {% load static %} -{% load custom_tags %} +{% load custom_filters %} {% block title %} Proxy Settings {% endblock title %} diff --git a/web/scanEngine/templates/scanEngine/settings/rengine.html b/web/scanEngine/templates/scanEngine/settings/rengine.html index dbe99b101..28f1a69ff 100644 --- a/web/scanEngine/templates/scanEngine/settings/rengine.html +++ b/web/scanEngine/templates/scanEngine/settings/rengine.html @@ -1,6 +1,6 @@ {% extends 'base/base.html' %} {% load static %} -{% load custom_tags %} +{% load custom_filters %} {% block title %} reNgine-ng Settings {% endblock title %} @@ -42,7 +42,7 @@

Danger Zone

- +

@@ -57,7 +57,7 @@

Danger Zone

- +
diff --git a/web/scanEngine/templates/scanEngine/settings/report.html b/web/scanEngine/templates/scanEngine/settings/report.html index 54425093b..2865b21f9 100644 --- a/web/scanEngine/templates/scanEngine/settings/report.html +++ b/web/scanEngine/templates/scanEngine/settings/report.html @@ -1,7 +1,7 @@ {# DO NOT AUTO INDENT #} {% extends 'base/base.html' %} {% load static %} -{% load custom_tags %} +{% load custom_filters %} {% block title %} Customize Report {% endblock title %} diff --git a/web/scanEngine/templates/scanEngine/settings/tool.html b/web/scanEngine/templates/scanEngine/settings/tool.html index 57dfc2fe4..0462fafd6 100644 --- a/web/scanEngine/templates/scanEngine/settings/tool.html +++ b/web/scanEngine/templates/scanEngine/settings/tool.html @@ -1,6 +1,6 @@ {% extends 'base/base.html' %} {% load static %} -{% load custom_tags %} +{% load custom_filters %} {% block title %} Settings, API Keys & Tool Settings {% endblock title %} @@ -163,6 +163,22 @@

Current theHarvester Configuration

+
+
+

GAU

+

+ This section lets you modify the GAU config file. You can find more information about the GAU config here.
+
+ Please note that only TOML config is supported. +

+
+ {% csrf_token %} +

Current GAU Configuration

+ + +
+
+
diff --git a/web/scanEngine/templates/scanEngine/settings/tool_arsenal.html b/web/scanEngine/templates/scanEngine/settings/tool_arsenal.html index 84b164627..272f3d3d6 100644 --- a/web/scanEngine/templates/scanEngine/settings/tool_arsenal.html +++ b/web/scanEngine/templates/scanEngine/settings/tool_arsenal.html @@ -1,6 +1,6 @@ {% extends 'base/base.html' %} {% load static %} -{% load custom_tags %} +{% load custom_filters %} {% block title %} Tool Arsenal {% endblock title %} @@ -20,7 +20,7 @@ {% block main_content %}
@@ -56,7 +56,7 @@
{% endif %} @@ -92,7 +92,7 @@
Current Installed Version
{{tool.description}}

- +
diff --git a/web/scanEngine/templates/scanEngine/settings/update_tool.html b/web/scanEngine/templates/scanEngine/settings/update_tool.html index 80992fce9..f15db79e2 100644 --- a/web/scanEngine/templates/scanEngine/settings/update_tool.html +++ b/web/scanEngine/templates/scanEngine/settings/update_tool.html @@ -6,7 +6,7 @@ {% block custom_js_css_link %} {% endblock custom_js_css_link %} {% block breadcrumb_title %} - + {% endblock breadcrumb_title %} {% block page_title %} diff --git a/web/scanEngine/templates/scanEngine/update_engine.html b/web/scanEngine/templates/scanEngine/update_engine.html index 431184e4a..3ed629c47 100644 --- a/web/scanEngine/templates/scanEngine/update_engine.html +++ b/web/scanEngine/templates/scanEngine/update_engine.html @@ -22,7 +22,7 @@ {% endblock custom_js_css_link %} {% block breadcrumb_title %} - + {% endblock breadcrumb_title %} diff --git a/web/scanEngine/templates/scanEngine/wordlist/add.html b/web/scanEngine/templates/scanEngine/wordlist/add.html index 2f19214d8..af506e882 100644 --- a/web/scanEngine/templates/scanEngine/wordlist/add.html +++ b/web/scanEngine/templates/scanEngine/wordlist/add.html @@ -10,7 +10,7 @@ {% endblock custom_js_css_link %} {% block breadcrumb_title %} - + {% endblock breadcrumb_title %} diff --git a/web/scanEngine/templates/scanEngine/wordlist/index.html b/web/scanEngine/templates/scanEngine/wordlist/index.html index f7dddf57d..b9bbdc54a 100644 --- a/web/scanEngine/templates/scanEngine/wordlist/index.html +++ b/web/scanEngine/templates/scanEngine/wordlist/index.html @@ -9,7 +9,7 @@ {% endblock custom_js_css_link %} {% block breadcrumb_title %} - + {% endblock breadcrumb_title %} @@ -26,7 +26,7 @@
diff --git a/web/scanEngine/templatetags/custom_tags.py b/web/scanEngine/templatetags/custom_tags.py deleted file mode 100644 index 5777e837e..000000000 --- a/web/scanEngine/templatetags/custom_tags.py +++ /dev/null @@ -1,9 +0,0 @@ -from django import template -from urllib.parse import urlparse - -register = template.Library() - - -@register.filter(name='split') -def split(value, key): - return value.split(key) diff --git a/web/scanEngine/tests.py b/web/scanEngine/tests.py deleted file mode 100644 index 7ce503c2d..000000000 --- a/web/scanEngine/tests.py +++ /dev/null @@ -1,3 +0,0 @@ -from django.test import TestCase - -# Create your tests here. diff --git a/web/scanEngine/tests/__init__.py b/web/scanEngine/tests/__init__.py new file mode 100644 index 000000000..0af3f29e3 --- /dev/null +++ b/web/scanEngine/tests/__init__.py @@ -0,0 +1,2 @@ +from utils.test_base import * +from .test_scan_engine import * diff --git a/web/scanEngine/tests/test_scan_engine.py b/web/scanEngine/tests/test_scan_engine.py new file mode 100644 index 000000000..749fa2afd --- /dev/null +++ b/web/scanEngine/tests/test_scan_engine.py @@ -0,0 +1,223 @@ +""" +test_scan_engine.py + +This file contains unit tests for the views of the scanEngine application. +It tests functionalities related to scan engines, wordlists, settings, and tools. +""" + +from django.urls import reverse +from utils.test_base import BaseTestCase +from scanEngine.models import EngineType, Wordlist, InstalledExternalTool + +__all__ = [ + 'TestScanEngineViews', +] + +class TestScanEngineViews(BaseTestCase): + """ + Test class for the scanEngine views. + """ + + def setUp(self): + """ + Initial setup for the tests. + Creates test objects for engines, wordlists, settings, and tools. + """ + super().setUp() + self.data_generator.create_project_full() + + def test_index_view(self): + """ + Tests the index view to ensure it returns the correct status code and template. + """ + response = self.client.get(reverse('scan_engine_index')) + self.assertEqual(response.status_code, 200) + self.assertTemplateUsed(response, 'scanEngine/index.html') + + def test_add_engine_view(self): + """ + Tests the add engine view to ensure a new engine is created successfully. + """ + response = self.client.post(reverse('add_engine'), { + 'engine_name': 'New Engine', + 'yaml_configuration': 'new: config' + }) + self.assertEqual(response.status_code, 302) + self.assertTrue(EngineType.objects.filter(engine_name='New Engine').exists()) + + def test_delete_engine_view(self): + """ + Tests the delete engine view to ensure an engine is deleted successfully. + """ + response = self.client.post(reverse('delete_engine_url', kwargs={ + 'id': self.data_generator.engine_type.id + })) + self.assertEqual(response.status_code, 200) + self.assertFalse(EngineType.objects.filter(id=self.data_generator.engine_type.id).exists()) + + def test_update_engine_view(self): + """ + Tests the update engine view to ensure an engine is updated successfully. + """ + response = self.client.post(reverse('update_engine', kwargs={ + 'id': self.data_generator.engine_type.id + }), { + 'engine_name': 'Updated Engine', + 'yaml_configuration': 'updated: config' + }) + self.assertEqual(response.status_code, 302) + self.data_generator.engine_type.refresh_from_db() + self.assertEqual(self.data_generator.engine_type.engine_name, 'Updated Engine') + + def test_wordlist_list_view(self): + """ + Tests the wordlist list view to ensure it returns the correct status code and template. 
+ """ + response = self.client.get(reverse('wordlist_list')) + self.assertEqual(response.status_code, 200) + self.assertTemplateUsed(response, 'scanEngine/wordlist/index.html') + + def test_add_wordlist_view(self): + """ + Tests the add wordlist view to ensure a new wordlist is created successfully. + """ + with open('test_wordlist.txt', 'w', encoding='utf-8') as f: + f.write('test\nword\nlist') + with open('test_wordlist.txt', 'rb') as f: + response = self.client.post(reverse('add_wordlist'), { + 'name': 'New Wordlist', + 'short_name': 'new', + 'upload_file': f + }) + self.assertEqual(response.status_code, 302) + self.assertTrue(Wordlist.objects.filter(name='New Wordlist').exists()) + + def test_delete_wordlist_view(self): + """ + Tests the delete wordlist view to ensure a wordlist is deleted successfully. + """ + response = self.client.post(reverse('delete_wordlist', kwargs={ + 'id': self.data_generator.wordlist.id + })) + self.assertEqual(response.status_code, 200) + self.assertFalse(Wordlist.objects.filter(id=self.data_generator.wordlist.id).exists()) + + def test_interesting_lookup_view(self): + """ + Tests the interesting lookup view to ensure it updates keywords successfully. + """ + response = self.client.post(reverse('interesting_lookup'), { + 'custom_type': True, + 'keywords': 'test,lookup' + }) + self.assertEqual(response.status_code, 302) + self.data_generator.interesting_lookup_model.refresh_from_db() + self.assertEqual(self.data_generator.interesting_lookup_model.keywords, 'test,lookup') + + def test_tool_specific_settings_view(self): + """ + Tests the tool-specific settings view to ensure it returns the correct status code and template. + """ + response = self.client.get(reverse('tool_settings')) + self.assertEqual(response.status_code, 200) + self.assertTemplateUsed(response, 'scanEngine/settings/tool.html') + + def test_rengine_settings_view(self): + """ + Tests the rengine settings view to ensure it returns the correct status code and template. + """ + response = self.client.get(reverse('rengine_settings')) + self.assertEqual(response.status_code, 200) + self.assertTemplateUsed(response, 'scanEngine/settings/rengine.html') + + def test_notification_settings_view(self): + """ + Tests the notification settings view to ensure it returns the correct status code and template. + """ + response = self.client.get(reverse('notification_settings')) + self.assertEqual(response.status_code, 200) + self.assertTemplateUsed(response, 'scanEngine/settings/notification.html') + + def test_proxy_settings_view(self): + """ + Tests the proxy settings view to ensure it updates proxy settings successfully. + """ + response = self.client.post(reverse('proxy_settings'), { + 'use_proxy': True, + 'proxies': '192.168.1.1', + }) + self.assertEqual(response.status_code, 302) + self.data_generator.proxy.refresh_from_db() + self.assertEqual(self.data_generator.proxy.proxies, '192.168.1.1') + + def test_hackerone_settings_view(self): + """ + Tests the Hackerone settings view to ensure it updates settings successfully. + """ + response = self.client.post(reverse('hackerone_settings'), { + 'username': 'newuser', + 'api_key': 'newapikey' + }) + self.assertEqual(response.status_code, 302) + self.data_generator.hackerone.refresh_from_db() + self.assertEqual(self.data_generator.hackerone.username, 'newuser') + + def test_report_settings_view(self): + """ + Tests the report settings view to ensure it updates settings successfully. 
+ """ + response = self.client.post(reverse('report_settings'), { + 'primary_color': '#FFFFFF', + 'secondary_color': '#000000' + }) + self.assertEqual(response.status_code, 302) + self.data_generator.report_setting.refresh_from_db() + self.assertEqual(self.data_generator.report_setting.primary_color, '#FFFFFF') + + def test_tool_arsenal_section_view(self): + """ + Tests the tool arsenal section view to ensure it returns the correct status code and template. + """ + response = self.client.get(reverse('tool_arsenal')) + self.assertEqual(response.status_code, 200) + self.assertTemplateUsed(response, 'scanEngine/settings/tool_arsenal.html') + + def test_api_vault_view(self): + """ + Tests the API vault view to ensure it updates API keys successfully. + """ + response = self.client.post(reverse('api_vault'), { + 'key_openai': 'test_openai_key', + 'key_netlas': 'test_netlas_key' + }) + self.assertEqual(response.status_code, 200) + self.assertTemplateUsed(response, 'scanEngine/settings/api.html') + + def test_add_tool_view(self): + """ + Tests the add tool view to ensure a new tool is created successfully. + """ + response = self.client.post(reverse('add_tool'), { + 'name': 'New Tool', + 'github_url': 'https://github.com/new/tool', + 'install_command': 'pip install new-tool', + 'description': 'New Tool Description' + }) + self.assertEqual(response.status_code, 302) + self.assertTrue(InstalledExternalTool.objects.filter(name='New Tool').exists()) + + def test_modify_tool_in_arsenal_view(self): + """ + Tests the modify tool in arsenal view to ensure a tool is updated successfully. + """ + response = self.client.post(reverse('update_tool_in_arsenal', kwargs={ + 'id': self.data_generator.external_tool.id + }), { + 'name': 'Modified Tool', + 'github_url': 'https://github.com/modified/tool', + 'install_command': 'pip install modified-tool', + 'description': 'Modified Tool Description' + }) + self.assertEqual(response.status_code, 302) + self.data_generator.external_tool.refresh_from_db() + self.assertEqual(self.data_generator.external_tool.name, 'Modified Tool') diff --git a/web/scanEngine/urls.py b/web/scanEngine/urls.py index ff7d2812c..46acc13e5 100644 --- a/web/scanEngine/urls.py +++ b/web/scanEngine/urls.py @@ -5,80 +5,88 @@ urlpatterns = [ path( - '/', + '', views.index, name='scan_engine_index'), path( - '/add/', + 'add/', views.add_engine, name='add_engine'), path( - '/delete/', + 'delete/', views.delete_engine, name='delete_engine_url'), path( - '/update/', + 'update/', views.update_engine, name='update_engine'), path( - '/tool_arsenal/update/', - views.modify_tool_in_arsenal, - name='update_tool_in_arsenal'), - path( - '/wordlist/', - views.wordlist_list, - name='wordlist_list'), - path( - '/wordlist/add/', - views.add_wordlist, - name='add_wordlist'), + 'api_vault', + views.api_vault, + name='api_vault'), path( - '/tool_arsenal/add/', - views.add_tool, - name='add_tool'), + 'api_vault/delete', + views.api_vault_delete, + name='api_vault_delete'), path( - '/wordlist/delete/', - views.delete_wordlist, - name='delete_wordlist'), + 'hackerone_settings', + views.hackerone_settings, + name='hackerone_settings'), path( - '/interesting/lookup/', + 'interesting/lookup', views.interesting_lookup, name='interesting_lookup'), + path( + 'llm_toolkit', + views.llm_toolkit_section, + name='llm_toolkit'), path( - '/tool_settings', - views.tool_specific_settings, - name='tool_settings'), - path( - '/api_vault', - views.api_vault, - name='api_vault'), - path( - '/tool_arsenal', - views.tool_arsenal_section, - 
name='tool_arsenal'), - path( - '/rengine_settings', - views.rengine_settings, - name='rengine_settings'), - path( - '/notification_settings', + 'notification_settings', views.notification_settings, name='notification_settings'), path( - '/proxy_settings', + 'proxy_settings', views.proxy_settings, name='proxy_settings'), path( - '/hackerone_settings', - views.hackerone_settings, - name='hackerone_settings'), + 'rengine_settings', + views.rengine_settings, + name='rengine_settings'), path( - '/report_settings', + 'report_settings', views.report_settings, name='report_settings'), path( - '/testHackerone/', + 'testHackerone', views.test_hackerone, name='testHackerone' ), + path( + 'tool_arsenal', + views.tool_arsenal_section, + name='tool_arsenal'), + path( + 'tool_arsenal/add', + views.add_tool, + name='add_tool'), + path( + 'tool_arsenal/update/', + views.modify_tool_in_arsenal, + name='update_tool_in_arsenal'), + path( + 'tool_settings', + views.tool_specific_settings, + name='tool_settings'), + path( + 'wordlist', + views.wordlist_list, + name='wordlist_list'), + path( + 'wordlist/delete/', + views.delete_wordlist, + name='delete_wordlist'), + path( + 'wordlist/add', + views.add_wordlist, + name='add_wordlist'), ] diff --git a/web/scanEngine/views.py b/web/scanEngine/views.py index 0a06cb790..f61d47a17 100644 --- a/web/scanEngine/views.py +++ b/web/scanEngine/views.py @@ -1,23 +1,28 @@ import glob +import json import os import re import shutil -import subprocess +from datetime import datetime from django import http from django.contrib import messages from django.shortcuts import get_object_or_404, render from django.urls import reverse from rolepermissions.decorators import has_permission_decorator -from reNgine.common_func import * -from reNgine.tasks import (run_command, send_discord_message, send_slack_message, send_telegram_message) -from scanEngine.forms import * -from scanEngine.forms import ConfigurationForm -from scanEngine.models import * - - -def index(request, slug): +from reNgine.common_func import get_open_ai_key +from reNgine.definitions import OLLAMA_INSTANCE, DEFAULT_GPT_MODELS +from reNgine.tasks import run_command, send_discord_message, send_slack_message, send_lark_message, send_telegram_message, run_gf_list +from scanEngine.forms import AddEngineForm, UpdateEngineForm, AddWordlistForm, ExternalToolForm, InterestingLookupForm, NotificationForm, ProxyForm, HackeroneForm, ReportForm +from scanEngine.models import EngineType, Wordlist, InstalledExternalTool, InterestingLookupModel, Notification, Hackerone, Proxy, VulnerabilityReportSetting +from dashboard.models import OpenAiAPIKey, NetlasAPIKey, OllamaSettings +from reNgine.definitions import PERM_MODIFY_SCAN_CONFIGURATIONS, PERM_MODIFY_SCAN_REPORT, PERM_MODIFY_WORDLISTS, PERM_MODIFY_INTERESTING_LOOKUP, PERM_MODIFY_SYSTEM_CONFIGURATIONS, FOUR_OH_FOUR_URL +from reNgine.settings import RENGINE_WORDLISTS, RENGINE_HOME, RENGINE_TOOL_GITHUB_PATH +from pathlib import Path +import requests + +def index(request): engine_type = EngineType.objects.order_by('engine_name').all() context = { 'engine_ul_show': 'show', @@ -27,28 +32,47 @@ def index(request, slug): } return render(request, 'scanEngine/index.html', context) +def clean_quotes(data): + if isinstance(data, dict): + return {key: clean_quotes(value) for key, value in data.items()} + elif isinstance(data, list): + return [clean_quotes(item) for item in data] + elif isinstance(data, str): + return data.replace('"', '') + return data 
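For context on the `clean_quotes` helper added above: it recursively walks dicts, lists, and strings and strips every double quote before the cleaned values are written back onto the form instance. A minimal standalone illustration of its behavior (a hypothetical snippet, not part of the patch; it assumes `clean_quotes` as defined above, and the field names are made up for the example):

    # Hypothetical usage of the clean_quotes helper defined above.
    data = {
        'engine_name': 'My "quoted" engine',   # quotes inside a dict value
        'excluded_paths': ['admin/"test"'],    # quotes inside a list item
    }
    cleaned = clean_quotes(data)
    # Double quotes are removed; the nested structure is preserved.
    assert cleaned == {'engine_name': 'My quoted engine', 'excluded_paths': ['admin/test']}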
@has_permission_decorator(PERM_MODIFY_SCAN_CONFIGURATIONS, redirect_url=FOUR_OH_FOUR_URL) -def add_engine(request, slug): +def add_engine(request): form = AddEngineForm() + + # load default yaml config + with open(RENGINE_HOME + '/config/default_yaml_config.yaml', 'r', encoding='utf-8') as yaml_file: + default_config = yaml_file.read() + if request.method == "POST": form = AddEngineForm(request.POST) if form.is_valid(): - form.save() + cleaned_data = {key: clean_quotes(value) for key, value in form.cleaned_data.items()} + for key, value in cleaned_data.items(): + setattr(form.instance, key, value) + form.instance.save() messages.add_message( request, messages.INFO, 'Scan Engine Added successfully') - return http.HttpResponseRedirect(reverse('scan_engine_index', kwargs={'slug': slug})) + return http.HttpResponseRedirect(reverse('scan_engine_index')) + else: + # fill form with default yaml config + form = AddEngineForm(initial={'yaml_configuration': default_config}) + context = { 'scan_engine_nav_active': 'active', 'form': form } return render(request, 'scanEngine/add_engine.html', context) - @has_permission_decorator(PERM_MODIFY_SCAN_CONFIGURATIONS, redirect_url=FOUR_OH_FOUR_URL) -def delete_engine(request, slug, id): +def delete_engine(request, id): obj = get_object_or_404(EngineType, id=id) if request.method == "POST": obj.delete() @@ -65,9 +89,8 @@ def delete_engine(request, slug, id): 'Oops! Engine could not be deleted!') return http.JsonResponse(responseData) - @has_permission_decorator(PERM_MODIFY_SCAN_CONFIGURATIONS, redirect_url=FOUR_OH_FOUR_URL) -def update_engine(request, slug, id): +def update_engine(request, id): engine = get_object_or_404(EngineType, id=id) form = UpdateEngineForm( initial={ @@ -77,21 +100,23 @@ def update_engine(request, slug, id): if request.method == "POST": form = UpdateEngineForm(request.POST, instance=engine) if form.is_valid(): - form.save() + cleaned_data = {key: clean_quotes(value) for key, value in form.cleaned_data.items()} + for key, value in cleaned_data.items(): + setattr(form.instance, key, value) + form.instance.save() messages.add_message( request, messages.INFO, 'Engine edited successfully') - return http.HttpResponseRedirect(reverse('scan_engine_index', kwargs={'slug': slug})) + return http.HttpResponseRedirect(reverse('scan_engine_index')) context = { 'scan_engine_nav_active': 'active', 'form': form } return render(request, 'scanEngine/update_engine.html', context) - @has_permission_decorator(PERM_MODIFY_WORDLISTS, redirect_url=FOUR_OH_FOUR_URL) -def wordlist_list(request, slug): +def wordlist_list(request): wordlists = Wordlist.objects.all().order_by('id') context = { 'scan_engine_nav_active': 'active', @@ -99,9 +124,8 @@ def wordlist_list(request, slug): 'wordlists': wordlists} return render(request, 'scanEngine/wordlist/index.html', context) - @has_permission_decorator(PERM_MODIFY_WORDLISTS, redirect_url=FOUR_OH_FOUR_URL) -def add_wordlist(request, slug): +def add_wordlist(request): context = {'scan_engine_nav_active': 'active', 'wordlist_li': 'active'} form = AddWordlistForm(request.POST or None, request.FILES or None) if request.method == "POST": @@ -110,10 +134,10 @@ def add_wordlist(request, slug): if txt_file.content_type == 'text/plain': wordlist_content = txt_file.read().decode('UTF-8', "ignore") wordlist_file = open( - '/usr/src/' + - 'wordlist/' + - form.cleaned_data['short_name'] + '.txt', - 'w') + Path(RENGINE_WORDLISTS) / f"{form.cleaned_data['short_name']}.txt", + 'w', + encoding='utf-8', + ) 
wordlist_file.write(wordlist_content) Wordlist.objects.create( name=form.cleaned_data['name'], @@ -124,22 +148,17 @@ def add_wordlist(request, slug): messages.INFO, 'Wordlist ' + form.cleaned_data['name'] + ' added successfully') - return http.HttpResponseRedirect(reverse('wordlist_list', kwargs={'slug': slug})) + return http.HttpResponseRedirect(reverse('wordlist_list')) context['form'] = form return render(request, 'scanEngine/wordlist/add.html', context) - @has_permission_decorator(PERM_MODIFY_WORDLISTS, redirect_url=FOUR_OH_FOUR_URL) -def delete_wordlist(request, slug, id): +def delete_wordlist(request, id): obj = get_object_or_404(Wordlist, id=id) if request.method == "POST": obj.delete() try: - os.remove( - '/usr/src/' + - 'wordlist/' + - obj.short_name + - '.txt') + os.remove(Path(RENGINE_WORDLISTS) / f'{obj.short_name}.txt') responseData = {'status': True} except Exception as e: responseData = {'status': False} @@ -155,358 +174,326 @@ def delete_wordlist(request, slug, id): 'Oops! Wordlist could not be deleted!') return http.JsonResponse(responseData) - @has_permission_decorator(PERM_MODIFY_INTERESTING_LOOKUP, redirect_url=FOUR_OH_FOUR_URL) -def interesting_lookup(request, slug): - lookup_keywords = None - context = {} - context['scan_engine_nav_active'] = 'active' - context['interesting_lookup_li'] = 'active' - context['engine_ul_show'] = 'show' - form = InterestingLookupForm() - if InterestingLookupModel.objects.filter(custom_type=True).exists(): - lookup_keywords = InterestingLookupModel.objects.filter(custom_type=True).order_by('-id')[0] - else: +def interesting_lookup(request): + lookup_keywords = InterestingLookupModel.objects.filter(custom_type=True).order_by('-id').first() + form = InterestingLookupForm(instance=lookup_keywords) + + if not lookup_keywords: form.initial_checkbox() + if request.method == "POST": - if lookup_keywords: - form = InterestingLookupForm(request.POST, instance=lookup_keywords) - else: - form = InterestingLookupForm(request.POST or None) - print(form.errors) + form = InterestingLookupForm(request.POST, instance=lookup_keywords) if form.is_valid(): - print(form.cleaned_data) form.save() - messages.add_message( - request, - messages.INFO, - 'Lookup Keywords updated successfully') - return http.HttpResponseRedirect(reverse('interesting_lookup', kwargs={'slug': slug})) + messages.info(request, 'Lookup Keywords updated successfully') + return http.HttpResponseRedirect(reverse('interesting_lookup')) - if lookup_keywords: - form.set_value(lookup_keywords) - context['interesting_lookup_found'] = True - context['form'] = form - context['default_lookup'] = InterestingLookupModel.objects.filter(id=1) + context = { + 'scan_engine_nav_active': 'active', + 'interesting_lookup_li': 'active', + 'engine_ul_show': 'show', + 'form': form, + 'interesting_lookup_found': bool(lookup_keywords), + 'default_lookup': InterestingLookupModel.objects.filter(id=1) + } return render(request, 'scanEngine/lookup.html', context) - @has_permission_decorator(PERM_MODIFY_SCAN_CONFIGURATIONS, redirect_url=FOUR_OH_FOUR_URL) -def tool_specific_settings(request, slug): +def tool_specific_settings(request): context = {} # check for incoming form requests if request.method == "POST": + handle_post_request(request) + return http.HttpResponseRedirect(reverse('tool_settings')) - print(request.FILES) - if 'gfFileUpload' in request.FILES: - gf_file = request.FILES['gfFileUpload'] - file_extension = gf_file.name.split('.')[len(gf_file.name.split('.'))-1] - if file_extension != 'json': - 
messages.add_message(request, messages.ERROR, 'Invalid GF Pattern, upload only *.json extension') - else: - # remove special chars from filename, that could possibly do directory traversal or XSS - filename = re.sub(r'[\\/*?:"<>|]',"", gf_file.name) - file_path = '/root/.gf/' + filename - file = open(file_path, "w") - file.write(gf_file.read().decode("utf-8")) - file.close() - messages.add_message(request, messages.INFO, 'Pattern {} successfully uploaded'.format(gf_file.name[:4])) - return http.HttpResponseRedirect(reverse('tool_settings', kwargs={'slug': slug})) - - elif 'nucleiFileUpload' in request.FILES: - nuclei_file = request.FILES['nucleiFileUpload'] - file_extension = nuclei_file.name.split('.')[len(nuclei_file.name.split('.'))-1] - if file_extension != 'yaml': - messages.add_message(request, messages.ERROR, 'Invalid Nuclei Pattern, upload only *.yaml extension') - else: - filename = re.sub(r'[\\/*?:"<>|]',"", nuclei_file.name) - file_path = '/root/nuclei-templates/' + filename - file = open(file_path, "w") - file.write(nuclei_file.read().decode("utf-8")) - file.close() - messages.add_message(request, messages.INFO, 'Nuclei Pattern {} successfully uploaded'.format(nuclei_file.name[:-5])) - return http.HttpResponseRedirect(reverse('tool_settings', kwargs={'slug': slug})) - - elif 'nuclei_config_text_area' in request.POST: - with open('/root/.config/nuclei/config.yaml', "w") as fhandle: - fhandle.write(request.POST.get('nuclei_config_text_area')) - messages.add_message(request, messages.INFO, 'Nuclei config updated!') - return http.HttpResponseRedirect(reverse('tool_settings', kwargs={'slug': slug})) - - elif 'subfinder_config_text_area' in request.POST: - with open('/root/.config/subfinder/config.yaml', "w") as fhandle: - fhandle.write(request.POST.get('subfinder_config_text_area')) - messages.add_message(request, messages.INFO, 'Subfinder config updated!') - return http.HttpResponseRedirect(reverse('tool_settings', kwargs={'slug': slug})) - - elif 'naabu_config_text_area' in request.POST: - with open('/root/.config/naabu/config.yaml', "w") as fhandle: - fhandle.write(request.POST.get('naabu_config_text_area')) - messages.add_message(request, messages.INFO, 'Naabu config updated!') - return http.HttpResponseRedirect(reverse('tool_settings', kwargs={'slug': slug})) - - elif 'amass_config_text_area' in request.POST: - with open('/root/.config/amass.ini', "w") as fhandle: - fhandle.write(request.POST.get('amass_config_text_area')) - messages.add_message(request, messages.INFO, 'Amass config updated!') - return http.HttpResponseRedirect(reverse('tool_settings', kwargs={'slug': slug})) - - elif 'theharvester_config_text_area' in request.POST: - with open('/usr/src/github/theHarvester/api-keys.yaml', "w") as fhandle: - fhandle.write(request.POST.get('theharvester_config_text_area')) - messages.add_message(request, messages.INFO, 'theHarvester config updated!') - return http.HttpResponseRedirect(reverse('tool_settings', kwargs={'slug': slug})) - - context['settings_nav_active'] = 'active' - context['tool_settings_li'] = 'active' - context['settings_ul_show'] = 'show' - gf_list = (subprocess.check_output(['gf', '-list'])).decode("utf-8") - nuclei_custom_pattern = [f for f in glob.glob("/root/nuclei-templates/*.yaml")] - context['nuclei_templates'] = nuclei_custom_pattern - context['gf_patterns'] = sorted(gf_list.split('\n')) + context = { + 'settings_nav_active': 'active', + 'tool_settings_li': 'active', + 'settings_ul_show': 'show', + 'gf_patterns': get_gf_patterns(request), + 
'nuclei_templates': list(glob.glob(str(Path.home() / "nuclei-templates" / "*.yaml"))) + } return render(request, 'scanEngine/settings/tool.html', context) +def handle_post_request(request): + handlers = { + 'gfFileUpload': handle_gf_upload, + 'nucleiFileUpload': handle_nuclei_upload, + 'nuclei_config_text_area': lambda r: update_config(r, 'nuclei', 'Nuclei'), + 'subfinder_config_text_area': lambda r: update_config(r, 'subfinder', 'Subfinder'), + 'naabu_config_text_area': lambda r: update_config(r, 'naabu', 'Naabu'), + 'amass_config_text_area': lambda r: update_config(r, 'amass', 'Amass', '.ini'), + 'theharvester_config_text_area': lambda r: update_config(r, 'theHarvester/api-keys', 'theHarvester'), + 'gau_config_text_area': lambda r: update_config(r, '.gau', 'GAU', '.toml'), + } + for key, handler in handlers.items(): + if key in request.FILES or key in request.POST: + handler(request) + break + +def handle_gf_upload(request): + handle_file_upload(request, 'gfFileUpload', '.gf', 'json', 'GF Pattern') + +def handle_nuclei_upload(request): + handle_file_upload(request, 'nucleiFileUpload', 'nuclei-templates', 'yaml', 'Nuclei Pattern') + +def handle_file_upload(request, file_key, directory, expected_extension, pattern_name): + uploaded_file = request.FILES[file_key] + file_extension = uploaded_file.name.split('.')[-1] + if file_extension != expected_extension: + messages.error(request, f'Invalid {pattern_name}, upload only *.{expected_extension} extension') + else: + filename = re.sub(r'[\\/*?:"<>|]', "", uploaded_file.name) + file_path = Path.home() / directory / filename + with open(file_path, "w", encoding='utf-8') as file: + file.write(uploaded_file.read().decode("utf-8")) + messages.info(request, f'{pattern_name} {uploaded_file.name[:4]} successfully uploaded') + +def update_config(request, tool_name, display_name, file_extension='.yaml'): + config_path = Path.home() / '.config' / tool_name / f'config{file_extension}' + with open(config_path, "w", encoding='utf-8') as fhandle: + fhandle.write(request.POST.get(f'{tool_name}_config_text_area')) + messages.info(request, f'{display_name} config updated!') + +def get_gf_patterns(request): + try: + gf_result = run_gf_list.delay().get(timeout=30) + if gf_result['status']: + return sorted(gf_result['output']) + messages.error(request, f"Error fetching GF patterns: {gf_result['message']}") + except Exception as e: + messages.error(request, f"Error fetching GF patterns: {str(e)}") + return [] @has_permission_decorator(PERM_MODIFY_SYSTEM_CONFIGURATIONS, redirect_url=FOUR_OH_FOUR_URL) -def rengine_settings(request, slug): - context = {} - +@has_permission_decorator(PERM_MODIFY_SYSTEM_CONFIGURATIONS, redirect_url=FOUR_OH_FOUR_URL) +def rengine_settings(request): total, used, _ = shutil.disk_usage("/") - total = total // (2**30) - used = used // (2**30) - context['total'] = total - context['used'] = used - context['free'] = total-used - context['consumed_percent'] = int(100 * float(used)/float(total)) + total_gb = total // (2**30) + used_gb = used // (2**30) - context['settings_nav_active'] = 'active' - context['rengine_settings_li'] = 'active' - context['settings_ul_show'] = 'show' + context = { + 'total': total_gb, + 'used': used_gb, + 'free': total_gb - used_gb, + 'consumed_percent': int(100 * float(used) / float(total)), + 'settings_nav_active': 'active', + 'rengine_settings_li': 'active', + 'settings_ul_show': 'show' + } return render(request, 'scanEngine/settings/rengine.html', context) - 
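A side note on the refactored `rengine_settings` view above: `shutil.disk_usage` reports sizes in bytes, the view floors them to whole GiB with integer division by 2**30, and the consumed percentage is computed from the raw byte counts rather than the rounded values. A minimal standalone sketch of the same arithmetic (illustrative only):

    import shutil

    # shutil.disk_usage returns a named tuple (total, used, free) in bytes.
    total, used, _ = shutil.disk_usage('/')
    total_gb = total // (2**30)              # floor to whole GiB
    used_gb = used // (2**30)
    free_gb = total_gb - used_gb             # free space as shown in the UI
    consumed_percent = int(100 * used / total)
    print(f'{used_gb}/{total_gb} GiB used ({consumed_percent}%), {free_gb} GiB free')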
-@has_permission_decorator(PERM_MODIFY_SCAN_CONFIGURATIONS, redirect_url=FOUR_OH_FOUR_URL) -def notification_settings(request, slug): - context = {} - form = NotificationForm() - notification = None - if Notification.objects.all().exists(): - notification = Notification.objects.all()[0] - form.set_value(notification) - else: - form.set_initial() +@has_permission_decorator(PERM_MODIFY_SYSTEM_CONFIGURATIONS, redirect_url=FOUR_OH_FOUR_URL) +def notification_settings(request): + notification = Notification.objects.first() + form = NotificationForm(instance=notification) if request.method == "POST": - if notification: - form = NotificationForm(request.POST, instance=notification) - else: - form = NotificationForm(request.POST or None) - + form = NotificationForm(request.POST, instance=notification) if form.is_valid(): form.save() - send_slack_message('*reNgine*\nCongratulations! your notification services are working.') - send_telegram_message('*reNgine*\nCongratulations! your notification services are working.') + for service in [send_slack_message, send_lark_message, send_telegram_message]: + service('*reNgine*\nCongratulations! your notification services are working.') send_discord_message('**reNgine**\nCongratulations! your notification services are working.') - messages.add_message( - request, - messages.INFO, - 'Notification Settings updated successfully and test message was sent.') - return http.HttpResponseRedirect(reverse('notification_settings', kwargs={'slug': slug})) - - context['settings_nav_active'] = 'active' - context['notification_settings_li'] = 'active' - context['settings_ul_show'] = 'show' - context['form'] = form - + messages.info(request, 'Notification Settings updated successfully and test message was sent.') + return http.HttpResponseRedirect(reverse('notification_settings')) + + context = { + 'form': form, + 'settings_nav_active': 'active', + 'notification_settings_li': 'active', + 'settings_ul_show': 'show' + } return render(request, 'scanEngine/settings/notification.html', context) - @has_permission_decorator(PERM_MODIFY_SCAN_CONFIGURATIONS, redirect_url=FOUR_OH_FOUR_URL) -def proxy_settings(request, slug): - context = {} - form = ProxyForm() - context['form'] = form - - proxy = None - if Proxy.objects.all().exists(): - proxy = Proxy.objects.all()[0] - form.set_value(proxy) - else: - form.set_initial() +def proxy_settings(request): + proxy = Proxy.objects.first() + form = ProxyForm(instance=proxy) if request.method == "POST": - if proxy: - form = ProxyForm(request.POST, instance=proxy) - else: - form = ProxyForm(request.POST or None) - + form = ProxyForm(request.POST, instance=proxy) if form.is_valid(): form.save() - messages.add_message( - request, - messages.INFO, - 'Proxies updated.') - return http.HttpResponseRedirect(reverse('proxy_settings', kwargs={'slug': slug})) - context['settings_nav_active'] = 'active' - context['proxy_settings_li'] = 'active' - context['settings_ul_show'] = 'show' + messages.info(request, 'Proxies updated.') + return http.HttpResponseRedirect(reverse('proxy_settings')) + context = { + 'form': form, + 'settings_nav_active': 'active', + 'proxy_settings_li': 'active', + 'settings_ul_show': 'show' + } return render(request, 'scanEngine/settings/proxy.html', context) - -@has_permission_decorator(PERM_MODIFY_SCAN_CONFIGURATIONS, redirect_url=FOUR_OH_FOUR_URL) +@has_permission_decorator(PERM_MODIFY_SYSTEM_CONFIGURATIONS, redirect_url=FOUR_OH_FOUR_URL) def test_hackerone(request): - context = {} if request.method == "POST": - headers = { - 
'Accept': 'application/json' - } body = json.loads(request.body) - r = requests.get( + response = requests.get( 'https://api.hackerone.com/v1/hackers/payments/balance', auth=(body['username'], body['api_key']), - headers = headers + headers={'Accept': 'application/json'} ) - if r.status_code == 200: - return http.JsonResponse({"status": 200}) - + return http.JsonResponse({"status": response.status_code}) return http.JsonResponse({"status": 401}) - @has_permission_decorator(PERM_MODIFY_SCAN_CONFIGURATIONS, redirect_url=FOUR_OH_FOUR_URL) -def hackerone_settings(request, slug): - context = {} - form = HackeroneForm() - context['form'] = form - - hackerone = None - if Hackerone.objects.all().exists(): - hackerone = Hackerone.objects.all()[0] - form.set_value(hackerone) - else: - form.set_initial() +def hackerone_settings(request): + hackerone = Hackerone.objects.first() + form = HackeroneForm(instance=hackerone) if request.method == "POST": - if hackerone: - form = HackeroneForm(request.POST, instance=hackerone) - else: - form = HackeroneForm(request.POST or None) - + form = HackeroneForm(request.POST, instance=hackerone) if form.is_valid(): form.save() - messages.add_message( - request, - messages.INFO, - 'Hackerone Settings updated.') - return http.HttpResponseRedirect(reverse('hackerone_settings', kwargs={'slug': slug})) - context['settings_nav_active'] = 'active' - context['hackerone_settings_li'] = 'active' - context['settings_ul_show'] = 'show' + messages.info(request, 'Hackerone Settings updated.') + return http.HttpResponseRedirect(reverse('hackerone_settings')) + context = { + 'form': form, + 'settings_nav_active': 'active', + 'hackerone_settings_li': 'active', + 'settings_ul_show': 'show' + } return render(request, 'scanEngine/settings/hackerone.html', context) - @has_permission_decorator(PERM_MODIFY_SCAN_REPORT, redirect_url=FOUR_OH_FOUR_URL) -def report_settings(request, slug): - context = {} - form = ReportForm() - context['form'] = form - +def report_settings(request): primary_color = '#FFB74D' secondary_color = '#212121' - report = None - if VulnerabilityReportSetting.objects.all().exists(): - report = VulnerabilityReportSetting.objects.all()[0] + if report := VulnerabilityReportSetting.objects.first(): + form = ReportForm(instance=report) primary_color = report.primary_color secondary_color = report.secondary_color - form.set_value(report) else: + form = ReportForm() form.set_initial() if request.method == "POST": - if report: - form = ReportForm(request.POST, instance=report) - else: - form = ReportForm(request.POST or None) - + form = ReportForm(request.POST, instance=report) if report else ReportForm(request.POST) if form.is_valid(): form.save() - messages.add_message( - request, - messages.INFO, - 'Report Settings updated.') - return http.HttpResponseRedirect(reverse('report_settings', kwargs={'slug': slug})) + messages.info(request, 'Report Settings updated.') + return http.HttpResponseRedirect(reverse('report_settings')) - - context['settings_nav_active'] = 'active' - context['report_settings_li'] = 'active' - context['settings_ul_show'] = 'show' - context['primary_color'] = primary_color - context['secondary_color'] = secondary_color + context = { + 'form': form, + 'settings_nav_active': 'active', + 'report_settings_li': 'active', + 'settings_ul_show': 'show', + 'primary_color': primary_color, + 'secondary_color': secondary_color + } return render(request, 'scanEngine/settings/report.html', context) - @has_permission_decorator(PERM_MODIFY_SYSTEM_CONFIGURATIONS, 
 
-
 @has_permission_decorator(PERM_MODIFY_SYSTEM_CONFIGURATIONS, redirect_url=FOUR_OH_FOUR_URL)
-def tool_arsenal_section(request, slug):
-    context = {}
-    tools = InstalledExternalTool.objects.all().order_by('id')
-    context['installed_tools'] = tools
-    return render(request, 'scanEngine/settings/tool_arsenal.html', context)
-
+def tool_arsenal_section(request):
+    return render(request, 'scanEngine/settings/tool_arsenal.html', {
+        'installed_tools': InstalledExternalTool.objects.all().order_by('id')
+    })
 
 @has_permission_decorator(PERM_MODIFY_SYSTEM_CONFIGURATIONS, redirect_url=FOUR_OH_FOUR_URL)
-def api_vault(request, slug):
-    context = {}
+def api_vault_delete(request):
+    response = {"status": "error"}
     if request.method == "POST":
-        key_openai = request.POST.get('key_openai')
-        key_netlas = request.POST.get('key_netlas')
+        handler = {"key_openai": OpenAiAPIKey, "key_netlas": NetlasAPIKey}
+        response["deleted"] = []
+        for key in json.loads(request.body.decode("utf-8"))["keys"]:
+            try:
+                handler[key].objects.first().delete()
+                response["deleted"].append(key)
+            except KeyError:
+                # Ignore the KeyError if the key does not exist
+                pass
+        response["status"] = "OK"
+    else:
+        response["message"] = "Method not allowed"
+    return http.JsonResponse(response)
+
+def llm_toolkit_section(request):
+    all_models = DEFAULT_GPT_MODELS.copy()
+    response = requests.get(f'{OLLAMA_INSTANCE}/api/tags')
+    if response.status_code == 200:
+        ollama_models = response.json().get('models', [])
+        date_format = "%Y-%m-%dT%H:%M:%S"
+        all_models.extend([{**model,
+            'modified_at': datetime.strptime(model['modified_at'].split('.')[0], date_format),
+            'is_local': True,
+        } for model in ollama_models])
+
+    selected_model = OllamaSettings.objects.first()
+    selected_model_name = selected_model.selected_model if selected_model else 'gpt-3.5-turbo'
+
+    for model in all_models:
+        if model['name'] == selected_model_name:
+            model['selected'] = True
+    context = {
+        'installed_models': all_models,
+        'openai_key_error': not get_open_ai_key() and 'gpt' in selected_model_name
+    }
+    return render(request, 'scanEngine/settings/llm_toolkit.html', context)
 
-    if key_openai:
-        openai_api_key = OpenAiAPIKey.objects.first()
-        if openai_api_key:
+@has_permission_decorator(PERM_MODIFY_SYSTEM_CONFIGURATIONS, redirect_url=FOUR_OH_FOUR_URL)
+def api_vault(request):
+    if request.method == "POST":
+        if (key_openai := request.POST.get('key_openai')) and len(key_openai) > 0:
+            if openai_api_key := OpenAiAPIKey.objects.first():
                 openai_api_key.key = key_openai
                 openai_api_key.save()
             else:
                 OpenAiAPIKey.objects.create(key=key_openai)
-    if key_netlas:
-        netlas_api_key = NetlasAPIKey.objects.first()
-        if netlas_api_key:
+        if (key_netlas := request.POST.get('key_netlas')) and len(key_netlas) > 0:
+            if netlas_api_key := NetlasAPIKey.objects.first():
                 netlas_api_key.key = key_netlas
                 netlas_api_key.save()
             else:
                 NetlasAPIKey.objects.create(key=key_netlas)
-    openai_key = OpenAiAPIKey.objects.first()
-    netlas_key = NetlasAPIKey.objects.first()
-    context['openai_key'] = openai_key
-    context['netlas_key'] = netlas_key
+    # FIXME: This should be better handled via forms, formviews & formsets
+    context = {"apiKeys": [
+        {
+            "recommended": True,
+            "optional": True,
+            "experimental": True,
+            "name": "OpenAI",
+            "text": "OpenAI keys will be used to generate vulnerability description, remediation, impact and vulnerability report writing using ChatGPT.",
+            "hasKey": OpenAiAPIKey.objects.first() is not None
+        },
+        {
+            "name": "Netlas",
+            "text": "Netlas keys will be used to get whois information and other OSINT data.",
+            "optional": True,
+            "hasKey": NetlasAPIKey.objects.first() is not None
+        }
+    ]}
     return render(request, 'scanEngine/settings/api.html', context)
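The new api_vault_delete endpoint above expects a JSON body of the form {"keys": [...]}, where only 'key_openai' and 'key_netlas' map to a model in the handler table; any other key falls into the KeyError branch and is skipped. A sketch of a matching request; the URL path is hypothetical and authentication is omitted, only the payload shape is taken from the view:

    import json
    import requests

    # Hypothetical endpoint path; the payload shape is what api_vault_delete parses.
    resp = requests.post(
        'https://rengine.example.com/scan_engine/api_vault_delete',
        data=json.dumps({'keys': ['key_openai', 'key_netlas']}),
    )
    print(resp.json())  # {"status": "OK", "deleted": [...]} on success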
"hasKey": NetlasAPIKey.objects.first() is not None + } + ]} return render(request, 'scanEngine/settings/api.html', context) - @has_permission_decorator(PERM_MODIFY_SYSTEM_CONFIGURATIONS, redirect_url=FOUR_OH_FOUR_URL) -def add_tool(request, slug): +def add_tool(request): form = ExternalToolForm() if request.method == "POST": form = ExternalToolForm(request.POST) - print(form.errors) if form.is_valid(): # add tool install_command = form.data['install_command'] github_clone_path = None + + # Only modify install_command if it contains 'git clone' if 'git clone' in install_command: project_name = install_command.split('/')[-1] - install_command = install_command + ' /usr/src/github/' + project_name + ' && pip install -r /usr/src/github/' + project_name + '/requirements.txt' - github_clone_path = '/usr/src/github/' + project_name - # if github cloned we also need to install requirements, atleast found in the main dir - install_command = 'pip3 install -r /usr/src/github/' + project_name + '/requirements.txt' + install_command = f'{install_command} {RENGINE_TOOL_GITHUB_PATH}/{project_name} && pip install -r {RENGINE_TOOL_GITHUB_PATH}/{project_name}/requirements.txt' + github_clone_path = f'{RENGINE_TOOL_GITHUB_PATH}/{project_name}' run_command(install_command) run_command.apply_async(args=(install_command,)) saved_form = form.save() + if github_clone_path: tool = InstalledExternalTool.objects.get(id=saved_form.pk) tool.github_clone_path = github_clone_path @@ -516,16 +503,15 @@ def add_tool(request, slug): request, messages.INFO, 'External Tool Successfully Added!') - return http.HttpResponseRedirect(reverse('tool_arsenal', kwargs={'slug': slug})) + return http.HttpResponseRedirect(reverse('tool_arsenal')) context = { - 'settings_nav_active': 'active', - 'form': form - } + 'settings_nav_active': 'active', + 'form': form + } return render(request, 'scanEngine/settings/add_tool.html', context) - @has_permission_decorator(PERM_MODIFY_SYSTEM_CONFIGURATIONS, redirect_url=FOUR_OH_FOUR_URL) -def modify_tool_in_arsenal(request, slug, id): +def modify_tool_in_arsenal(request, id): external_tool = get_object_or_404(InstalledExternalTool, id=id) form = ExternalToolForm() if request.method == "POST": @@ -536,7 +522,7 @@ def modify_tool_in_arsenal(request, slug, id): request, messages.INFO, 'Tool modified successfully') - return http.HttpResponseRedirect(reverse('tool_arsenal', kwargs={'slug': slug})) + return http.HttpResponseRedirect(reverse('tool_arsenal')) else: form.set_value(external_tool) context = { diff --git a/web/startScan/fixtures/startScan.json b/web/startScan/fixtures/startScan.json new file mode 100644 index 000000000..929ab016b --- /dev/null +++ b/web/startScan/fixtures/startScan.json @@ -0,0 +1,3392 @@ +[ +{ + "model": "startScan.scanhistory", + "pk": 1, + "fields": { + "start_scan_date": "2024-09-03T21:25:45.594Z", + "scan_status": 2, + "results_dir": "/home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97", + "domain": 1, + "scan_type": 7, + "celery_ids": "[\"c2de965c-4db3-405b-b58d-9894f18306b2\", \"d3715a2f-20dd-471f-a1b6-0a7f35472afd\", \"a0d9c581-9f0f-46e9-ad45-2abb20681fe0\", null, \"13e72a80-8f84-4ae6-b47f-081f0ecd4b6c\", \"3596c2c2-2dca-4164-b342-b5046c697304\", \"dc33f486-7ad0-4c73-a355-4c93df4538af\", \"b74526bd-8f9a-4738-a98f-876ba776da33\", \"e1351fcc-958c-4fd2-941b-e8c34c3db9d9\", \"e61a5153-22d3-4890-984c-c45177e93baa\", \"267cc8e5-24ab-4651-a708-4286b4506ee0\", \"3f932d02-9beb-4c0b-9cb0-394fc6e73401\"]", + "tasks": "[\"subdomain_discovery\", 
\"http_crawl\", \"port_scan\", \"osint\", \"dir_file_fuzz\", \"fetch_url\", \"vulnerability_scan\", \"waf_detection\", \"screenshot\"]", + "stop_scan_date": "2024-09-03T22:29:36.469Z", + "used_gf_patterns": "debug_logic,idor,interestingEXT,interestingparams,interestingsubs,lfi,rce,redirect,sqli,ssrf,ssti,xss", + "error_message": null, + "initiated_by": 1, + "aborted_by": null, + "emails": [], + "employees": [], + "buckets": [], + "dorks": [ + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + 9, + 10, + 11, + 12, + 13, + 14, + 15, + 16, + 17, + 18, + 19, + 20, + 21, + 22, + 23, + 24, + 25, + 26, + 27, + 28, + 29, + 30, + 31, + 32, + 33, + 34, + 35, + 36, + 37, + 38, + 39, + 40, + 41, + 42, + 43, + 44, + 45, + 46, + 47, + 48, + 49, + 50, + 51, + 52, + 53, + 54, + 55, + 56, + 57, + 58, + 59, + 60, + 61, + 62, + 63, + 64, + 65, + 66, + 67, + 68, + 69, + 70, + 71, + 72, + 73, + 74, + 75, + 76, + 77 + ] + } +}, +{ + "model": "startScan.subdomain", + "pk": 1, + "fields": { + "scan_history": 1, + "target_domain": 1, + "name": "testphp.vulnweb.com", + "is_imported_subdomain": true, + "is_important": false, + "http_url": "http://testphp.vulnweb.com", + "screenshot_path": "/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/screenshots/screens/http.testphp.vulnweb.com.png", + "http_header_path": null, + "discovered_date": "2024-09-03T21:25:45.864Z", + "cname": null, + "is_cdn": false, + "cdn_name": null, + "http_status": 200, + "content_type": "text/html", + "response_time": 0.345110429, + "webserver": "nginx/1.19.0", + "content_length": 4958, + "page_title": "Home of Acunetix Art", + "attack_surface": null, + "technologies": [ + 1, + 2, + 3, + 4 + ], + "ip_addresses": [ + 1 + ], + "directories": [ + 5 + ], + "waf": [] + } +}, +{ + "model": "startScan.subdomain", + "pk": 2, + "fields": { + "scan_history": 1, + "target_domain": 1, + "name": "vulnweb.com", + "is_imported_subdomain": false, + "is_important": false, + "http_url": "http://vulnweb.com", + "screenshot_path": "/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/screenshots/screens/http.vulnweb.com.png", + "http_header_path": null, + "discovered_date": "2024-09-03T21:25:57.076Z", + "cname": null, + "is_cdn": false, + "cdn_name": null, + "http_status": 200, + "content_type": "text/html", + "response_time": 0.352853777, + "webserver": "nginx/1.19.0", + "content_length": 4018, + "page_title": "Acunetix Web Vulnerability Scanner - Test websites", + "attack_surface": null, + "technologies": [ + 2 + ], + "ip_addresses": [ + 1 + ], + "directories": [ + 6 + ], + "waf": [] + } +}, +{ + "model": "startScan.subdomain", + "pk": 16, + "fields": { + "scan_history": 1, + "target_domain": 1, + "name": "rest.vulnweb.com", + "is_imported_subdomain": false, + "is_important": false, + "http_url": "http://rest.vulnweb.com", + "screenshot_path": "/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/screenshots/screens/http.rest.vulnweb.com.png", + "http_header_path": null, + "discovered_date": "2024-09-03T21:28:09.647Z", + "cname": null, + "is_cdn": false, + "cdn_name": null, + "http_status": 200, + "content_type": "text/html", + "response_time": 0.422205408, + "webserver": "Apache/2.4.25 (Debian)", + "content_length": 3555, + "page_title": "Acunetix Vulnerable REST API", + "attack_surface": null, + "technologies": [ + 9, + 10, + 11 + ], + "ip_addresses": [ + 3 + ], + "directories": [ + 1 + ], + "waf": [] + } +}, +{ + "model": "startScan.subdomain", + "pk": 23, + "fields": { + "scan_history": 1, + "target_domain": 1, + "name": "testasp.vulnweb.com", + 
"is_imported_subdomain": false, + "is_important": false, + "http_url": "http://testasp.vulnweb.com", + "screenshot_path": "/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/screenshots/screens/http.testasp.vulnweb.com.png", + "http_header_path": null, + "discovered_date": "2024-09-03T21:28:09.673Z", + "cname": null, + "is_cdn": false, + "cdn_name": null, + "http_status": 200, + "content_type": "text/html", + "response_time": 0.36117539699999995, + "webserver": "Microsoft-IIS/8.5", + "content_length": 3537, + "page_title": "acuforum forums", + "attack_surface": null, + "technologies": [ + 1, + 6, + 7, + 8 + ], + "ip_addresses": [ + 2 + ], + "directories": [ + 2 + ], + "waf": [] + } +}, +{ + "model": "startScan.subdomain", + "pk": 24, + "fields": { + "scan_history": 1, + "target_domain": 1, + "name": "testaspnet.vulnweb.com", + "is_imported_subdomain": false, + "is_important": false, + "http_url": "http://testaspnet.vulnweb.com", + "screenshot_path": "/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/screenshots/screens/http.testaspnet.vulnweb.com.png", + "http_header_path": null, + "discovered_date": "2024-09-03T21:28:09.677Z", + "cname": null, + "is_cdn": false, + "cdn_name": null, + "http_status": 200, + "content_type": "text/html", + "response_time": 0.535234499, + "webserver": "Microsoft-IIS/8.5", + "content_length": 14082, + "page_title": "acublog news", + "attack_surface": null, + "technologies": [ + 6, + 8, + 12, + 13 + ], + "ip_addresses": [ + 2 + ], + "directories": [ + 3 + ], + "waf": [] + } +}, +{ + "model": "startScan.subdomain", + "pk": 26, + "fields": { + "scan_history": 1, + "target_domain": 1, + "name": "testhtml5.vulnweb.com", + "is_imported_subdomain": false, + "is_important": false, + "http_url": "http://testhtml5.vulnweb.com", + "screenshot_path": "/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/screenshots/screens/http.testhtml5.vulnweb.com.png", + "http_header_path": null, + "discovered_date": "2024-09-03T21:28:09.686Z", + "cname": null, + "is_cdn": false, + "cdn_name": null, + "http_status": 200, + "content_type": "text/html", + "response_time": 0.342381867, + "webserver": "nginx/1.19.0", + "content_length": 6940, + "page_title": "SecurityTweets - HTML5 test website for Acunetix Web Vulnerability Scanner", + "attack_surface": null, + "technologies": [ + 2, + 5 + ], + "ip_addresses": [ + 1 + ], + "directories": [ + 4 + ], + "waf": [] + } +}, +{ + "model": "startScan.subdomain", + "pk": 38, + "fields": { + "scan_history": 1, + "target_domain": 1, + "name": "www.vulnweb.com", + "is_imported_subdomain": false, + "is_important": false, + "http_url": "http://www.vulnweb.com", + "screenshot_path": "/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/screenshots/screens/http.www.vulnweb.com.png", + "http_header_path": null, + "discovered_date": "2024-09-03T21:28:09.730Z", + "cname": null, + "is_cdn": false, + "cdn_name": null, + "http_status": 200, + "content_type": "text/html", + "response_time": 0.352635869, + "webserver": "nginx/1.19.0", + "content_length": 4018, + "page_title": "Acunetix Web Vulnerability Scanner - Test websites", + "attack_surface": null, + "technologies": [ + 2 + ], + "ip_addresses": [ + 1 + ], + "directories": [ + 7 + ], + "waf": [] + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 1, + "fields": { + "name": "dns" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 2, + "fields": { + "name": "caa" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 3, + "fields": { + "name": "spf" + } +}, +{ + 
"model": "startScan.vulnerabilitytags", + "pk": 4, + "fields": { + "name": "txt" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 5, + "fields": { + "name": "tech" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 6, + "fields": { + "name": "nginx" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 7, + "fields": { + "name": "misconfig" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 8, + "fields": { + "name": "http" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 9, + "fields": { + "name": "cookie" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 10, + "fields": { + "name": "generic" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 11, + "fields": { + "name": "apache" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 12, + "fields": { + "name": "php" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 13, + "fields": { + "name": "ns" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 14, + "fields": { + "name": "form" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 15, + "fields": { + "name": "misc" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 16, + "fields": { + "name": "miscellaneous" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 17, + "fields": { + "name": "cve" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 18, + "fields": { + "name": "cve2022" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 19, + "fields": { + "name": "carrental" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 20, + "fields": { + "name": "cms" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 21, + "fields": { + "name": "sqli" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 22, + "fields": { + "name": "authenticated" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 23, + "fields": { + "name": "car_rental_management_system_project" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 24, + "fields": { + "name": "headers" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 25, + "fields": { + "name": "detect" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 26, + "fields": { + "name": "aspnet" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 27, + "fields": { + "name": "phpstorm" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 28, + "fields": { + "name": "jetbrains" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 29, + "fields": { + "name": "idea" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 30, + "fields": { + "name": "exposure" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 31, + "fields": { + "name": "files" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 32, + "fields": { + "name": "debug" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 33, + "fields": { + "name": "microsoft" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 34, + "fields": { + "name": "iis" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 35, + "fields": { + "name": "waf" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 36, + "fields": { + "name": "xxe" + } +}, +{ + "model": "startScan.vulnerabilitytags", + "pk": 37, + "fields": { + "name": "blind" + } +}, +{ + "model": "startScan.vulnerabilityreference", + "pk": 1, + "fields": { + "url": "https://support.dnsimple.com/articles/caa-record/#whats-a-caa-record" + } +}, +{ + "model": 
"startScan.vulnerabilityreference", + "pk": 2, + "fields": { + "url": "https://www.mimecast.com/content/how-to-create-an-spf-txt-record" + } +}, +{ + "model": "startScan.vulnerabilityreference", + "pk": 3, + "fields": { + "url": "https://www.netspi.com/blog/technical/network-penetration-testing/analyzing-dns-txt-records-to-fingerprint-service-providers/" + } +}, +{ + "model": "startScan.vulnerabilityreference", + "pk": 4, + "fields": { + "url": "https://owasp.org/www-community/HttpOnly" + } +}, +{ + "model": "startScan.vulnerabilityreference", + "pk": 5, + "fields": { + "url": "https://owasp.org/www-community/controls/SecureCookieAttribute" + } +}, +{ + "model": "startScan.vulnerabilityreference", + "pk": 6, + "fields": { + "url": "https://github.com/dirtycoder0124/formcrawler" + } +}, +{ + "model": "startScan.vulnerabilityreference", + "pk": 7, + "fields": { + "url": "https://github.com/k0xx11/bug_report/blob/main/vendors/campcodes.com/car-rental-management-system/SQLi-8.md" + } +}, +{ + "model": "startScan.vulnerabilityreference", + "pk": 8, + "fields": { + "url": "https://nvd.nist.gov/vuln/detail/CVE-2022-32028" + } +}, +{ + "model": "startScan.vulnerabilityreference", + "pk": 9, + "fields": { + "url": "https://github.com/ARPSyndicate/kenzer-templates" + } +}, +{ + "model": "startScan.vulnerabilityreference", + "pk": 10, + "fields": { + "url": "https://developer.mozilla.org/en-US/docs/Web/Security/Mixed_content" + } +}, +{ + "model": "startScan.vulnerabilityreference", + "pk": 11, + "fields": { + "url": "https://portswigger.net/kb/issues/01000400_mixed-content" + } +}, +{ + "model": "startScan.vulnerabilityreference", + "pk": 12, + "fields": { + "url": "https://resources.infosecinstitute.com/topics/vulnerabilities/https-mixed-content-vulnerability/" + } +}, +{ + "model": "startScan.vulnerabilityreference", + "pk": 13, + "fields": { + "url": "https://docs.gitlab.com/ee/user/application_security/dast/checks/319.1.html" + } +}, +{ + "model": "startScan.vulnerabilityreference", + "pk": 14, + "fields": { + "url": "https://portswigger.net/kb/issues/00100800_asp-net-debugging-enabled" + } +}, +{ + "model": "startScan.vulnerabilityreference", + "pk": 15, + "fields": { + "url": "https://github.com/Ekultek/WhatWaf" + } +}, +{ + "model": "startScan.scanactivity", + "pk": 1, + "fields": { + "scan_of": 1, + "title": "Subdomain discovery", + "name": "subdomain_discovery", + "time": "2024-09-03T21:28:31.457Z", + "status": 2, + "error_message": null, + "traceback": null, + "celery_id": "be34c2cd-c7b3-41df-abd6-3e12bb512ea3" + } +}, +{ + "model": "startScan.scanactivity", + "pk": 2, + "fields": { + "scan_of": 1, + "title": "OS Intelligence", + "name": "osint", + "time": "2024-09-03T21:26:58.275Z", + "status": 2, + "error_message": null, + "traceback": null, + "celery_id": "a0d9c581-9f0f-46e9-ad45-2abb20681fe0" + } +}, +{ + "model": "startScan.scanactivity", + "pk": 3, + "fields": { + "scan_of": 1, + "title": "Http crawl", + "name": "http_crawl", + "time": "2024-09-03T21:28:31.186Z", + "status": 2, + "error_message": null, + "traceback": null, + "celery_id": null + } +}, +{ + "model": "startScan.scanactivity", + "pk": 4, + "fields": { + "scan_of": 1, + "title": "Port scan", + "name": "port_scan", + "time": "2024-09-03T21:29:11.664Z", + "status": 2, + "error_message": null, + "traceback": null, + "celery_id": "13e72a80-8f84-4ae6-b47f-081f0ecd4b6c" + } +}, +{ + "model": "startScan.scanactivity", + "pk": 5, + "fields": { + "scan_of": 1, + "title": "Fetch URL", + "name": "fetch_url", + "time": 
"2024-09-03T22:02:45.068Z", + "status": 2, + "error_message": null, + "traceback": null, + "celery_id": "3596c2c2-2dca-4164-b342-b5046c697304" + } +}, +{ + "model": "startScan.scanactivity", + "pk": 6, + "fields": { + "scan_of": 1, + "title": "Directories & files fuzz", + "name": "dir_file_fuzz", + "time": "2024-09-03T22:13:05.287Z", + "status": 2, + "error_message": null, + "traceback": null, + "celery_id": "032f40df-a5f0-4fb6-bb06-63b73a54bd17" + } +}, +{ + "model": "startScan.scanactivity", + "pk": 7, + "fields": { + "scan_of": 1, + "title": "Vulnerability scan", + "name": "vulnerability_scan", + "time": "2024-09-03T22:29:16.424Z", + "status": 2, + "error_message": null, + "traceback": null, + "celery_id": "6673b166-bc55-4440-a217-9932b697cd69" + } +}, +{ + "model": "startScan.scanactivity", + "pk": 8, + "fields": { + "scan_of": 1, + "title": "Screenshot", + "name": "screenshot", + "time": "2024-09-03T22:03:01.734Z", + "status": 2, + "error_message": null, + "traceback": null, + "celery_id": "dc33f486-7ad0-4c73-a355-4c93df4538af" + } +}, +{ + "model": "startScan.scanactivity", + "pk": 9, + "fields": { + "scan_of": 1, + "title": "WAF detection", + "name": "waf_detection", + "time": "2024-09-03T22:03:10.096Z", + "status": 2, + "error_message": null, + "traceback": null, + "celery_id": "c4ccc7f8-b370-4354-9a6a-098ba13fde0f" + } +}, +{ + "model": "startScan.scanactivity", + "pk": 10, + "fields": { + "scan_of": 1, + "title": "Nuclei Scan", + "name": "nuclei_scan", + "time": "2024-09-03T22:29:15.243Z", + "status": 2, + "error_message": null, + "traceback": null, + "celery_id": "b74526bd-8f9a-4738-a98f-876ba776da33" + } +}, +{ + "model": "startScan.scanactivity", + "pk": 11, + "fields": { + "scan_of": 1, + "title": "Nuclei Scan with severity unknown", + "name": "nuclei_individual_severity_module", + "time": "2024-09-03T22:04:05.946Z", + "status": 2, + "error_message": null, + "traceback": null, + "celery_id": "e1351fcc-958c-4fd2-941b-e8c34c3db9d9" + } +}, +{ + "model": "startScan.scanactivity", + "pk": 12, + "fields": { + "scan_of": 1, + "title": "Nuclei Scan with severity info", + "name": "nuclei_individual_severity_module", + "time": "2024-09-03T22:16:51.232Z", + "status": 2, + "error_message": null, + "traceback": null, + "celery_id": "e61a5153-22d3-4890-984c-c45177e93baa" + } +}, +{ + "model": "startScan.scanactivity", + "pk": 13, + "fields": { + "scan_of": 1, + "title": "Nuclei Scan with severity low", + "name": "nuclei_individual_severity_module", + "time": "2024-09-03T22:08:56.115Z", + "status": 2, + "error_message": null, + "traceback": null, + "celery_id": "267cc8e5-24ab-4651-a708-4286b4506ee0" + } +}, +{ + "model": "startScan.scanactivity", + "pk": 14, + "fields": { + "scan_of": 1, + "title": "Nuclei Scan with severity medium", + "name": "nuclei_individual_severity_module", + "time": "2024-09-03T22:28:15.118Z", + "status": 2, + "error_message": null, + "traceback": null, + "celery_id": "47b90b12-ecce-49c9-93cf-f1e26fc15db6" + } +}, +{ + "model": "startScan.scanactivity", + "pk": 15, + "fields": { + "scan_of": 1, + "title": "Nuclei Scan with severity high", + "name": "nuclei_individual_severity_module", + "time": "2024-09-03T22:29:10.920Z", + "status": 2, + "error_message": null, + "traceback": null, + "celery_id": "06e12924-022d-4972-8b4b-ceda9b6985f2" + } +}, +{ + "model": "startScan.scanactivity", + "pk": 16, + "fields": { + "scan_of": 1, + "title": "Nuclei Scan with severity critical", + "name": "nuclei_individual_severity_module", + "time": "2024-09-03T22:06:56.220Z", + "status": 
2, + "error_message": null, + "traceback": null, + "celery_id": "3f932d02-9beb-4c0b-9cb0-394fc6e73401" + } +}, +{ + "model": "startScan.command", + "pk": 1, + "fields": { + "scan_history": 1, + "activity": null, + "command": "httpx -cl -ct -rt -location -td -websocket -cname -asn -cdn -probe -random-agent -t 1 -json -u testphp.vulnweb.com -silent", + "return_code": 0, + "output": "{\"timestamp\":\"2024-09-03T21:25:56.985783616Z\",\"port\":\"80\",\"url\":\"http://testphp.vulnweb.com\",\"input\":\"testphp.vulnweb.com\",\"title\":\"Home of Acunetix Art\",\"scheme\":\"http\",\"webserver\":\"nginx/1.19.0\",\"content_type\":\"text/html\",\"method\":\"GET\",\"host\":\"44.228.249.3\",\"path\":\"/\",\"time\":\"343.388749ms\",\"a\":[\"44.228.249.3\"],\"tech\":[\"DreamWeaver\",\"Nginx:1.19.0\",\"PHP:5.6.40\",\"Ubuntu\"],\"words\":514,\"lines\":110,\"status_code\":200,\"content_length\":4958,\"failed\":false,\"knowledgebase\":{\"PageType\":\"nonerror\",\"pHash\":0}}\n", + "time": "2024-09-03T21:25:45.871Z" + } +}, +{ + "model": "startScan.command", + "pk": 2, + "fields": { + "scan_history": 1, + "activity": null, + "command": "rm /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/httpx_input.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:25:57.045Z" + } +}, +{ + "model": "startScan.command", + "pk": 3, + "fields": { + "scan_history": null, + "activity": null, + "command": "geoiplookup 44.228.249.3", + "return_code": null, + "output": null, + "time": "2024-09-03T21:25:57.037Z" + } +}, +{ + "model": "startScan.command", + "pk": 4, + "fields": { + "scan_history": 1, + "activity": null, + "command": "httpx -cl -ct -rt -location -td -websocket -cname -asn -cdn -probe -random-agent -t 1 -json -u vulnweb.com -silent", + "return_code": 0, + "output": "{\"timestamp\":\"2024-09-03T21:26:08.122014174Z\",\"port\":\"80\",\"url\":\"http://vulnweb.com\",\"input\":\"vulnweb.com\",\"title\":\"Acunetix Web Vulnerability Scanner - Test websites\",\"scheme\":\"http\",\"webserver\":\"nginx/1.19.0\",\"content_type\":\"text/html\",\"method\":\"GET\",\"host\":\"44.228.249.3\",\"path\":\"/\",\"time\":\"351.001638ms\",\"a\":[\"44.228.249.3\"],\"tech\":[\"Nginx:1.19.0\"],\"words\":482,\"lines\":74,\"status_code\":200,\"content_length\":4018,\"failed\":false,\"knowledgebase\":{\"PageType\":\"nonerror\",\"pHash\":0}}\n", + "time": "2024-09-03T21:25:57.079Z" + } +}, +{ + "model": "startScan.command", + "pk": 5, + "fields": { + "scan_history": 1, + "activity": null, + "command": "rm /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/httpx_input.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:26:08.160Z" + } +}, +{ + "model": "startScan.command", + "pk": 6, + "fields": { + "scan_history": 1, + "activity": 1, + "command": "subfinder -d vulnweb.com -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/subdomains_subfinder.txt -timeout 5 -t 30 -silent", + "return_code": 0, + "output": "\n[\u001b[34mINF\u001b[0m] Current subfinder version v2.6.6 
(\u001b[92mlatest\u001b[0m)\nblogger.com.vulnweb.com\nodincovo.vulnweb.com\ntestasp.vulnweb.com\nwww.testphp.vulnweb.com\ntestphp.vulnweb.com\n2ftestasp.vulnweb.com\nedu-rost.rutestasp.vulnweb.com\nu003erest.vulnweb.com\ntestaspnet.vulnweb.com\ntetphp.vulnweb.com\nrest.vulnweb.com\ntestaspx.vulnweb.com\n5burl-3dhttp-3a-2f-2fwww.vulnweb.com\n7ctestasp.vulnweb.com\nwww.test.php.vulnweb.com\nedu-rost.ruwww.vulnweb.com\ntesthtml5.vulnweb.com\nwww.virus.vulnweb.com\nu003etestasp.vulnweb.com\ntest.php.vulnweb.com\ntestap.vulnweb.com\nwww.testasp.vulnweb.com\nestphp.vulnweb.com\ntestapsnet.vulnweb.com\nhttptestaspnet.vulnweb.com\n2f-2fwww.vulnweb.com\nrestasp.vulnweb.com\nviruswall.vulnweb.com\nvirus.vulnweb.com\nttestphp.vulnweb.com\nwww.vulnweb.com\ntest.vulnweb.com\ntestaps.vulnweb.com\n2fwww.vulnweb.com\nantivirus1.vulnweb.com\ntestpphp.vulnweb.com", + "time": "2024-09-03T21:26:08.233Z" + } +}, +{ + "model": "startScan.command", + "pk": 7, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t vulnweb.com -d 3 -p 5 -w /login/,login.html -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mvulnweb.com\u001b[0m\n\n===================================================================\nDirectories and files found from: \u001b[1m/login/,login.html\u001b[0m\n===================================================================\n\nhttp://rest.vulnweb.com/basic_authentication/api/\nhttp://rest.vulnweb.com/docs/\nhttp://testasp.vulnweb.com/Search.asp\nhttp://testaspnet.vulnweb.com/login.aspx\nhttp://testhtml5.vulnweb.com/\nhttp://testphp.vulnweb.com/\nhttp://testphp.vulnweb.com/admin/\nhttp://testphp.vulnweb.com/artists.php\nhttp://testphp.vulnweb.com/artists.php?artist\nhttp://testphp.vulnweb.com/artists.php?artist=1\nhttp://testphp.vulnweb.com/artists.php?artist=2\nhttp://testphp.vulnweb.com/categories.php\nhttp://testphp.vulnweb.com/disclaimer.php\nhttp://testphp.vulnweb.com/listproducts.php?cat\nhttp://testphp.vulnweb.com/listproducts.php?cat=1\nhttp://testphp.vulnweb.com/login.php\nhttp://testphp.vulnweb.com/search.php\nhttp://www.vulnweb.com/", + "time": "2024-09-03T21:26:08.250Z" + } +}, +{ + "model": "startScan.command", + "pk": 8, + "fields": { + "scan_history": 1, + "activity": 1, + "command": "ctfr -d vulnweb.com -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/subdomains_ctfr.txt && cat /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/subdomains_ctfr.txt | sed 's/\\*.//g' | tail -n +12 | uniq | sort > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/subdomains_ctfr.txt", + "return_code": 0, + "output": "\n\n____ _____ _____ ____\n/ ___|_ _| ___| _ \\\n| | | | | |_ | |_) |\n| |___ | | | _| | _ <\n\\____| |_| |_| |_| \\_\\\n\nVersion 1.2 - Hey don't miss AXFR!\nMade by Sheila A. Berta (UnaPibaGeek)\n\n\n[!] ---- TARGET: vulnweb.com ---- [!]\n\n\n\n[!] Done. Have a nice day! 
;).", + "time": "2024-09-03T21:26:10.094Z" + } +}, +{ + "model": "startScan.command", + "pk": 9, + "fields": { + "scan_history": 1, + "activity": 1, + "command": "sublist3r -d vulnweb.com -t 30 -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/subdomains_sublister.txt", + "return_code": 0, + "output": "\n\u001b[91m\n____ _ _ _ _ _____\n/ ___| _ _| |__ | (_)___| |_|___ / _ __\n\\___ \\| | | | '_ \\| | / __| __| |_ \\| '__|\n___) | |_| | |_) | | \\__ \\ |_ ___) | |\n|____/ \\__,_|_.__/|_|_|___/\\__|____/|_|\u001b[0m\u001b[93m\n\n# Coded By Ahmed Aboul-Ela - @aboul3la\n\n\u001b[94m[-] Enumerating subdomains now for vulnweb.com\u001b[0m\n\u001b[92m[-] Searching now in Baidu..\u001b[0m\n\u001b[92m[-] Searching now in Yahoo..\u001b[0m\n\u001b[92m[-] Searching now in Google..\u001b[0m\n\u001b[92m[-] Searching now in Bing..\u001b[0m\n\u001b[92m[-] Searching now in Ask..\u001b[0m\n\u001b[92m[-] Searching now in Netcraft..\u001b[0m\n\u001b[92m[-] Searching now in DNSdumpster..\u001b[0m\n\u001b[92m[-] Searching now in Virustotal..\u001b[0m\n\u001b[92m[-] Searching now in ThreatCrowd..\u001b[0m\n\u001b[92m[-] Searching now in SSL Certificates..\u001b[0m\n\u001b[92m[-] Searching now in PassiveDNS..\u001b[0m\n\u001b[91m[!] Error: Virustotal probably now is blocking our requests\u001b[0m", + "time": "2024-09-03T21:26:11.036Z" + } +}, +{ + "model": "startScan.command", + "pk": 10, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t vulnweb.com -d 3 -p 5 -w /admin/,admin.html -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mvulnweb.com\u001b[0m\n\n===================================================================\nDirectories and files found from: \u001b[1m/admin/,admin.html\u001b[0m\n===================================================================\n\nhttp://rest.vulnweb.com/basic_authentication/api/\nhttp://rest.vulnweb.com/docs/\nhttp://rest.vulnweb.com/images/1_basic_authentication.png\nhttp://rest.vulnweb.com/images/2_jwt.png\nhttp://rest.vulnweb.com/images/3_oauth2.png\nhttp://testphp.vulnweb.com/admin/\nhttp://testphp.vulnweb.com/login.php\nhttp://testphp.vulnweb.com/secured/phpinfo.php", + "time": "2024-09-03T21:26:12.068Z" + } +}, +{ + "model": "startScan.command", + "pk": 11, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t vulnweb.com -d 3 -p 5 -w /dashboard/,dashboard.html -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mvulnweb.com\u001b[0m\n\nSorry, no results found.", + "time": "2024-09-03T21:26:15.816Z" + } +}, +{ + "model": "startScan.command", + "pk": 12, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t stackoverflow.com -d 3 -p 2 -w vulnweb.com -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google 
Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mstackoverflow.com\u001b[0m\n\nSorry, no results found.", + "time": "2024-09-03T21:26:16.663Z" + } +}, +{ + "model": "startScan.command", + "pk": 13, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t tiktok.com -d 3 -p 2 -w vulnweb.com -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mtiktok.com\u001b[0m\n\nSorry, no results found.", + "time": "2024-09-03T21:26:17.334Z" + } +}, +{ + "model": "startScan.command", + "pk": 14, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t facebook.com -d 3 -p 2 -w vulnweb.com -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mfacebook.com\u001b[0m\n\n===================================================================\nDirectories and files found from: \u001b[1mvulnweb.com\u001b[0m\n===================================================================\n\nhttps://m.facebook.com/QwertyAloneC/posts/dork-sqli-just-add-inurl-before-themphpcatidcartphpidphpcidindexphpshowtopiccont/142828509609539/\nhttps://m.facebook.com/groups/3201261889909211/posts/3624197390948990/\nhttps://m.facebook.com/groups/3201261889909211/posts/4007010936000965/\nhttps://m.facebook.com/groups/3201261889909211/posts/4007311169304275/\nhttps://m.facebook.com/groups/3201261889909211/posts/4188319924536731/\nhttps://m.facebook.com/groups/3201261889909211/posts/4367877359914319/\nhttps://m.facebook.com/groups/3201261889909211/posts/6293716407330395/\nhttps://m.facebook.com/groups/hackingteam2022/posts/2726773620796174/\nhttps://www.facebook.com/KaliLinuxBrazil/photos/tenha-certeza-de-estar-com-o-tor-instaladoinstala%C3%A7%C3%A3o-tor-rootkaliapt-get-install/972928206082146/\nhttps://www.facebook.com/KaliLinuxBrazil/photos/tenha-certeza-de-estar-com-o-tor-instaladoinstala&C3&A7&C3&A3o-tor-rootkaliapt-get-install/972928206082146/\nhttps://www.facebook.com/groups/3201261889909211/", + "time": "2024-09-03T21:26:18.037Z" + } +}, +{ + "model": "startScan.command", + "pk": 15, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t twitter.com -d 3 -p 2 -w vulnweb.com -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mtwitter.com\u001b[0m\n\n===================================================================\nDirectories and files found from: 
\u001b[1mvulnweb.com\u001b[0m\n===================================================================\n\nhttps://mobile.twitter.com/MrHades2020\nhttps://twitter.com/0xSwayamm\nhttps://twitter.com/7h3r4bb17\nhttps://twitter.com/SadatTamzit\nhttps://twitter.com/deathpoolx1\nhttps://twitter.com/hunterabubakar\nhttps://twitter.com/kg4409\nhttps://twitter.com/ravidutt04?lang\nhttps://twitter.com/ravidutt04?lang=ca\nhttps://twitter.com/therceman/status/1711473903934054427\nhttps://twitter.com/vishack81", + "time": "2024-09-03T21:26:21.869Z" + } +}, +{ + "model": "startScan.command", + "pk": 16, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t youtube.com -d 3 -p 2 -w vulnweb.com -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1myoutube.com\u001b[0m\n\n===================================================================\nDirectories and files found from: \u001b[1mvulnweb.com\u001b[0m\n===================================================================\n\nhttps://m.youtube.com/watch?v\nhttps://m.youtube.com/watch?v=2_lswM1S264\nhttps://m.youtube.com/watch?v=2tJgPyRITGc\nhttps://m.youtube.com/watch?v=6FDKHewTP4A\nhttps://m.youtube.com/watch?v=cEoPdpVUeyU\nhttps://m.youtube.com/watch?v=gHkGuVb9vX8\nhttps://www.youtube.com/watch?v\nhttps://www.youtube.com/watch?v=IbDAHDSlgYg\nhttps://www.youtube.com/watch?v=ZstyFyfS3g4\nhttps://www.youtube.com/watch?v=cEoPdpVUeyU\nhttps://www.youtube.com/watch?v=dabm-7CcHaE\nhttps://www.youtube.com/watch?v=tAxMpoKkvCw", + "time": "2024-09-03T21:26:25.626Z" + } +}, +{ + "model": "startScan.command", + "pk": 17, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t reddit.com -d 3 -p 2 -w vulnweb.com -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mreddit.com\u001b[0m\n\n===================================================================\nDirectories and files found from: \u001b[1mvulnweb.com\u001b[0m\n===================================================================\n\nhttps://www.reddit.com/r/sysadmin/comments/gs031c/how_to_login_to_web_by_submitting_username/", + "time": "2024-09-03T21:26:29.388Z" + } +}, +{ + "model": "startScan.command", + "pk": 18, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t trello.com -d 3 -p 2 -w vulnweb.com -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mtrello.com\u001b[0m\n\nSorry, no results found.", + "time": "2024-09-03T21:26:33.059Z" + } +}, +{ + "model": "startScan.command", + "pk": 19, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t atlassian.net -d 3 -p 2 -w vulnweb.com -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": 
"\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1matlassian.net\u001b[0m\n\nSorry, no results found.", + "time": "2024-09-03T21:26:33.737Z" + } +}, +{ + "model": "startScan.command", + "pk": 20, + "fields": { + "scan_history": 1, + "activity": 1, + "command": "tlsx -san -cn -silent -ro -host vulnweb.com | sed -n '/^\\([a-zA-Z0-9]\\([-a-zA-Z0-9]*[a-zA-Z0-9]\\)\\?\\.\\)\\+vulnweb.com$/p' | uniq | sort > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/subdomains_tlsx.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:26:34.437Z" + } +}, +{ + "model": "startScan.command", + "pk": 21, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t github.com -d 3 -p 2 -w vulnweb.com -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mgithub.com\u001b[0m\n\n===================================================================\nDirectories and files found from: \u001b[1mvulnweb.com\u001b[0m\n===================================================================\n\nhttps://github.com/dradis/dradis-acunetix/blob/master/spec/fixtures/files/testphp.vulnweb.com.export.acunetix.xml\nhttps://github.com/dtag-dev-sec/explo/blob/master/examples/SQLI_LOGIN_testphp.vulnweb.com.yaml\nhttps://github.com/dtag-dev-sec/explo/blob/master/examples/SQLI_testphp.vulnweb.com.yaml\nhttps://github.com/yangbh/Hammer/blob/master/output/testphp.vulnweb.com/http_testphp.vulnweb.com", + "time": "2024-09-03T21:26:34.456Z" + } +}, +{ + "model": "startScan.command", + "pk": 22, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t gitlab.com -d 3 -p 2 -w vulnweb.com -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mgitlab.com\u001b[0m\n\nSorry, no results found.", + "time": "2024-09-03T21:26:38.165Z" + } +}, +{ + "model": "startScan.command", + "pk": 23, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t bitbucket.org -d 3 -p 2 -w vulnweb.com -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mbitbucket.org\u001b[0m\n\n===================================================================\nDirectories and files found from: 
\u001b[1mvulnweb.com\u001b[0m\n===================================================================\n\nhttps://bitbucket.org/snippets/alms/KrG4LL\nhttps://bitbucket.org/snippets/bakueikozo/rex9ar/playstation-classic-uart-login-console\nhttps://bitbucket.org/snippets/especnorthamerica/neb6dq/examples-of-web-controller-rest-api-usage\nhttps://bitbucket.org/snippets/faridani/pRX6r\nhttps://bitbucket.org/snippets/orrp/xeGzXB/interactive-mgg\nhttps://bitbucket.org/snippets/raerose01/5enKR5\nhttps://bitbucket.org/snippets/sglienke/64LG6b/introsort\nhttps://bitbucket.org/snippets/sglienke/6oBqMb\nhttps://bitbucket.org/snippets/suntorytime/rAGXar/wellness-sources-overview\nhttps://bitbucket.org/snippets/wmgodyak/6bXKj", + "time": "2024-09-03T21:26:38.820Z" + } +}, +{ + "model": "startScan.command", + "pk": 24, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t vulnweb.com -d 3 -p 4 -e env,xml,conf,toml,yml,yaml,cnf,inf,rdp,ora,txt,cfg,ini -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mvulnweb.com\u001b[0m\n\nSorry, no results found for \u001b[1menv\u001b[0m.\n\n===================================================================\nExtension: \u001b[1mxml\u001b[0m\n===================================================================\n\nhttp://testphp.vulnweb.com/.idea/workspace.xml\nhttp://testphp.vulnweb.com/crossdomain.xml\n\n\u001b[1m[!]\u001b[0m Oops... Looks like Google has temporarily blocked your IP address.", + "time": "2024-09-03T21:26:42.598Z" + } +}, +{ + "model": "startScan.command", + "pk": 25, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t vulnweb.com -d 3 -p 1 -w Jenkins -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mvulnweb.com\u001b[0m\n\n\u001b[1m[!]\u001b[0m Oops... Looks like Google has temporarily blocked your IP address.", + "time": "2024-09-03T21:26:47.795Z" + } +}, +{ + "model": "startScan.command", + "pk": 26, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t vulnweb.com -d 3 -p 5 -w /wp-content/,/wp-includes/ -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mvulnweb.com\u001b[0m\n\n\u001b[1m[!]\u001b[0m Oops... 
Looks like Google has temporarily blocked your IP address.", + "time": "2024-09-03T21:26:48.862Z" + } +}, +{ + "model": "startScan.command", + "pk": 27, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t vulnweb.com -d 3 -p 5 -w PHP Parse error,PHP Warning,PHP Error -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mvulnweb.com\u001b[0m\n\n\u001b[1m[!]\u001b[0m Oops... Looks like Google has temporarily blocked your IP address.", + "time": "2024-09-03T21:26:49.546Z" + } +}, +{ + "model": "startScan.command", + "pk": 28, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t vulnweb.com -d 3 -p 7 -e doc,docx,odt,pdf,rtf,sxw,psw,ppt,pptx,pps,csv -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mvulnweb.com\u001b[0m\n\n\u001b[1m[!]\u001b[0m Oops... Looks like Google has temporarily blocked your IP address.", + "time": "2024-09-03T21:26:50.666Z" + } +}, +{ + "model": "startScan.command", + "pk": 29, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t vulnweb.com -d 3 -p 1 -e sql,db,dbf,mdb -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mvulnweb.com\u001b[0m\n\n\u001b[1m[!]\u001b[0m Oops... Looks like Google has temporarily blocked your IP address.", + "time": "2024-09-03T21:26:51.510Z" + } +}, +{ + "model": "startScan.command", + "pk": 30, + "fields": { + "scan_history": 1, + "activity": null, + "command": "GooFuzz -t vulnweb.com -d 3 -p 1 -e git -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/gofuzz.txt", + "return_code": 1, + "output": "\n*********************************************************\n* GooFuzz 1.2.5 - The Power of Google Dorks\t\t*\n*********************************************************\n\nTarget: \u001b[1mvulnweb.com\u001b[0m\n\n\u001b[1m[!]\u001b[0m Oops... Looks like Google has temporarily blocked your IP address.", + "time": "2024-09-03T21:26:52.449Z" + } +}, +{ + "model": "startScan.command", + "pk": 31, + "fields": { + "scan_history": 1, + "activity": 1, + "command": "oneforall --target vulnweb.com run && cut -d',' -f6 /home/rengine/tools/.github/OneForAll/results/vulnweb.com.csv | tail -n +2 > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/subdomains_oneforall.txt && rm -rf /home/rengine/tools/.github/OneForAll/results/vulnweb.com.csv", + "return_code": 0, + "output": "\n\nOneForAll is a powerful subdomain integration tool\u001b[01;33m\n___ _ _\n___ ___ ___| _|___ ___ ___| | | \u001b[01;37m{\u001b[1;31mv0.4.5 #dev\u001b[01;37m}\u001b[01;32m\n| . | | -_| _| . 
| _| .'| | | \u001b[01;34m\n|___|_|_|___|_| |___|_| |__,|_|_| \u001b[01;37mgit.io/fjHT1\n\n\u001b[1;31mOneForAll is under development, please update before each use!\u001b[0m\n\n[*] Starting OneForAll @ 2024-09-03 21:27:05\n\n21:27:05,902 [INFOR] utils:532 - Checking dependent environment\n21:27:05,902 [INFOR] utils:544 - Checking network environment\n21:27:06,465 [INFOR] utils:555 - Checking for the latest version\n21:27:06,730 [INFOR] utils:579 - The current version v0.4.5 is already the latest version\n21:27:06,731 [INFOR] oneforall:241 - Start running OneForAll\n21:27:06,732 [INFOR] oneforall:246 - Got 1 domains\n21:27:06,763 [INFOR] wildcard:108 - Detecting vulnweb.com use wildcard dns record or not\n21:27:06,881 [ALERT] wildcard:47 - 370500ba.vulnweb.com resolve to: 370500ba.vulnweb.com. IP: {'44.228.249.3'} TTL: 3600\n21:27:06,907 [ALERT] wildcard:47 - 62a5f2cb.vulnweb.com resolve to: 62a5f2cb.vulnweb.com. IP: {'44.228.249.3'} TTL: 3600\n21:27:06,933 [ALERT] wildcard:47 - ee447f63.vulnweb.com resolve to: ee447f63.vulnweb.com. IP: {'44.228.249.3'} TTL: 3600\n21:27:06,934 [INFOR] utils:700 - Attempting to request http://370500ba.vulnweb.com\n21:27:07,318 [ALERT] utils:708 - Error request http://370500ba.vulnweb.com\n21:27:07,318 [INFOR] utils:700 - Attempting to request http://62a5f2cb.vulnweb.com\n21:27:07,721 [ALERT] utils:708 - Error request http://62a5f2cb.vulnweb.com\n21:27:07,722 [INFOR] utils:700 - Attempting to request http://ee447f63.vulnweb.com\n21:27:08,135 [ALERT] utils:708 - Error request http://ee447f63.vulnweb.com\n21:27:08,135 [ALERT] wildcard:121 - The domain vulnweb.com enables wildcard\n21:27:08,135 [INFOR] collect:44 - Start collecting subdomains of vulnweb.com\n21:27:08,208 [INFOR] module:63 - AXFRCheck module took 0.0 seconds found 0 subdomains\n21:27:08,218 [INFOR] module:63 - NSECCheck module took 0.0 seconds found 0 subdomains\n21:27:08,232 [INFOR] module:63 - QueryMX module took 0.0 seconds found 0 subdomains\n21:27:08,232 [INFOR] module:63 - QuerySOA module took 0.0 seconds found 0 subdomains\n21:27:08,233 [INFOR] module:63 - QueryNS module took 0.0 seconds found 0 subdomains\n21:27:08,262 [INFOR] module:63 - QuerySPF module took 0.0 seconds found 0 subdomains\n21:27:08,267 [INFOR] module:63 - QueryTXT module took 0.1 seconds found 0 subdomains\n21:27:08,418 [ERROR] module:129 - HTTPSConnectionPool(host='riddler.io', port=443): Max retries exceeded with url: /search?q=pld%3Avulnweb.com (Caused by NewConnectionError(': Failed to establish a new connection: [Errno -5] No address associated with hostname'))\n21:27:08,419 [INFOR] module:63 - RiddlerQuery module took 0.2 seconds found 0 subdomains\n21:27:08,448 [INFOR] module:63 - CrtshQuery module took 0.3 seconds found 0 subdomains\n21:27:08,537 [ALERT] utils:273 - GET https://www.dnsgrep.cn/subdomain/vulnweb.com 403 - Forbidden 17723\n21:27:08,538 [INFOR] module:63 - DnsgrepQuery module took 0.3 seconds found 0 subdomains\n21:27:08,555 [INFOR] module:63 - SiteDossierQuery module took 0.4 seconds found 4 subdomains\n21:27:08,556 [INFOR] module:63 - CertSpotterQuery module took 0.4 seconds found 0 subdomains\n21:27:08,564 [INFOR] module:63 - ThreatMinerQuery module took 0.4 seconds found 0 subdomains\n21:27:08,646 [INFOR] module:63 - UrlscanQuery module took 0.4 seconds found 8 subdomains\n21:27:08,648 [ALERT] utils:273 - GET https://www.virustotal.com/ui/domains/vulnweb.com/subdomains?limit=40&cursor= 429 - Too Many Requests 181\n21:27:08,648 [ALERT] utils:282 - {'error': {'code': 'RecaptchaRequiredError', 
'message': 'Please re-send request with a valid reCAPTCHA response in the \"x-recaptcha-response\" header'}}\n21:27:08,649 [INFOR] module:63 - CSPCheck module took 0.5 seconds found 0 subdomains\n21:27:08,650 [INFOR] module:63 - VirusTotalQuery module took 0.4 seconds found 0 subdomains\n21:27:08,726 [ALERT] utils:273 - GET https://fullhunt.io/api/v1/domain/vulnweb.com/subdomains 401 - UNAUTHORIZED 50\n21:27:08,726 [ALERT] utils:282 - {'message': 'Unauthorized access', 'success': False}\n21:27:08,728 [INFOR] module:63 - FullHuntAPIQuery module took 0.5 seconds found 0 subdomains\n21:27:08,894 [INFOR] module:63 - AnubisQuery module took 0.7 seconds found 12 subdomains\n21:27:08,895 [ALERT] utils:273 - GET https://transparencyreport.google.com/transparencyreport/api/v3/httpsreport/ct/certsearch?include_expired=true&include_subdomains=true&domain=vulnweb.com 404 - Not Found 1611\n21:27:08,897 [INFOR] module:63 - GoogleQuery module took 0.7 seconds found 0 subdomains\n21:27:08,953 [INFOR] module:63 - HackerTargetQuery module took 0.8 seconds found 22 subdomains\n21:27:09,137 [INFOR] module:63 - AlienVaultQuery module took 0.9 seconds found 19 subdomains\n21:27:09,276 [ALERT] utils:273 - GET https://searchdns.netcraft.com/?restriction=site+contains&position=limited&host=%2A.vulnweb.com&from=1 403 - Forbidden 17830\n21:27:09,277 [INFOR] module:63 - NetCraftQuery module took 1.1 seconds found 0 subdomains\n21:27:09,399 [INFOR] module:63 - AskSearch module took 1.2 seconds found 0 subdomains\n21:27:09,427 [INFOR] module:63 - DNSDumpsterQuery module took 1.2 seconds found 11 subdomains\n21:27:09,572 [ALERT] utils:273 - GET https://ce.baidu.com/index/getRelatedSites?site_address=vulnweb.com 500 - Internal Server Error 0\n21:27:09,573 [INFOR] module:63 - CeBaiduQuery module took 1.4 seconds found 0 subdomains\n21:27:09,666 [ALERT] utils:273 - POST https://www.dnsscan.cn/dns.html?keywords=vulnweb.com&page=1 404 - Not Found 206\n21:27:09,668 [INFOR] module:63 - QianXunQuery module took 1.5 seconds found 0 subdomains\n21:27:09,897 [INFOR] module:63 - BingSearch module took 1.7 seconds found 0 subdomains\n21:27:09,985 [INFOR] module:63 - ChinazQuery module took 1.8 seconds found 2 subdomains\n21:27:10,151 [INFOR] module:63 - MySSLQuery module took 2.0 seconds found 0 subdomains\n21:27:10,355 [INFOR] module:63 - RapidDNSQuery module took 2.2 seconds found 281 subdomains\n21:27:10,428 [INFOR] module:63 - SogouSearch module took 2.2 seconds found 5 subdomains\n21:27:10,760 [INFOR] module:63 - IP138Query module took 2.6 seconds found 16 subdomains\n21:27:10,878 [INFOR] module:63 - GiteeSearch module took 2.7 seconds found 0 subdomains\n21:27:11,603 [ERROR] module:129 - HTTPSConnectionPool(host='api.sublist3r.com', port=443): Max retries exceeded with url: /search.php?domain=vulnweb.com (Caused by SSLError(SSLError(1, '[SSL: TLSV1_ALERT_INTERNAL_ERROR] tlsv1 alert internal error (_ssl.c:997)')))\n21:27:11,604 [INFOR] module:63 - Sublist3rQuery module took 3.4 seconds found 0 subdomains\n21:27:11,627 [INFOR] module:63 - YahooSearch module took 3.4 seconds found 0 subdomains\n21:27:11,922 [INFOR] module:63 - RobtexQuery module took 3.7 seconds found 15 subdomains\n21:27:12,070 [INFOR] module:63 - BaiduSearch module took 3.9 seconds found 0 subdomains\n21:27:12,271 [INFOR] module:63 - WzSearch module took 4.1 seconds found 0 subdomains\n21:27:12,901 [INFOR] module:63 - SoSearch module took 4.7 seconds found 1 subdomains\n21:27:12,985 [ALERT] utils:273 - GET 
https://www.google.com/sorry/index?continue=https://www.google.com/search%3Fq%3Dsite%253A.vulnweb.com%26start%3D1%26num%3D50%26filter%3D0%26btnG%3DSearch%26gbv%3D1%26hl%3Den&hl=en&q=EgRdFhUyGLD-3bYGIjAXsFmbSQzD5Zn_V7wNiBiIu7JIhrukOC5iwq21AZzzrLaQgQb6Du1W_V-vgWEc488yAXJaAUM 429 - Too Many Requests 3385\n21:27:12,987 [INFOR] module:63 - GoogleSearch module took 4.8 seconds found 0 subdomains\n21:27:14,309 [INFOR] module:63 - YandexSearch module took 6.1 seconds found 0 subdomains\n21:27:18,296 [INFOR] module:63 - CertInfo module took 10.1 seconds found 0 subdomains\n21:27:34,346 [INFOR] module:63 - SitemapCheck module took 26.2 seconds found 0 subdomains\n21:27:34,702 [INFOR] module:63 - RobotsCheck module took 26.5 seconds found 0 subdomains\n21:27:34,710 [INFOR] module:63 - CrossDomainCheck module took 26.5 seconds found 0 subdomains\n21:27:34,906 [INFOR] module:63 - BruteSRV module took 0.2 seconds found 0 subdomains\n21:27:34,910 [INFOR] brute:460 - Start running Brute module\n21:27:34,911 [INFOR] brute:410 - Blasting vulnweb.com\n21:27:34,911 [INFOR] utils:174 - /home/rengine/tools/.github/OneForAll/results/temp does not exist, directory will be created\n21:27:34,911 [INFOR] brute:119 - Querying NS records of vulnweb.com\n21:27:34,936 [INFOR] brute:129 - vulnweb.com's authoritative name server is ['ns1.eurodns.com.', 'ns2.eurodns.com.', 'ns3.eurodns.com.', 'ns4.eurodns.com.']\n21:27:34,936 [INFOR] brute:99 - Querying A record from authoritative name server: ['ns1.eurodns.com.', 'ns2.eurodns.com.', 'ns3.eurodns.com.', 'ns4.eurodns.com.']\n21:27:35,027 [INFOR] brute:114 - Authoritative name server A record result: ['199.167.66.107', '104.37.178.107', '199.167.66.108', '104.37.178.108']\n21:27:35,027 [INFOR] wildcard:159 - Collecting wildcard dns record for vulnweb.com\n21:27:35,028 [INFOR] wildcard:128 - Query e5822a54.vulnweb.com 's wildcard dns record in authoritative name server\n21:27:35,042 [INFOR] wildcard:153 - e5822a54.vulnweb.com results on authoritative name server: e5822a54.vulnweb.com. IP: {'44.228.249.3'} TTL: 3600\n21:27:35,043 [INFOR] wildcard:128 - Query 2d14ad45.vulnweb.com 's wildcard dns record in authoritative name server\n21:27:35,058 [INFOR] wildcard:153 - 2d14ad45.vulnweb.com results on authoritative name server: 2d14ad45.vulnweb.com. 
IP: {'44.228.249.3'} TTL: 3600\n21:27:35,059 [INFOR] brute:347 - Generating dictionary for vulnweb.com\n21:27:35,131 [ALERT] utils:695 - Please check whether sol.vulnweb.com is correct or not\n21:27:35,133 [INFOR] brute:365 - Dictionary size: 95247\n21:27:35,165 [INFOR] brute:441 - Running massdns to brute subdomains\n21:27:39,270 [INFOR] brute:197 - Counting IP cname appear times\n21:27:39,510 [INFOR] brute:238 - Processing result\n21:27:40,095 [ALERT] brute:451 - Brute module takes 5.2 seconds, found 4 subdomains of vulnweb.com\n21:27:40,109 [INFOR] brute:489 - Finished Brute module to brute vulnweb.com\n21:27:40,125 [INFOR] resolve:143 - Start resolving subdomains of vulnweb.com\n21:27:40,144 [INFOR] resolve:166 - Running massdns to resolve subdomains\n21:27:42,189 [INFOR] resolve:104 - Processing resolved results\n21:27:42,192 [INFOR] resolve:172 - Finished resolve subdomains of vulnweb.com\n21:27:42,192 [INFOR] resolve:61 - Saving resolved results\n21:27:42,236 [INFOR] request:258 - Start requesting subdomains of vulnweb.com\n21:27:42,236 [INFOR] request:39 - Port range:[80, 443]\n21:27:42,237 [INFOR] request:56 - Generating request urls\n21:27:42,237 [INFOR] request:217 - Requesting urls in bulk\n\n0it [00:00, ?it/s]\nRequest Progress: 88%|███████████████████████▋ | 7/8 [00:00<00:00, 70.00it/s]\nRequest Progress: 9it [00:13, 1.46s/it]\n21:27:55,384 [INFOR] request:264 - Found that vulnweb.com has 3 alive subdomains\n21:27:55,384 [INFOR] finder:23 - Start Finder module\n\n0it [00:00, ?it/s]21:27:55,387 [INFOR] request:217 - Requesting urls in bulk\n\nRequest Progress: 1it [00:00, 4165.15it/s]\n21:27:55,389 [INFOR] module:63 - Finder module took 0.0 seconds found 1 subdomains\n21:27:55,389 [INFOR] resolve:143 - Start resolving subdomains of vulnweb.com\n21:27:55,409 [INFOR] resolve:166 - Running massdns to resolve subdomains\n21:27:55,436 [INFOR] resolve:104 - Processing resolved results\n\n0it [00:00, ?it/s]21:27:55,436 [INFOR] resolve:172 - Finished resolve subdomains of vulnweb.com\n21:27:55,437 [INFOR] request:258 - Start requesting subdomains of vulnweb.com\n21:27:55,437 [INFOR] request:39 - Port range:[80, 443]\n21:27:55,437 [INFOR] request:56 - Generating request urls\n21:27:55,437 [INFOR] request:217 - Requesting urls in bulk\n\nRequest Progress: 3it [00:13, 4.36s/it]\n21:28:08,520 [INFOR] request:264 - Found that vulnweb.com has 4 alive subdomains\n21:28:08,520 [INFOR] altdns:203 - Start altdns module\n\n0it [00:00, ?it/s]21:28:08,529 [INFOR] altdns:210 - The altdns module generated 0 new subdomains\n21:28:08,529 [INFOR] resolve:143 - Start resolving subdomains of vulnweb.com\n21:28:08,529 [INFOR] request:258 - Start requesting subdomains of vulnweb.com\n21:28:08,530 [INFOR] request:39 - Port range:[80, 443]\n21:28:08,530 [INFOR] request:56 - Generating request urls\n21:28:08,530 [INFOR] request:217 - Requesting urls in bulk\n\nRequest Progress: 1it [00:00, 391.52it/s]\n21:28:08,534 [INFOR] request:264 - Found that vulnweb.com has 4 alive subdomains\n21:28:08,877 [ALERT] export:66 - The subdomain result for vulnweb.com: /home/rengine/tools/.github/OneForAll/results/vulnweb.com.csv\n21:28:08,878 [INFOR] oneforall:255 - Finished OneForAll", + "time": "2024-09-03T21:27:04.860Z" + } +}, +{ + "model": "startScan.command", + "pk": 32, + "fields": { + "scan_history": 1, + "activity": 1, + "command": "netlas search -d domain -i domain domain:\"*.vulnweb.com\" -f json | grep -oE '([a-zA-Z0-9]([-a-zA-Z0-9]*[a-zA-Z0-9])?\\.)+vulnweb.com' > 
/home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/subdomains_netlas.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:28:08.941Z" + } +}, +{ + "model": "startScan.command", + "pk": 33, + "fields": { + "scan_history": 1, + "activity": 1, + "command": "cat /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/subdomains_*.txt > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/#1_subdomain_discovery.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:28:09.587Z" + } +}, +{ + "model": "startScan.command", + "pk": 34, + "fields": { + "scan_history": 1, + "activity": 1, + "command": "sort -u /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/#1_subdomain_discovery.txt -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/#1_subdomain_discovery.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:28:09.592Z" + } +}, +{ + "model": "startScan.command", + "pk": 35, + "fields": { + "scan_history": 1, + "activity": 3, + "command": "httpx -cl -ct -rt -location -td -websocket -cname -asn -cdn -probe -random-agent -t 30 -json -l /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/httpx_input.txt -silent", + "return_code": 0, + "output": "{\"timestamp\":\"2024-09-03T21:28:10.479003156Z\",\"url\":\"http://localhost.vulnweb.com\",\"input\":\"localhost.vulnweb.com\",\"error\":\"connection refused\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.770588207Z\",\"url\":\"http://edu-rost.ruwww.vulnweb.com\",\"input\":\"edu-rost.ruwww.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.770698494Z\",\"url\":\"http://test.vulnweb.com\",\"input\":\"test.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.772639921Z\",\"url\":\"http://tetphp.vulnweb.com\",\"input\":\"tetphp.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.773795126Z\",\"url\":\"http://blogger.com.vulnweb.com\",\"input\":\"blogger.com.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.774028924Z\",\"url\":\"http://testaps.vulnweb.com\",\"input\":\"testaps.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.774314729Z\",\"url\":\"http://test.php.vulnweb.com\",\"input\":\"test.php.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.778213954Z\",\"url\":\"http://u003erest.vulnweb.com\",\"input\":\"u003erest.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.779635989Z\",\"url\":\"http://u003etestasp.vulnweb.com\",\"input\":\"u003etestasp.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.779901386Z\",\"url\":\"http://5burl-3dhttp-3a-2f-2fwww.vulnweb.com\",\"input\":\"5burl-3dhttp-3a-2f-2fwww.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.782169265Z\",\"url\":\"http://testap.vulnweb.com\",\"input\":\"testap.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.783045016Z\",\"url\":\"http://testpphp.vulnweb.com\"
,\"input\":\"testpphp.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.783385584Z\",\"port\":\"80\",\"url\":\"http://testphp.vulnweb.com\",\"input\":\"testphp.vulnweb.com\",\"title\":\"Home of Acunetix Art\",\"scheme\":\"http\",\"webserver\":\"nginx/1.19.0\",\"content_type\":\"text/html\",\"method\":\"GET\",\"host\":\"44.228.249.3\",\"path\":\"/\",\"time\":\"345.110429ms\",\"a\":[\"44.228.249.3\"],\"tech\":[\"DreamWeaver\",\"Nginx:1.19.0\",\"PHP:5.6.40\",\"Ubuntu\"],\"words\":514,\"lines\":110,\"status_code\":200,\"content_length\":4958,\"failed\":false,\"knowledgebase\":{\"PageType\":\"nonerror\",\"pHash\":0}}\n{\"timestamp\":\"2024-09-03T21:28:20.783891311Z\",\"port\":\"80\",\"url\":\"http://testhtml5.vulnweb.com\",\"input\":\"testhtml5.vulnweb.com\",\"title\":\"SecurityTweets - HTML5 test website for Acunetix Web Vulnerability Scanner\",\"scheme\":\"http\",\"webserver\":\"nginx/1.19.0\",\"content_type\":\"text/html\",\"method\":\"GET\",\"host\":\"44.228.249.3\",\"path\":\"/\",\"time\":\"342.381867ms\",\"a\":[\"44.228.249.3\"],\"tech\":[\"AngularJS\",\"Nginx:1.19.0\"],\"words\":1483,\"lines\":164,\"status_code\":200,\"content_length\":6940,\"failed\":false,\"knowledgebase\":{\"PageType\":\"nonerror\",\"pHash\":0}}\n{\"timestamp\":\"2024-09-03T21:28:20.79672208Z\",\"url\":\"http://testapsnet.vulnweb.com\",\"input\":\"testapsnet.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.797297117Z\",\"url\":\"http://antivirus1.vulnweb.com\",\"input\":\"antivirus1.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.797526076Z\",\"url\":\"http://testaspx.vulnweb.com\",\"input\":\"testaspx.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.798569381Z\",\"port\":\"80\",\"url\":\"http://testasp.vulnweb.com\",\"input\":\"testasp.vulnweb.com\",\"title\":\"acuforum forums\",\"scheme\":\"http\",\"webserver\":\"Microsoft-IIS/8.5\",\"content_type\":\"text/html\",\"method\":\"GET\",\"host\":\"44.238.29.244\",\"path\":\"/\",\"time\":\"361.175397ms\",\"a\":[\"44.238.29.244\"],\"tech\":[\"DreamWeaver\",\"IIS:8.5\",\"Microsoft ASP.NET\",\"Windows 
Server\"],\"words\":328,\"lines\":46,\"status_code\":200,\"content_length\":3537,\"failed\":false,\"knowledgebase\":{\"PageType\":\"nonerror\",\"pHash\":0}}\n{\"timestamp\":\"2024-09-03T21:28:20.801057773Z\",\"url\":\"http://httptestaspnet.vulnweb.com\",\"input\":\"httptestaspnet.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.805003305Z\",\"url\":\"http://ttestphp.vulnweb.com\",\"input\":\"ttestphp.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.805102451Z\",\"url\":\"http://2f-2fwww.vulnweb.com\",\"input\":\"2f-2fwww.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.806546145Z\",\"url\":\"http://odincovo.vulnweb.com\",\"input\":\"odincovo.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.808176239Z\",\"url\":\"http://2fwww.vulnweb.com\",\"input\":\"2fwww.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.808798495Z\",\"url\":\"http://7ctestasp.vulnweb.com\",\"input\":\"7ctestasp.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.80931386Z\",\"url\":\"http://estphp.vulnweb.com\",\"input\":\"estphp.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.812391837Z\",\"url\":\"http://restasp.vulnweb.com\",\"input\":\"restasp.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.814600876Z\",\"url\":\"http://2ftestasp.vulnweb.com\",\"input\":\"2ftestasp.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.822120875Z\",\"url\":\"http://virus.vulnweb.com\",\"input\":\"virus.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.83703144Z\",\"url\":\"http://edu-rost.rutestasp.vulnweb.com\",\"input\":\"edu-rost.rutestasp.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:20.856906486Z\",\"port\":\"80\",\"url\":\"http://rest.vulnweb.com\",\"input\":\"rest.vulnweb.com\",\"title\":\"Acunetix Vulnerable REST API\",\"scheme\":\"http\",\"webserver\":\"Apache/2.4.25 (Debian)\",\"content_type\":\"text/html\",\"method\":\"GET\",\"host\":\"35.81.188.86\",\"path\":\"/\",\"time\":\"422.205408ms\",\"a\":[\"35.81.188.86\"],\"tech\":[\"Apache HTTP Server:2.4.25\",\"Debian\",\"PHP:7.1.26\"],\"words\":1397,\"lines\":138,\"status_code\":200,\"content_length\":3555,\"failed\":false,\"knowledgebase\":{\"PageType\":\"nonerror\",\"pHash\":0}}\n{\"timestamp\":\"2024-09-03T21:28:20.992940938Z\",\"port\":\"80\",\"url\":\"http://testaspnet.vulnweb.com\",\"input\":\"testaspnet.vulnweb.com\",\"title\":\"acublog news\",\"scheme\":\"http\",\"webserver\":\"Microsoft-IIS/8.5\",\"content_type\":\"text/html\",\"method\":\"GET\",\"host\":\"44.238.29.244\",\"path\":\"/\",\"time\":\"535.234499ms\",\"a\":[\"44.238.29.244\"],\"tech\":[\"IIS:8.5\",\"Microsoft ASP.NET:2.0.50727\",\"Microsoft Visual Studio\",\"Windows 
Server\"],\"words\":774,\"lines\":89,\"status_code\":200,\"content_length\":14082,\"failed\":false,\"knowledgebase\":{\"PageType\":\"other\",\"pHash\":0}}\n{\"timestamp\":\"2024-09-03T21:28:21.214887749Z\",\"url\":\"http://www.testphp.vulnweb.com\",\"input\":\"www.testphp.vulnweb.com\",\"error\":\"no address found for host\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:21.224643226Z\",\"url\":\"http://www.testasp.vulnweb.com\",\"input\":\"www.testasp.vulnweb.com\",\"error\":\"no address found for host\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:31.115507511Z\",\"url\":\"http://www.test.php.vulnweb.com\",\"input\":\"www.test.php.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:31.121278393Z\",\"url\":\"http://www.virus.vulnweb.com\",\"input\":\"www.virus.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n{\"timestamp\":\"2024-09-03T21:28:31.13080372Z\",\"port\":\"80\",\"url\":\"http://vulnweb.com\",\"input\":\"vulnweb.com\",\"title\":\"Acunetix Web Vulnerability Scanner - Test websites\",\"scheme\":\"http\",\"webserver\":\"nginx/1.19.0\",\"content_type\":\"text/html\",\"method\":\"GET\",\"host\":\"44.228.249.3\",\"path\":\"/\",\"time\":\"352.853777ms\",\"a\":[\"44.228.249.3\"],\"tech\":[\"Nginx:1.19.0\"],\"words\":482,\"lines\":74,\"status_code\":200,\"content_length\":4018,\"failed\":false,\"knowledgebase\":{\"PageType\":\"nonerror\",\"pHash\":0}}\n{\"timestamp\":\"2024-09-03T21:28:31.13762029Z\",\"port\":\"80\",\"url\":\"http://www.vulnweb.com\",\"input\":\"www.vulnweb.com\",\"title\":\"Acunetix Web Vulnerability Scanner - Test websites\",\"scheme\":\"http\",\"webserver\":\"nginx/1.19.0\",\"content_type\":\"text/html\",\"method\":\"GET\",\"host\":\"44.228.249.3\",\"path\":\"/\",\"time\":\"352.635869ms\",\"a\":[\"44.228.249.3\"],\"tech\":[\"Nginx:1.19.0\"],\"words\":482,\"lines\":74,\"status_code\":200,\"content_length\":4018,\"failed\":false,\"knowledgebase\":{\"PageType\":\"nonerror\",\"pHash\":0}}\n{\"timestamp\":\"2024-09-03T21:28:31.142479444Z\",\"url\":\"http://viruswall.vulnweb.com\",\"input\":\"viruswall.vulnweb.com\",\"error\":\"EOF\",\"words\":0,\"lines\":0,\"failed\":true}\n", + "time": "2024-09-03T21:28:09.749Z" + } +}, +{ + "model": "startScan.command", + "pk": 36, + "fields": { + "scan_history": null, + "activity": null, + "command": "geoiplookup 44.238.29.244", + "return_code": null, + "output": null, + "time": "2024-09-03T21:28:20.894Z" + } +}, +{ + "model": "startScan.command", + "pk": 37, + "fields": { + "scan_history": null, + "activity": null, + "command": "geoiplookup 35.81.188.86", + "return_code": null, + "output": null, + "time": "2024-09-03T21:28:20.945Z" + } +}, +{ + "model": "startScan.command", + "pk": 38, + "fields": { + "scan_history": 1, + "activity": 3, + "command": "rm /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/httpx_input.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:28:31.181Z" + } +}, +{ + "model": "startScan.command", + "pk": 39, + "fields": { + "scan_history": 1, + "activity": 4, + "command": "naabu -json -exclude-cdn -list /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/input_subdomains_port_scan.txt -top-ports 100 -c 30 -rate 150 -timeout 5000 -silent", + "return_code": 0, + "output": "\n__\n___ ___ ___ _/ / __ __\n/ _ \\/ _ \\/ _ \\/ _ \\/ // /\n/_//_/\\_,_/\\_,_/_.__/\\_,_/\n\nprojectdiscovery.io\n\n[INF] 
Current naabu version 2.3.0 (outdated)\n{\"host\":\"rest.vulnweb.com\",\"ip\":\"35.81.188.86\",\"timestamp\":\"2024-09-03T21:28:34.852197905Z\",\"port\":8080,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"testasp.vulnweb.com\",\"ip\":\"44.238.29.244\",\"timestamp\":\"2024-09-03T21:28:38.833012077Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"testaspnet.vulnweb.com\",\"ip\":\"44.238.29.244\",\"timestamp\":\"2024-09-03T21:28:38.833058584Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"rest.vulnweb.com\",\"ip\":\"35.81.188.86\",\"timestamp\":\"2024-09-03T21:28:39.816853087Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"2f-2fwww.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846295137Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"2ftestasp.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846355841Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"5burl-3dhttp-3a-2f-2fwww.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846373614Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"7ctestasp.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846384815Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"antivirus1.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846391618Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"blogger.com.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846396848Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"edu-rost.rutestasp.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846400745Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"edu-rost.ruwww.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846405213Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"estphp.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846411675Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"httptestaspnet.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846415963Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"odincovo.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846419801Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"restasp.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846423748Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"test.php.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846428026Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"test.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846438285Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"testap.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846442483Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"testaps.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.84644639Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"testapsnet.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846450137Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"testaspx.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846456559Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"testhtml5.vulnwe
b.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846461439Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"testphp.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846469183Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"testpphp.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846474944Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"tetphp.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846479302Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"ttestphp.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.84648337Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"u003erest.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846487688Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"u003etestasp.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846492607Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"virus.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.8464998Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"viruswall.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846504209Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"www.test.php.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846508697Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n{\"host\":\"www.virus.vulnweb.com\",\"ip\":\"44.228.249.3\",\"timestamp\":\"2024-09-03T21:28:39.846512795Z\",\"port\":80,\"protocol\":\"tcp\",\"tls\":false}\n", + "time": "2024-09-03T21:28:31.494Z" + } +}, +{ + "model": "startScan.command", + "pk": 40, + "fields": { + "scan_history": 1, + "activity": null, + "command": "httpx -cl -ct -rt -location -td -websocket -cname -asn -cdn -probe -random-agent -t 1 -json -u rest.vulnweb.com:8080 -silent", + "return_code": 0, + "output": "{\"timestamp\":\"2024-09-03T21:28:36.659376329Z\",\"port\":\"8080\",\"url\":\"http://rest.vulnweb.com:8080\",\"input\":\"rest.vulnweb.com:8080\",\"scheme\":\"http\",\"webserver\":\"Apache/2.4.25 (Debian)\",\"content_type\":\"application/json\",\"method\":\"GET\",\"host\":\"35.81.188.86\",\"path\":\"/\",\"time\":\"353.948066ms\",\"a\":[\"35.81.188.86\"],\"tech\":[\"Apache HTTP Server:2.4.25\",\"Debian\",\"PHP:7.1.26\"],\"words\":4,\"lines\":1,\"status_code\":200,\"content_length\":36,\"failed\":false,\"knowledgebase\":{\"PageType\":\"error\",\"pHash\":0}}\n", + "time": "2024-09-03T21:28:34.882Z" + } +}, +{ + "model": "startScan.command", + "pk": 41, + "fields": { + "scan_history": 1, + "activity": null, + "command": "rm /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/httpx_input.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:28:36.698Z" + } +}, +{ + "model": "startScan.command", + "pk": 42, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://rest.vulnweb.com\" | gospider --js -d 2 --sitemap --robots -w -r -a -t 30 --no-redirect | grep -Eo 'https?://rest\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_gospider_rest.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:11.700Z" + } +}, +{ + "model": "startScan.command", + "pk": 43, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo 
\"http://rest.vulnweb.com\" | hakrawler -subs -u -t 30 -dr | grep -Eo 'https?://rest\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_hakrawler_rest.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:11.724Z" + } +}, +{ + "model": "startScan.command", + "pk": 44, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://rest.vulnweb.com\" | waybackurls | grep -Eo 'https?://rest\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_waybackurls_rest.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:11.740Z" + } +}, +{ + "model": "startScan.command", + "pk": 45, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://rest.vulnweb.com\" | katana -silent -jc -kf all -d 3 -fs rdn -c 30 -dr | grep -Eo 'https?://rest\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_katana_rest.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:11.756Z" + } +}, +{ + "model": "startScan.command", + "pk": 46, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://rest.vulnweb.com\" | gau --threads 30 | grep -Eo 'https?://rest\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_gau_rest.vulnweb.com.txt", + "return_code": 1, + "output": "", + "time": "2024-09-03T21:29:11.771Z" + } +}, +{ + "model": "startScan.command", + "pk": 47, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testasp.vulnweb.com\" | gospider --js -d 2 --sitemap --robots -w -r -a -t 30 --no-redirect | grep -Eo 'https?://testasp\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_gospider_testasp.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:11.789Z" + } +}, +{ + "model": "startScan.command", + "pk": 48, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testasp.vulnweb.com\" | hakrawler -subs -u -t 30 -dr | grep -Eo 'https?://testasp\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_hakrawler_testasp.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:11.807Z" + } +}, +{ + "model": "startScan.command", + "pk": 49, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testasp.vulnweb.com\" | waybackurls | grep -Eo 'https?://testasp\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_waybackurls_testasp.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:11.822Z" + } +}, +{ + "model": "startScan.command", + "pk": 50, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testasp.vulnweb.com\" | katana -silent -jc -kf all -d 3 -fs rdn -c 30 -dr | grep -Eo 'https?://testasp\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_katana_testasp.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:11.836Z" + } +}, +{ + "model": "startScan.command", + "pk": 51, + "fields": { + "scan_history": 1, + 
"activity": 5, + "command": "echo \"http://testasp.vulnweb.com\" | gau --threads 30 | grep -Eo 'https?://testasp\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_gau_testasp.vulnweb.com.txt", + "return_code": 1, + "output": "", + "time": "2024-09-03T21:29:11.849Z" + } +}, +{ + "model": "startScan.command", + "pk": 52, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testaspnet.vulnweb.com\" | gospider --js -d 2 --sitemap --robots -w -r -a -t 30 --no-redirect | grep -Eo 'https?://testaspnet\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_gospider_testaspnet.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:11.863Z" + } +}, +{ + "model": "startScan.command", + "pk": 53, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testaspnet.vulnweb.com\" | hakrawler -subs -u -t 30 -dr | grep -Eo 'https?://testaspnet\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_hakrawler_testaspnet.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:11.878Z" + } +}, +{ + "model": "startScan.command", + "pk": 54, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testaspnet.vulnweb.com\" | waybackurls | grep -Eo 'https?://testaspnet\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_waybackurls_testaspnet.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:11.892Z" + } +}, +{ + "model": "startScan.command", + "pk": 55, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testaspnet.vulnweb.com\" | katana -silent -jc -kf all -d 3 -fs rdn -c 30 -dr | grep -Eo 'https?://testaspnet\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_katana_testaspnet.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:11.906Z" + } +}, +{ + "model": "startScan.command", + "pk": 56, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testaspnet.vulnweb.com\" | gau --threads 30 | grep -Eo 'https?://testaspnet\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_gau_testaspnet.vulnweb.com.txt", + "return_code": 1, + "output": "", + "time": "2024-09-03T21:29:11.921Z" + } +}, +{ + "model": "startScan.command", + "pk": 57, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testhtml5.vulnweb.com\" | gospider --js -d 2 --sitemap --robots -w -r -a -t 30 --no-redirect | grep -Eo 'https?://testhtml5\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_gospider_testhtml5.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:11.935Z" + } +}, +{ + "model": "startScan.command", + "pk": 58, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testhtml5.vulnweb.com\" | hakrawler -subs -u -t 30 -dr | grep -Eo 'https?://testhtml5\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_hakrawler_testhtml5.vulnweb.com.txt", + "return_code": 0, + 
"output": "", + "time": "2024-09-03T21:29:11.952Z" + } +}, +{ + "model": "startScan.command", + "pk": 59, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testhtml5.vulnweb.com\" | waybackurls | grep -Eo 'https?://testhtml5\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_waybackurls_testhtml5.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:11.966Z" + } +}, +{ + "model": "startScan.command", + "pk": 60, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testhtml5.vulnweb.com\" | katana -silent -jc -kf all -d 3 -fs rdn -c 30 -dr | grep -Eo 'https?://testhtml5\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_katana_testhtml5.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:11.981Z" + } +}, +{ + "model": "startScan.command", + "pk": 61, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testhtml5.vulnweb.com\" | gau --threads 30 | grep -Eo 'https?://testhtml5\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_gau_testhtml5.vulnweb.com.txt", + "return_code": 1, + "output": "", + "time": "2024-09-03T21:29:11.995Z" + } +}, +{ + "model": "startScan.command", + "pk": 62, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testphp.vulnweb.com\" | gospider --js -d 2 --sitemap --robots -w -r -a -t 30 --no-redirect | grep -Eo 'https?://testphp\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_gospider_testphp.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:12.011Z" + } +}, +{ + "model": "startScan.command", + "pk": 63, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testphp.vulnweb.com\" | hakrawler -subs -u -t 30 -dr | grep -Eo 'https?://testphp\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_hakrawler_testphp.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:12.028Z" + } +}, +{ + "model": "startScan.command", + "pk": 64, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testphp.vulnweb.com\" | waybackurls | grep -Eo 'https?://testphp\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_waybackurls_testphp.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:12.042Z" + } +}, +{ + "model": "startScan.command", + "pk": 65, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testphp.vulnweb.com\" | katana -silent -jc -kf all -d 3 -fs rdn -c 30 -dr | grep -Eo 'https?://testphp\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_katana_testphp.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:12.057Z" + } +}, +{ + "model": "startScan.command", + "pk": 66, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://testphp.vulnweb.com\" | gau --threads 30 | grep -Eo 'https?://testphp\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > 
/home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_gau_testphp.vulnweb.com.txt", + "return_code": 1, + "output": "", + "time": "2024-09-03T21:29:12.072Z" + } +}, +{ + "model": "startScan.command", + "pk": 67, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://vulnweb.com\" | gospider --js -d 2 --sitemap --robots -w -r -a -t 30 --no-redirect | grep -Eo 'https?://vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_gospider_vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:12.087Z" + } +}, +{ + "model": "startScan.command", + "pk": 68, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://vulnweb.com\" | hakrawler -subs -u -t 30 -dr | grep -Eo 'https?://vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_hakrawler_vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:12.100Z" + } +}, +{ + "model": "startScan.command", + "pk": 69, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://vulnweb.com\" | waybackurls | grep -Eo 'https?://vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_waybackurls_vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:12.113Z" + } +}, +{ + "model": "startScan.command", + "pk": 70, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://vulnweb.com\" | katana -silent -jc -kf all -d 3 -fs rdn -c 30 -dr | grep -Eo 'https?://vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_katana_vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:12.128Z" + } +}, +{ + "model": "startScan.command", + "pk": 71, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://vulnweb.com\" | gau --threads 30 | grep -Eo 'https?://vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_gau_vulnweb.com.txt", + "return_code": 1, + "output": "", + "time": "2024-09-03T21:29:12.143Z" + } +}, +{ + "model": "startScan.command", + "pk": 72, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://www.vulnweb.com\" | gospider --js -d 2 --sitemap --robots -w -r -a -t 30 --no-redirect | grep -Eo 'https?://www\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_gospider_www.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:12.159Z" + } +}, +{ + "model": "startScan.command", + "pk": 73, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://www.vulnweb.com\" | hakrawler -subs -u -t 30 -dr | grep -Eo 'https?://www\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_hakrawler_www.vulnweb.com.txt", + "return_code": 1, + "output": "", + "time": "2024-09-03T21:29:12.176Z" + } +}, +{ + "model": "startScan.command", + "pk": 74, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://www.vulnweb.com\" | waybackurls | grep -Eo 'https?://www\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > 
/home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_waybackurls_www.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:12.191Z" + } +}, +{ + "model": "startScan.command", + "pk": 75, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://www.vulnweb.com\" | katana -silent -jc -kf all -d 3 -fs rdn -c 30 -dr | grep -Eo 'https?://www\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_katana_www.vulnweb.com.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:29:12.207Z" + } +}, +{ + "model": "startScan.command", + "pk": 76, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "echo \"http://www.vulnweb.com\" | gau --threads 30 | grep -Eo 'https?://www\\.vulnweb\\.com(:[0-9]+)?(/.*)?$' > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_gau_www.vulnweb.com.txt", + "return_code": 1, + "output": "", + "time": "2024-09-03T21:29:12.222Z" + } +}, +{ + "model": "startScan.command", + "pk": 77, + "fields": { + "scan_history": null, + "activity": null, + "command": "tlsx -san -cn -silent -ro -host vulnweb.com -o /tmp/ip_domain_tlsx.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:33:01.109Z" + } +}, +{ + "model": "startScan.command", + "pk": 78, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "cat /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_* > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/#1_fetch_url.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:35:38.599Z" + } +}, +{ + "model": "startScan.command", + "pk": 79, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "cat /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/input_endpoints_fetch_url.txt >> /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/#1_fetch_url.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:35:38.631Z" + } +}, +{ + "model": "startScan.command", + "pk": 80, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "sort -u /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/#1_fetch_url.txt -o /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/#1_fetch_url.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:35:38.659Z" + } +}, +{ + "model": "startScan.command", + "pk": 81, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "cat /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/#1_fetch_url.txt | grep -Eiv \"\\.(png|jpg|jpeg|gif|mp4|mpeg|mp3).*\" > /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_filtered.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:35:38.696Z" + } +}, +{ + "model": "startScan.command", + "pk": 82, + "fields": { + "scan_history": 1, + "activity": 5, + "command": "mv /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_filtered.txt /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/#1_fetch_url.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T21:35:38.727Z" + } +}, +{ + "model": "startScan.command", + "pk": 106, + "fields": { + "scan_history": 1, + "activity": 14, + "command": "nuclei -j -irr -l 
/home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/urls_unfurled.txt -c 50 -retries 1 -rl 150 -timeout 5 -silent -t /home/rengine/nuclei-templates -severity medium", + "return_code": 0, + "output": null, + "time": "2024-09-03T22:02:49.873Z" + } +}, +{ + "model": "startScan.command", + "pk": 117, + "fields": { + "scan_history": 1, + "activity": null, + "command": "httpx -cl -ct -rt -location -td -websocket -cname -asn -cdn -probe -random-agent -t 7 -json -l /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/httpx_input.txt -silent", + "return_code": 0, + "output": "{\"timestamp\":\"2024-09-03T22:13:05.027538389Z\",\"port\":\"80\",\"url\":\"http://vulnweb.com\",\"input\":\"http://vulnweb.com\",\"title\":\"Acunetix Web Vulnerability Scanner - Test websites\",\"scheme\":\"http\",\"webserver\":\"nginx/1.19.0\",\"content_type\":\"text/html\",\"method\":\"GET\",\"host\":\"44.228.249.3\",\"path\":\"/\",\"time\":\"341.066989ms\",\"a\":[\"44.228.249.3\"],\"tech\":[\"Nginx:1.19.0\"],\"words\":482,\"lines\":74,\"status_code\":200,\"content_length\":4018,\"failed\":false,\"knowledgebase\":{\"PageType\":\"nonerror\",\"pHash\":0}}\n{\"timestamp\":\"2024-09-03T22:13:05.034995861Z\",\"port\":\"80\",\"url\":\"http://www.vulnweb.com\",\"input\":\"http://www.vulnweb.com\",\"title\":\"Acunetix Web Vulnerability Scanner - Test websites\",\"scheme\":\"http\",\"webserver\":\"nginx/1.19.0\",\"content_type\":\"text/html\",\"method\":\"GET\",\"host\":\"44.228.249.3\",\"path\":\"/\",\"time\":\"347.273586ms\",\"a\":[\"44.228.249.3\"],\"tech\":[\"Nginx:1.19.0\"],\"words\":482,\"lines\":74,\"status_code\":200,\"content_length\":4018,\"failed\":false,\"knowledgebase\":{\"PageType\":\"nonerror\",\"pHash\":0}}\n{\"timestamp\":\"2024-09-03T22:13:05.035316875Z\",\"port\":\"80\",\"url\":\"http://testasp.vulnweb.com\",\"input\":\"http://testasp.vulnweb.com\",\"title\":\"acuforum forums\",\"scheme\":\"http\",\"webserver\":\"Microsoft-IIS/8.5\",\"content_type\":\"text/html\",\"method\":\"GET\",\"host\":\"44.238.29.244\",\"path\":\"/\",\"time\":\"347.420538ms\",\"a\":[\"44.238.29.244\"],\"tech\":[\"DreamWeaver\",\"IIS:8.5\",\"Microsoft ASP.NET\",\"Windows Server\"],\"words\":328,\"lines\":46,\"status_code\":200,\"content_length\":3538,\"failed\":false,\"knowledgebase\":{\"PageType\":\"nonerror\",\"pHash\":0}}\n{\"timestamp\":\"2024-09-03T22:13:05.036463189Z\",\"port\":\"80\",\"url\":\"http://testhtml5.vulnweb.com\",\"input\":\"http://testhtml5.vulnweb.com\",\"title\":\"SecurityTweets - HTML5 test website for Acunetix Web Vulnerability Scanner\",\"scheme\":\"http\",\"webserver\":\"nginx/1.19.0\",\"content_type\":\"text/html\",\"method\":\"GET\",\"host\":\"44.228.249.3\",\"path\":\"/\",\"time\":\"342.066622ms\",\"a\":[\"44.228.249.3\"],\"tech\":[\"AngularJS\",\"Nginx:1.19.0\"],\"words\":1483,\"lines\":164,\"status_code\":200,\"content_length\":6940,\"failed\":false,\"knowledgebase\":{\"PageType\":\"nonerror\",\"pHash\":0}}\n{\"timestamp\":\"2024-09-03T22:13:05.053531252Z\",\"port\":\"80\",\"url\":\"http://rest.vulnweb.com\",\"input\":\"http://rest.vulnweb.com\",\"title\":\"Acunetix Vulnerable REST API\",\"scheme\":\"http\",\"webserver\":\"Apache/2.4.25 (Debian)\",\"content_type\":\"text/html\",\"method\":\"GET\",\"host\":\"35.81.188.86\",\"path\":\"/\",\"time\":\"367.185091ms\",\"a\":[\"35.81.188.86\"],\"tech\":[\"Apache HTTP 
Server:2.4.25\",\"Debian\",\"PHP:7.1.26\"],\"words\":1397,\"lines\":138,\"status_code\":200,\"content_length\":3555,\"failed\":false,\"knowledgebase\":{\"PageType\":\"nonerror\",\"pHash\":0}}\n{\"timestamp\":\"2024-09-03T22:13:05.066012087Z\",\"port\":\"80\",\"url\":\"http://testphp.vulnweb.com\",\"input\":\"http://testphp.vulnweb.com\",\"title\":\"Home of Acunetix Art\",\"scheme\":\"http\",\"webserver\":\"nginx/1.19.0\",\"content_type\":\"text/html\",\"method\":\"GET\",\"host\":\"44.228.249.3\",\"path\":\"/\",\"time\":\"374.757261ms\",\"a\":[\"44.228.249.3\"],\"tech\":[\"DreamWeaver\",\"Nginx:1.19.0\",\"PHP:5.6.40\",\"Ubuntu\"],\"words\":514,\"lines\":110,\"status_code\":200,\"content_length\":4958,\"failed\":false,\"knowledgebase\":{\"PageType\":\"nonerror\",\"pHash\":0}}\n{\"timestamp\":\"2024-09-03T22:13:05.228722074Z\",\"port\":\"80\",\"url\":\"http://testaspnet.vulnweb.com\",\"input\":\"http://testaspnet.vulnweb.com\",\"title\":\"acublog news\",\"scheme\":\"http\",\"webserver\":\"Microsoft-IIS/8.5\",\"content_type\":\"text/html\",\"method\":\"GET\",\"host\":\"44.238.29.244\",\"path\":\"/\",\"time\":\"513.247119ms\",\"a\":[\"44.238.29.244\"],\"tech\":[\"IIS:8.5\",\"Microsoft ASP.NET:2.0.50727\",\"Microsoft Visual Studio\",\"Windows Server\"],\"words\":774,\"lines\":89,\"status_code\":200,\"content_length\":14081,\"failed\":false,\"knowledgebase\":{\"PageType\":\"other\",\"pHash\":0}}\n", + "time": "2024-09-03T22:12:53.901Z" + } +}, +{ + "model": "startScan.command", + "pk": 118, + "fields": { + "scan_history": 1, + "activity": null, + "command": "rm /home/rengine/scan_results/vulnweb.com/scans/14c80983-6a3b-11ef-b939-73cc220d8f97/httpx_input.txt", + "return_code": 0, + "output": "", + "time": "2024-09-03T22:13:05.280Z" + } +}, +{ + "model": "startScan.command", + "pk": 119, + "fields": { + "scan_history": null, + "activity": null, + "command": "gf -list", + "return_code": 0, + "output": "\naws-keys\nbase64\ncors\ndebug-pages\ndebug_logic\nfirebase\nfw\ngo-functions\nhttp-auth\nidor\nimg-traversal\ninterestingEXT\ninterestingparams\ninterestingsubs\nip\njson-sec\njsvar\nlfi\nmeg-headers\nphp-curl\nphp-errors\nphp-serialized\nphp-sinks\nphp-sources\nrce\nredirect\ns3-buckets\nsec\nservers\nsqli\nssrf\nssti\nstrings\ntakeovers\nupload-fields\nurls\nxss", + "time": "2024-09-03T23:36:10.771Z" + } +}, +{ + "model": "startScan.technology", + "pk": 1, + "fields": { + "name": "DreamWeaver" + } +}, +{ + "model": "startScan.technology", + "pk": 2, + "fields": { + "name": "Nginx:1.19.0" + } +}, +{ + "model": "startScan.technology", + "pk": 3, + "fields": { + "name": "PHP:5.6.40" + } +}, +{ + "model": "startScan.technology", + "pk": 4, + "fields": { + "name": "Ubuntu" + } +}, +{ + "model": "startScan.technology", + "pk": 5, + "fields": { + "name": "AngularJS" + } +}, +{ + "model": "startScan.technology", + "pk": 6, + "fields": { + "name": "IIS:8.5" + } +}, +{ + "model": "startScan.technology", + "pk": 7, + "fields": { + "name": "Microsoft ASP.NET" + } +}, +{ + "model": "startScan.technology", + "pk": 8, + "fields": { + "name": "Windows Server" + } +}, +{ + "model": "startScan.technology", + "pk": 9, + "fields": { + "name": "Apache HTTP Server:2.4.25" + } +}, +{ + "model": "startScan.technology", + "pk": 10, + "fields": { + "name": "Debian" + } +}, +{ + "model": "startScan.technology", + "pk": 11, + "fields": { + "name": "PHP:7.1.26" + } +}, +{ + "model": "startScan.technology", + "pk": 12, + "fields": { + "name": "Microsoft ASP.NET:2.0.50727" + } +}, +{ + "model": "startScan.technology", + "pk": 
13, + "fields": { + "name": "Microsoft Visual Studio" + } +}, +{ + "model": "startScan.technology", + "pk": 14, + "fields": { + "name": "Bootstrap" + } +}, +{ + "model": "startScan.technology", + "pk": 15, + "fields": { + "name": "Basic" + } +}, +{ + "model": "startScan.ipaddress", + "pk": 1, + "fields": { + "address": "44.228.249.3", + "is_cdn": false, + "geo_iso": null, + "version": null, + "is_private": false, + "reverse_pointer": null, + "ports": [ + 2 + ], + "ip_subscan_ids": [] + } +}, +{ + "model": "startScan.ipaddress", + "pk": 2, + "fields": { + "address": "44.238.29.244", + "is_cdn": false, + "geo_iso": null, + "version": null, + "is_private": false, + "reverse_pointer": null, + "ports": [ + 2 + ], + "ip_subscan_ids": [] + } +}, +{ + "model": "startScan.ipaddress", + "pk": 3, + "fields": { + "address": "35.81.188.86", + "is_cdn": false, + "geo_iso": null, + "version": null, + "is_private": false, + "reverse_pointer": null, + "ports": [ + 1, + 2 + ], + "ip_subscan_ids": [] + } +}, +{ + "model": "startScan.port", + "pk": 1, + "fields": { + "number": 8080, + "service_name": "unknown", + "description": "", + "is_uncommon": true + } +}, +{ + "model": "startScan.port", + "pk": 2, + "fields": { + "number": 80, + "service_name": "unknown", + "description": "", + "is_uncommon": false + } +}, +{ + "model": "startScan.directoryfile", + "pk": 1, + "fields": { + "length": 28674, + "lines": 688, + "http_status": 200, + "words": 5389, + "name": "ZG9jcy8=", + "url": "http://rest.vulnweb.com/docs/", + "content_type": "text/html; charset=UTF-8" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 2, + "fields": { + "length": 3555, + "lines": 138, + "http_status": 200, + "words": 1397, + "name": "aW5kZXgucGhw", + "url": "http://rest.vulnweb.com/index.php", + "content_type": "text/html; charset=UTF-8" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 3, + "fields": { + "length": 3555, + "lines": 138, + "http_status": 200, + "words": 1397, + "name": "aW5kZXgucEhw", + "url": "http://rest.vulnweb.com/index.pHp", + "content_type": "text/html; charset=UTF-8" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 4, + "fields": { + "length": 926, + "lines": 16, + "http_status": 200, + "words": 29, + "name": "X3Z0aV9jbmYv", + "url": "http://testasp.vulnweb.com/_vti_cnf/", + "content_type": "text/html" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 5, + "fields": { + "length": 3194, + "lines": 55, + "http_status": 200, + "words": 429, + "name": "bG9naW4uYXNw", + "url": "http://testasp.vulnweb.com/login.asp", + "content_type": "text/html" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 6, + "fields": { + "length": 13, + "lines": 1, + "http_status": 200, + "words": 2, + "name": "cm9ib3RzLnR4dA==", + "url": "http://testasp.vulnweb.com/robots.txt", + "content_type": "text/plain" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 7, + "fields": { + "length": 894, + "lines": 4, + "http_status": 200, + "words": 2, + "name": "ZmF2aWNvbi5pY28=", + "url": "http://testaspnet.vulnweb.com/favicon.ico", + "content_type": "image/x-icon" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 8, + "fields": { + "length": 13, + "lines": 1, + "http_status": 200, + "words": 2, + "name": "cm9ib3RzLnR4dA==", + "url": "http://testaspnet.vulnweb.com/robots.txt", + "content_type": "text/plain" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 9, + "fields": { + "length": 3, + "lines": 1, + "http_status": 200, + "words": 1, + "name": "dGVzdC50eHQ=", + "url": 
"http://testaspnet.vulnweb.com/test.txt", + "content_type": "text/plain" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 10, + "fields": { + "length": 4472, + "lines": 101, + "http_status": 200, + "words": 899, + "name": "Y29tbWVudA==", + "url": "http://testhtml5.vulnweb.com/comment", + "content_type": "text/html; charset=utf-8" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 11, + "fields": { + "length": 493, + "lines": 15, + "http_status": 200, + "words": 87, + "name": "ZXhhbXBsZXM=", + "url": "http://testhtml5.vulnweb.com/examples", + "content_type": "text/html; charset=utf-8" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 12, + "fields": { + "length": 894, + "lines": 81, + "http_status": 200, + "words": 4, + "name": "ZmF2aWNvbi5pY28=", + "url": "http://testhtml5.vulnweb.com/favicon.ico", + "content_type": "image/x-icon" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 13, + "fields": { + "length": 4462, + "lines": 101, + "http_status": 200, + "words": 898, + "name": "cmVwb3J0", + "url": "http://testhtml5.vulnweb.com/report", + "content_type": "text/html; charset=utf-8" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 14, + "fields": { + "length": 386, + "lines": 14, + "http_status": 200, + "words": 54, + "name": "c2FtcGxlcy8=", + "url": "http://testhtml5.vulnweb.com/samples/", + "content_type": "text/html; charset=utf-8" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 15, + "fields": { + "length": 386, + "lines": 14, + "http_status": 200, + "words": 54, + "name": "c2FtcGxlcw==", + "url": "http://testhtml5.vulnweb.com/samples", + "content_type": "text/html; charset=utf-8" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 16, + "fields": { + "length": 6, + "lines": 1, + "http_status": 200, + "words": 1, + "name": "LmlkZWEvLm5hbWU=", + "url": "http://testphp.vulnweb.com/.idea/.name", + "content_type": "application/octet-stream" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 17, + "fields": { + "length": 951, + "lines": 14, + "http_status": 200, + "words": 427, + "name": "LmlkZWEv", + "url": "http://testphp.vulnweb.com/.idea/", + "content_type": "text/html" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 18, + "fields": { + "length": 171, + "lines": 6, + "http_status": 200, + "words": 10, + "name": "LmlkZWEvZW5jb2RpbmdzLnhtbA==", + "url": "http://testphp.vulnweb.com/.idea/encodings.xml", + "content_type": "text/xml" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 19, + "fields": { + "length": 275, + "lines": 10, + "http_status": 200, + "words": 26, + "name": "LmlkZWEvbW9kdWxlcy54bWw=", + "url": "http://testphp.vulnweb.com/.idea/modules.xml", + "content_type": "text/xml" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 20, + "fields": { + "length": 143, + "lines": 5, + "http_status": 200, + "words": 13, + "name": "LmlkZWEvc2NvcGVzL3Njb3BlX3NldHRpbmdzLnhtbA==", + "url": "http://testphp.vulnweb.com/.idea/scopes/scope_settings.xml", + "content_type": "text/xml" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 21, + "fields": { + "length": 266, + "lines": 9, + "http_status": 200, + "words": 18, + "name": "LmlkZWEvbWlzYy54bWw=", + "url": "http://testphp.vulnweb.com/.idea/misc.xml", + "content_type": "text/xml" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 22, + "fields": { + "length": 173, + "lines": 8, + "http_status": 200, + "words": 16, + "name": "LmlkZWEvdmNzLnhtbA==", + "url": "http://testphp.vulnweb.com/.idea/vcs.xml", + "content_type": "text/xml" + } +}, +{ + "model": 
"startScan.directoryfile", + "pk": 23, + "fields": { + "length": 12473, + "lines": 217, + "http_status": 200, + "words": 1702, + "name": "LmlkZWEvd29ya3NwYWNlLnhtbA==", + "url": "http://testphp.vulnweb.com/.idea/workspace.xml", + "content_type": "text/xml" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 24, + "fields": { + "length": 400, + "lines": 9, + "http_status": 200, + "words": 122, + "name": "X21tU2VydmVyU2NyaXB0cy8=", + "url": "http://testphp.vulnweb.com/_mmServerScripts/", + "content_type": "text/html" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 25, + "fields": { + "length": 93, + "lines": 1, + "http_status": 200, + "words": 4, + "name": "X21tU2VydmVyU2NyaXB0cy9NTUhUVFBEQi5waHA=", + "url": "http://testphp.vulnweb.com/_mmServerScripts/MMHTTPDB.php", + "content_type": "text/html; charset=UTF-8" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 26, + "fields": { + "length": 262, + "lines": 8, + "http_status": 200, + "words": 66, + "name": "YWRtaW4v", + "url": "http://testphp.vulnweb.com/admin/", + "content_type": "text/html" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 27, + "fields": { + "length": 224, + "lines": 5, + "http_status": 200, + "words": 8, + "name": "Y3Jvc3Nkb21haW4ueG1s", + "url": "http://testphp.vulnweb.com/crossdomain.xml", + "content_type": "text/xml" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 28, + "fields": { + "length": 595, + "lines": 11, + "http_status": 200, + "words": 262, + "name": "Q1ZTLw==", + "url": "http://testphp.vulnweb.com/CVS/", + "content_type": "text/html" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 29, + "fields": { + "length": 1, + "lines": 1, + "http_status": 200, + "words": 2, + "name": "Q1ZTL0VudHJpZXM=", + "url": "http://testphp.vulnweb.com/CVS/Entries", + "content_type": "application/octet-stream" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 30, + "fields": { + "length": 1, + "lines": 1, + "http_status": 200, + "words": 2, + "name": "Q1ZTL1Jvb3Q=", + "url": "http://testphp.vulnweb.com/CVS/Root", + "content_type": "application/octet-stream" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 31, + "fields": { + "length": 894, + "lines": 4, + "http_status": 200, + "words": 2, + "name": "ZmF2aWNvbi5pY28=", + "url": "http://testphp.vulnweb.com/favicon.ico", + "content_type": "image/x-icon" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 32, + "fields": { + "length": 377, + "lines": 9, + "http_status": 200, + "words": 128, + "name": "aW1hZ2VzLw==", + "url": "http://testphp.vulnweb.com/images/", + "content_type": "text/html" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 33, + "fields": { + "length": 3265, + "lines": 91, + "http_status": 200, + "words": 350, + "name": "aW5kZXguYmFr", + "url": "http://testphp.vulnweb.com/index.bak", + "content_type": "application/octet-stream" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 34, + "fields": { + "length": 4958, + "lines": 110, + "http_status": 200, + "words": 514, + "name": "aW5kZXgucGhw", + "url": "http://testphp.vulnweb.com/index.php", + "content_type": "text/html; charset=UTF-8" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 35, + "fields": { + "length": 2586, + "lines": 2, + "http_status": 200, + "words": 9, + "name": "aW5kZXguemlw", + "url": "http://testphp.vulnweb.com/index.zip", + "content_type": "application/zip" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 36, + "fields": { + "length": 5523, + "lines": 120, + "http_status": 200, + "words": 557, + "name": 
"bG9naW4ucGhw", + "url": "http://testphp.vulnweb.com/login.php", + "content_type": "text/html; charset=UTF-8" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 37, + "fields": { + "length": 268, + "lines": 8, + "http_status": 200, + "words": 60, + "name": "dmVuZG9yLw==", + "url": "http://testphp.vulnweb.com/vendor/", + "content_type": "text/html" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 38, + "fields": { + "length": 4018, + "lines": 74, + "http_status": 200, + "words": 482, + "name": "aW5kZXguaHRtbA==", + "url": "http://vulnweb.com/index.html", + "content_type": "text/html" + } +}, +{ + "model": "startScan.directoryfile", + "pk": 39, + "fields": { + "length": 4018, + "lines": 74, + "http_status": 200, + "words": 482, + "name": "aW5kZXguaHRtbA==", + "url": "http://www.vulnweb.com/index.html", + "content_type": "text/html" + } +}, +{ + "model": "startScan.directoryscan", + "pk": 1, + "fields": { + "command_line": "ffuf -w /home/rengine/wordlists/fuzz-Bo0oM.txt -p 0.05 -t 30 -timeout 5 -ac -mc 200,204 -u http://rest.vulnweb.com/FUZZ -json", + "scanned_date": "2024-09-03T22:02:45.119Z", + "directory_files": [ + 1, + 2, + 3 + ], + "dir_subscan_ids": [] + } +}, +{ + "model": "startScan.directoryscan", + "pk": 2, + "fields": { + "command_line": "ffuf -w /home/rengine/wordlists/fuzz-Bo0oM.txt -p 0.05 -t 30 -timeout 5 -ac -mc 200,204 -u http://testasp.vulnweb.com/FUZZ -json", + "scanned_date": "2024-09-03T22:05:24.142Z", + "directory_files": [ + 4, + 5, + 6 + ], + "dir_subscan_ids": [] + } +}, +{ + "model": "startScan.directoryscan", + "pk": 3, + "fields": { + "command_line": "ffuf -w /home/rengine/wordlists/fuzz-Bo0oM.txt -p 0.05 -t 30 -timeout 5 -ac -mc 200,204 -u http://testaspnet.vulnweb.com/FUZZ -json", + "scanned_date": "2024-09-03T22:06:42.524Z", + "directory_files": [ + 7, + 8, + 9 + ], + "dir_subscan_ids": [] + } +}, +{ + "model": "startScan.directoryscan", + "pk": 4, + "fields": { + "command_line": "ffuf -w /home/rengine/wordlists/fuzz-Bo0oM.txt -p 0.05 -t 30 -timeout 5 -ac -mc 200,204 -u http://testhtml5.vulnweb.com/FUZZ -json", + "scanned_date": "2024-09-03T22:07:57.153Z", + "directory_files": [ + 10, + 11, + 12, + 13, + 14, + 15 + ], + "dir_subscan_ids": [] + } +}, +{ + "model": "startScan.directoryscan", + "pk": 5, + "fields": { + "command_line": "ffuf -w /home/rengine/wordlists/fuzz-Bo0oM.txt -p 0.05 -t 30 -timeout 5 -ac -mc 200,204 -u http://testphp.vulnweb.com/FUZZ -json", + "scanned_date": "2024-09-03T22:09:11.505Z", + "directory_files": [ + 16, + 17, + 18, + 19, + 20, + 21, + 22, + 23, + 24, + 25, + 26, + 27, + 28, + 29, + 30, + 31, + 32, + 33, + 34, + 35, + 36, + 37 + ], + "dir_subscan_ids": [] + } +}, +{ + "model": "startScan.directoryscan", + "pk": 6, + "fields": { + "command_line": "ffuf -w /home/rengine/wordlists/fuzz-Bo0oM.txt -p 0.05 -t 30 -timeout 5 -ac -mc 200,204 -u http://vulnweb.com/FUZZ -json", + "scanned_date": "2024-09-03T22:10:26.882Z", + "directory_files": [ + 38 + ], + "dir_subscan_ids": [] + } +}, +{ + "model": "startScan.directoryscan", + "pk": 7, + "fields": { + "command_line": "ffuf -w /home/rengine/wordlists/fuzz-Bo0oM.txt -p 0.05 -t 30 -timeout 5 -ac -mc 200,204 -u http://www.vulnweb.com/FUZZ -json", + "scanned_date": "2024-09-03T22:11:40.431Z", + "directory_files": [ + 39 + ], + "dir_subscan_ids": [] + } +}, +{ + "model": "startScan.dork", + "pk": 1, + "fields": { + "type": "login_pages", + "url": "http://rest.vulnweb.com/basic_authentication/api/" + } +}, +{ + "model": "startScan.dork", + "pk": 2, + "fields": { + "type": 
"login_pages", + "url": "http://rest.vulnweb.com/docs/" + } +}, +{ + "model": "startScan.dork", + "pk": 3, + "fields": { + "type": "login_pages", + "url": "http://testasp.vulnweb.com/Search.asp" + } +}, +{ + "model": "startScan.dork", + "pk": 4, + "fields": { + "type": "login_pages", + "url": "http://testaspnet.vulnweb.com/login.aspx" + } +}, +{ + "model": "startScan.dork", + "pk": 5, + "fields": { + "type": "login_pages", + "url": "http://testhtml5.vulnweb.com/" + } +}, +{ + "model": "startScan.dork", + "pk": 6, + "fields": { + "type": "login_pages", + "url": "http://testphp.vulnweb.com/" + } +}, +{ + "model": "startScan.dork", + "pk": 7, + "fields": { + "type": "login_pages", + "url": "http://testphp.vulnweb.com/admin/" + } +}, +{ + "model": "startScan.dork", + "pk": 8, + "fields": { + "type": "login_pages", + "url": "http://testphp.vulnweb.com/artists.php" + } +}, +{ + "model": "startScan.dork", + "pk": 9, + "fields": { + "type": "login_pages", + "url": "http://testphp.vulnweb.com/artists.php?artist" + } +}, +{ + "model": "startScan.dork", + "pk": 10, + "fields": { + "type": "login_pages", + "url": "http://testphp.vulnweb.com/artists.php?artist=1" + } +}, +{ + "model": "startScan.dork", + "pk": 11, + "fields": { + "type": "login_pages", + "url": "http://testphp.vulnweb.com/artists.php?artist=2" + } +}, +{ + "model": "startScan.dork", + "pk": 12, + "fields": { + "type": "login_pages", + "url": "http://testphp.vulnweb.com/categories.php" + } +}, +{ + "model": "startScan.dork", + "pk": 13, + "fields": { + "type": "login_pages", + "url": "http://testphp.vulnweb.com/disclaimer.php" + } +}, +{ + "model": "startScan.dork", + "pk": 14, + "fields": { + "type": "login_pages", + "url": "http://testphp.vulnweb.com/listproducts.php?cat" + } +}, +{ + "model": "startScan.dork", + "pk": 15, + "fields": { + "type": "login_pages", + "url": "http://testphp.vulnweb.com/listproducts.php?cat=1" + } +}, +{ + "model": "startScan.dork", + "pk": 16, + "fields": { + "type": "login_pages", + "url": "http://testphp.vulnweb.com/login.php" + } +}, +{ + "model": "startScan.dork", + "pk": 17, + "fields": { + "type": "login_pages", + "url": "http://testphp.vulnweb.com/search.php" + } +}, +{ + "model": "startScan.dork", + "pk": 18, + "fields": { + "type": "login_pages", + "url": "http://www.vulnweb.com/" + } +}, +{ + "model": "startScan.dork", + "pk": 19, + "fields": { + "type": "admin_panels", + "url": "http://rest.vulnweb.com/basic_authentication/api/" + } +}, +{ + "model": "startScan.dork", + "pk": 20, + "fields": { + "type": "admin_panels", + "url": "http://rest.vulnweb.com/docs/" + } +}, +{ + "model": "startScan.dork", + "pk": 21, + "fields": { + "type": "admin_panels", + "url": "http://rest.vulnweb.com/images/1_basic_authentication.png" + } +}, +{ + "model": "startScan.dork", + "pk": 22, + "fields": { + "type": "admin_panels", + "url": "http://rest.vulnweb.com/images/2_jwt.png" + } +}, +{ + "model": "startScan.dork", + "pk": 23, + "fields": { + "type": "admin_panels", + "url": "http://rest.vulnweb.com/images/3_oauth2.png" + } +}, +{ + "model": "startScan.dork", + "pk": 24, + "fields": { + "type": "admin_panels", + "url": "http://testphp.vulnweb.com/admin/" + } +}, +{ + "model": "startScan.dork", + "pk": 25, + "fields": { + "type": "admin_panels", + "url": "http://testphp.vulnweb.com/login.php" + } +}, +{ + "model": "startScan.dork", + "pk": 26, + "fields": { + "type": "admin_panels", + "url": "http://testphp.vulnweb.com/secured/phpinfo.php" + } +}, +{ + "model": "startScan.dork", + "pk": 27, + "fields": { + "type": 
"social_media", + "url": "https://m.facebook.com/QwertyAloneC/posts/dork-sqli-just-add-inurl-before-themphpcatidcartphpidphpcidindexphpshowtopiccont/142828509609539/" + } +}, +{ + "model": "startScan.dork", + "pk": 28, + "fields": { + "type": "social_media", + "url": "https://m.facebook.com/groups/3201261889909211/posts/3624197390948990/" + } +}, +{ + "model": "startScan.dork", + "pk": 29, + "fields": { + "type": "social_media", + "url": "https://m.facebook.com/groups/3201261889909211/posts/4007010936000965/" + } +}, +{ + "model": "startScan.dork", + "pk": 30, + "fields": { + "type": "social_media", + "url": "https://m.facebook.com/groups/3201261889909211/posts/4007311169304275/" + } +}, +{ + "model": "startScan.dork", + "pk": 31, + "fields": { + "type": "social_media", + "url": "https://m.facebook.com/groups/3201261889909211/posts/4188319924536731/" + } +}, +{ + "model": "startScan.dork", + "pk": 32, + "fields": { + "type": "social_media", + "url": "https://m.facebook.com/groups/3201261889909211/posts/4367877359914319/" + } +}, +{ + "model": "startScan.dork", + "pk": 33, + "fields": { + "type": "social_media", + "url": "https://m.facebook.com/groups/3201261889909211/posts/6293716407330395/" + } +}, +{ + "model": "startScan.dork", + "pk": 34, + "fields": { + "type": "social_media", + "url": "https://m.facebook.com/groups/hackingteam2022/posts/2726773620796174/" + } +}, +{ + "model": "startScan.dork", + "pk": 35, + "fields": { + "type": "social_media", + "url": "https://www.facebook.com/KaliLinuxBrazil/photos/tenha-certeza-de-estar-com-o-tor-instaladoinstala%C3%A7%C3%A3o-tor-rootkaliapt-get-install/972928206082146/" + } +}, +{ + "model": "startScan.dork", + "pk": 36, + "fields": { + "type": "social_media", + "url": "https://www.facebook.com/KaliLinuxBrazil/photos/tenha-certeza-de-estar-com-o-tor-instaladoinstala&C3&A7&C3&A3o-tor-rootkaliapt-get-install/972928206082146/" + } +}, +{ + "model": "startScan.dork", + "pk": 37, + "fields": { + "type": "social_media", + "url": "https://www.facebook.com/groups/3201261889909211/" + } +}, +{ + "model": "startScan.dork", + "pk": 38, + "fields": { + "type": "social_media", + "url": "https://mobile.twitter.com/MrHades2020" + } +}, +{ + "model": "startScan.dork", + "pk": 39, + "fields": { + "type": "social_media", + "url": "https://twitter.com/0xSwayamm" + } +}, +{ + "model": "startScan.dork", + "pk": 40, + "fields": { + "type": "social_media", + "url": "https://twitter.com/7h3r4bb17" + } +}, +{ + "model": "startScan.dork", + "pk": 41, + "fields": { + "type": "social_media", + "url": "https://twitter.com/SadatTamzit" + } +}, +{ + "model": "startScan.dork", + "pk": 42, + "fields": { + "type": "social_media", + "url": "https://twitter.com/deathpoolx1" + } +}, +{ + "model": "startScan.dork", + "pk": 43, + "fields": { + "type": "social_media", + "url": "https://twitter.com/hunterabubakar" + } +}, +{ + "model": "startScan.dork", + "pk": 44, + "fields": { + "type": "social_media", + "url": "https://twitter.com/kg4409" + } +}, +{ + "model": "startScan.dork", + "pk": 45, + "fields": { + "type": "social_media", + "url": "https://twitter.com/ravidutt04?lang" + } +}, +{ + "model": "startScan.dork", + "pk": 46, + "fields": { + "type": "social_media", + "url": "https://twitter.com/ravidutt04?lang=ca" + } +}, +{ + "model": "startScan.dork", + "pk": 47, + "fields": { + "type": "social_media", + "url": "https://twitter.com/therceman/status/1711473903934054427" + } +}, +{ + "model": "startScan.dork", + "pk": 48, + "fields": { + "type": "social_media", + "url": 
"https://twitter.com/vishack81" + } +}, +{ + "model": "startScan.dork", + "pk": 49, + "fields": { + "type": "social_media", + "url": "https://m.youtube.com/watch?v" + } +}, +{ + "model": "startScan.dork", + "pk": 50, + "fields": { + "type": "social_media", + "url": "https://m.youtube.com/watch?v=2_lswM1S264" + } +}, +{ + "model": "startScan.dork", + "pk": 51, + "fields": { + "type": "social_media", + "url": "https://m.youtube.com/watch?v=2tJgPyRITGc" + } +}, +{ + "model": "startScan.dork", + "pk": 52, + "fields": { + "type": "social_media", + "url": "https://m.youtube.com/watch?v=6FDKHewTP4A" + } +}, +{ + "model": "startScan.dork", + "pk": 53, + "fields": { + "type": "social_media", + "url": "https://m.youtube.com/watch?v=cEoPdpVUeyU" + } +}, +{ + "model": "startScan.dork", + "pk": 54, + "fields": { + "type": "social_media", + "url": "https://m.youtube.com/watch?v=gHkGuVb9vX8" + } +}, +{ + "model": "startScan.dork", + "pk": 55, + "fields": { + "type": "social_media", + "url": "https://www.youtube.com/watch?v" + } +}, +{ + "model": "startScan.dork", + "pk": 56, + "fields": { + "type": "social_media", + "url": "https://www.youtube.com/watch?v=IbDAHDSlgYg" + } +}, +{ + "model": "startScan.dork", + "pk": 57, + "fields": { + "type": "social_media", + "url": "https://www.youtube.com/watch?v=ZstyFyfS3g4" + } +}, +{ + "model": "startScan.dork", + "pk": 58, + "fields": { + "type": "social_media", + "url": "https://www.youtube.com/watch?v=cEoPdpVUeyU" + } +}, +{ + "model": "startScan.dork", + "pk": 59, + "fields": { + "type": "social_media", + "url": "https://www.youtube.com/watch?v=dabm-7CcHaE" + } +}, +{ + "model": "startScan.dork", + "pk": 60, + "fields": { + "type": "social_media", + "url": "https://www.youtube.com/watch?v=tAxMpoKkvCw" + } +}, +{ + "model": "startScan.dork", + "pk": 61, + "fields": { + "type": "social_media", + "url": "https://www.reddit.com/r/sysadmin/comments/gs031c/how_to_login_to_web_by_submitting_username/" + } +}, +{ + "model": "startScan.dork", + "pk": 62, + "fields": { + "type": "code_sharing", + "url": "https://github.com/dradis/dradis-acunetix/blob/master/spec/fixtures/files/testphp.vulnweb.com.export.acunetix.xml" + } +}, +{ + "model": "startScan.dork", + "pk": 63, + "fields": { + "type": "code_sharing", + "url": "https://github.com/dtag-dev-sec/explo/blob/master/examples/SQLI_LOGIN_testphp.vulnweb.com.yaml" + } +}, +{ + "model": "startScan.dork", + "pk": 64, + "fields": { + "type": "code_sharing", + "url": "https://github.com/dtag-dev-sec/explo/blob/master/examples/SQLI_testphp.vulnweb.com.yaml" + } +}, +{ + "model": "startScan.dork", + "pk": 65, + "fields": { + "type": "code_sharing", + "url": "https://github.com/yangbh/Hammer/blob/master/output/testphp.vulnweb.com/http_testphp.vulnweb.com" + } +}, +{ + "model": "startScan.dork", + "pk": 66, + "fields": { + "type": "code_sharing", + "url": "https://bitbucket.org/snippets/alms/KrG4LL" + } +}, +{ + "model": "startScan.dork", + "pk": 67, + "fields": { + "type": "code_sharing", + "url": "https://bitbucket.org/snippets/bakueikozo/rex9ar/playstation-classic-uart-login-console" + } +}, +{ + "model": "startScan.dork", + "pk": 68, + "fields": { + "type": "code_sharing", + "url": "https://bitbucket.org/snippets/especnorthamerica/neb6dq/examples-of-web-controller-rest-api-usage" + } +}, +{ + "model": "startScan.dork", + "pk": 69, + "fields": { + "type": "code_sharing", + "url": "https://bitbucket.org/snippets/faridani/pRX6r" + } +}, +{ + "model": "startScan.dork", + "pk": 70, + "fields": { + "type": "code_sharing", + "url": 
"https://bitbucket.org/snippets/orrp/xeGzXB/interactive-mgg" + } +}, +{ + "model": "startScan.dork", + "pk": 71, + "fields": { + "type": "code_sharing", + "url": "https://bitbucket.org/snippets/raerose01/5enKR5" + } +}, +{ + "model": "startScan.dork", + "pk": 72, + "fields": { + "type": "code_sharing", + "url": "https://bitbucket.org/snippets/sglienke/64LG6b/introsort" + } +}, +{ + "model": "startScan.dork", + "pk": 73, + "fields": { + "type": "code_sharing", + "url": "https://bitbucket.org/snippets/sglienke/6oBqMb" + } +}, +{ + "model": "startScan.dork", + "pk": 74, + "fields": { + "type": "code_sharing", + "url": "https://bitbucket.org/snippets/suntorytime/rAGXar/wellness-sources-overview" + } +}, +{ + "model": "startScan.dork", + "pk": 75, + "fields": { + "type": "code_sharing", + "url": "https://bitbucket.org/snippets/wmgodyak/6bXKj" + } +}, +{ + "model": "startScan.dork", + "pk": 76, + "fields": { + "type": "config_files", + "url": "http://testphp.vulnweb.com/.idea/workspace.xml" + } +}, +{ + "model": "startScan.dork", + "pk": 77, + "fields": { + "type": "config_files", + "url": "http://testphp.vulnweb.com/crossdomain.xml" + } +} +] diff --git a/web/startScan/migrations/0056_alter_endpoint_techs.py b/web/startScan/migrations/0056_alter_endpoint_techs.py index 17f30caad..aec285a75 100644 --- a/web/startScan/migrations/0056_alter_endpoint_techs.py +++ b/web/startScan/migrations/0056_alter_endpoint_techs.py @@ -15,4 +15,4 @@ class Migration(migrations.Migration): name='techs', field=models.ManyToManyField(blank=True, related_name='techs', to='startScan.Technology'), ), - ] + ] \ No newline at end of file diff --git a/web/startScan/migrations/0057_auto_20231201_2354.py b/web/startScan/migrations/0057_auto_20231201_2354.py new file mode 100644 index 000000000..7b684a802 --- /dev/null +++ b/web/startScan/migrations/0057_auto_20231201_2354.py @@ -0,0 +1,26 @@ +# Generated by Django 3.2.4 on 2023-12-01 23:54 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ('startScan', '0056_alter_endpoint_techs'), + ] + + operations = [ + migrations.AddField( + model_name='scanhistory', + name='aborted_by', + field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='aborted_scans', to=settings.AUTH_USER_MODEL), + ), + migrations.AddField( + model_name='scanhistory', + name='initiated_by', + field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='initiated_scans', to=settings.AUTH_USER_MODEL), + ), + ] diff --git a/web/startScan/models.py b/web/startScan/models.py index d698fe8f0..f49c310e0 100644 --- a/web/startScan/models.py +++ b/web/startScan/models.py @@ -1,5 +1,6 @@ from urllib.parse import urlparse from django.apps import apps +from django.contrib.auth.models import User from django.contrib.postgres.fields import ArrayField from django.db import models from django.utils import timezone @@ -45,6 +46,9 @@ class ScanHistory(models.Model): employees = models.ManyToManyField('Employee', related_name='employees', blank=True) buckets = models.ManyToManyField('S3Bucket', related_name='buckets', blank=True) dorks = models.ManyToManyField('Dork', related_name='dorks', blank=True) + initiated_by = models.ForeignKey(User, on_delete=models.CASCADE, related_name='initiated_scans', blank=True, null=True) + aborted_by = 
models.ForeignKey(User, on_delete=models.SET_NULL, null=True, blank=True, related_name='aborted_scans') + def __str__(self): return self.domain.name diff --git a/web/startScan/static/startScan/js/detail_scan.js b/web/startScan/static/startScan/js/detail_scan.js index 0a8d5ba60..e32cd2e98 100644 --- a/web/startScan/static/startScan/js/detail_scan.js +++ b/web/startScan/static/startScan/js/detail_scan.js @@ -41,11 +41,11 @@ function render_ips(data) } -function get_endpoints(project, scan_history_id=null, domain_id=null, gf_tags=null){ +function get_endpoints(endpoint_endpoint_url, endpoint_subdomain_url, project, scan_history_id=null, domain_id=null, gf_tags=null){ var is_endpoint_grouping = false; var endpoint_grouping_col = 6; - var lookup_url = '/api/listEndpoints/?format=datatables&project=' + project; + var lookup_url = endpoint_endpoint_url + '?format=datatables&project=' + project; if (scan_history_id) { lookup_url += `&scan_history=${scan_history_id}`; @@ -111,7 +111,7 @@ function get_endpoints(project, scan_history_id=null, domain_id=null, gf_tags=nu var tech_badge = ''; var web_server = ''; if (row['techs']){ - tech_badge = `
` + parse_technology(row['techs'], "primary", outline=true); + tech_badge = `
` + parse_technology(endpoint_subdomain_url, row['techs'], "primary", true, false, true); } if (row['webserver']) { @@ -145,6 +145,12 @@ function get_endpoints(project, scan_history_id=null, domain_id=null, gf_tags=nu }, "targets": 2, }, + { + "render": function ( data, type, row ) { + return htmlEncode(data); + }, + "targets": 3, + }, { "render": function ( data, type, row ) { if (data){ @@ -269,7 +275,7 @@ function get_endpoints(project, scan_history_id=null, domain_id=null, gf_tags=nu }); } -function get_subdomain_changes(scan_history_id){ +function get_subdomain_changes(endpoint, scan_history_id){ $('#table-subdomain-changes').DataTable({ "drawCallback": function(settings, start, end, max, total, pre) { if (this.fnSettings().fnRecordsTotal() > 0) { @@ -297,7 +303,7 @@ function get_subdomain_changes(scan_history_id){ "destroy": true, "stripeClasses": [], 'serverSide': true, - "ajax": `/api/listSubdomainChanges/?scan_id=${scan_history_id}&format=datatables`, + "ajax": `${endpoint}?scan_id=${scan_history_id}&format=datatables`, "order": [[ 3, "desc" ]], "columns": [ {'data': 'name'}, @@ -345,6 +351,15 @@ function get_subdomain_changes(scan_history_id){ }, "targets": 0 }, + { + "render": function ( data, type, row ) { + if (data){ + return htmlEncode(data); + } + return ""; + }, + "targets": 1, + }, { "render": function ( data, type, row ) { // display badge based on http status @@ -387,7 +402,7 @@ function get_subdomain_changes(scan_history_id){ }); } -function get_endpoint_changes(scan_history_id){ +function get_endpoint_changes(endpoint, scan_history_id){ $('#table-endpoint-changes').DataTable({ "drawCallback": function(settings, start, end, max, total, pre) { if (this.fnSettings().fnRecordsTotal() > 0) { @@ -414,7 +429,7 @@ function get_endpoint_changes(scan_history_id){ "destroy": true, "stripeClasses": [], 'serverSide': true, - "ajax": `/api/listEndPointChanges/?scan_id=${scan_history_id}&format=datatables`, + "ajax": `${endpoint}?scan_id=${scan_history_id}&format=datatables`, "order": [[ 3, "desc" ]], "columns": [ {'data': 'http_url'}, @@ -481,7 +496,7 @@ function get_osint_users(scan_id){ }); } -function get_screenshot(scan_id){ +function get_screenshot(endpoint, scan_id){ var port_array = []; var service_array = []; var tech_array = []; @@ -493,7 +508,7 @@ function get_screenshot(scan_id){ gridzyElement.setAttribute('data-gridzy-desiredwidth', 350); gridzyElement.setAttribute('data-gridzySearchField', "#screenshot-search"); var interesting_badge = `Interesting`; - $.getJSON(`/api/listSubdomains/?scan_id=${scan_id}&no_page&only_screenshot`, function(data) { + $.getJSON(`${endpoint}?scan_id=${scan_id}&no_page&only_screenshot`, function(data) { $("#screenshot-loader").remove(); $("#filter-screenshot").show(); for (var subdomain in data) { @@ -847,14 +862,14 @@ function get_dorks(scan_id){ function get_dork_details(dork_type, scan_id){ // render tab modal - $('.modal-title').html('Dorking Results in category: ' + dork_type + ''); + $('#modal_dialog .modal-title').html('Dorking Results in category: ' + dork_type + ''); $('#modal_dialog').modal('show'); - $('.modal-text').empty(); $('#modal-footer').empty(); - $('.modal-text').append(``); + $('#modal_dialog .modal-text').empty(); $('#modal_dialog .modal-footer').empty(); + $('#modal_dialog .modal-text').append(``); $.getJSON(`/api/queryDorks/?scan_id=${scan_id}&type=${dork_type}&format=json`, function(data) { - $('#modal-loader').empty(); - $('#modal-content').append(`${data['dorks'].length} results found in this dork category.`); - 
$('#modal-content').append(`
    `); + $('#modal_dialog #modal-loader').empty(); + $('#modal_dialog .modal-text').append(`${data['dorks'].length} results found in this dork category.`); + $('#modal_dialog .modal-text').append(`
      `); for (dork in data['dorks']){ dork_obj = data['dorks'][dork]; $("#dork-detail-modal-ul").append(`
    • ${dork_obj['description']}
    • `); @@ -863,8 +878,8 @@ function get_dork_details(dork_type, scan_id){ } -function get_vulnerability_modal(scan_id=null, severity=null, subdomain_id=null, subdomain_name=null){ - var url = `/api/listVulnerability/?&format=json`; +function get_vulnerability_modal(endpoint_url, scan_id=null, severity=null, subdomain_id=null, subdomain_name=null){ + var url = `${endpoint_url}?&format=json`; if (scan_id) { url += `&scan_history=${scan_id}`; @@ -880,7 +895,7 @@ function get_vulnerability_modal(scan_id=null, severity=null, subdomain_id=null, // else{ - // url = `/api/listVulnerability/?severity=${severity}&subdomain_name=${subdomain_name}&format=json`; + // url = `${endpoint_url}?severity=${severity}&subdomain_name=${subdomain_name}&format=json`; // } switch (severity) { case 0: @@ -921,7 +936,7 @@ function get_vulnerability_modal(scan_id=null, severity=null, subdomain_id=null, }).then(response => response.json()).then(function(response) { swal.close(); $('#xl-modal_title').html(`${subdomain_name}`); - render_vulnerability_in_xl_modal(response['count'], subdomain_name, response['results']) + render_vulnerability_in_xl_modal(endpoint_url, response['count'], subdomain_name, response['results']) }); $('#modal_xl_scroll_dialog').modal('show'); $("body").tooltip({ @@ -931,18 +946,19 @@ function get_vulnerability_modal(scan_id=null, severity=null, subdomain_id=null, } -function get_endpoint_modal(project, scan_id, subdomain_id, subdomain_name){ +function get_endpoint_modal(endpoint_url, project, scan_id, subdomain_id, subdomain_name){ // This function will display a xl modal with datatable for displaying endpoints // associated with the subdomain $('#xl-modal-title').empty(); $('#xl-modal-content').empty(); $('#xl-modal-footer').empty(); + let url = ''; if (scan_id) { - url = `/api/listEndpoints/?project=${project}&scan_id=${scan_id}&subdomain_id=${subdomain_id}&format=json` + url = `${endpoint_url}?project=${project}&scan_id=${scan_id}&subdomain_id=${subdomain_id}&format=json` } else{ - url = `/api/listEndpoints/?project=${project}&subdomain_id=${subdomain_id}&format=json` + url = `${endpoint_url}?project=${project}&subdomain_id=${subdomain_id}&format=json` } Swal.fire({ @@ -960,7 +976,7 @@ function get_endpoint_modal(project, scan_id, subdomain_id, subdomain_name){ }).then(response => response.json()).then(function(response) { swal.close(); $('#xl-modal_title').html(`${subdomain_name}`); - render_endpoint_in_xlmodal(response['count'], subdomain_name, response['results']) + render_endpoint_in_xl_modal(response['count'], subdomain_name, response['results']) }); $('#modal_xl_scroll_dialog').modal('show'); $("body").tooltip({ @@ -969,18 +985,19 @@ function get_endpoint_modal(project, scan_id, subdomain_id, subdomain_name){ } -function get_directory_modal(scan_id=null, subdomain_id=null, subdomain_name=null){ +function get_directory_modal(endpoint_url, scan_id=null, subdomain_id=null, subdomain_name=null){ // This function will display a xl modal with datatable for displaying endpoints // associated with the subdomain $('#xl-modal-title').empty(); $('#xl-modal-content').empty(); $('#xl-modal-footer').empty(); + let url = ''; if (scan_id) { - url = `/api/listDirectories/?scan_id=${scan_id}&subdomain_id=${subdomain_id}&format=json` + url = `${endpoint_url}?scan_id=${scan_id}&subdomain_id=${subdomain_id}&format=json` } else{ - url = `/api/listDirectories/?subdomain_id=${subdomain_id}&format=json` + url = `${endpoint_url}?subdomain_id=${subdomain_id}&format=json` } Swal.fire({ @@ -1109,7 +1126,7 @@ 
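The dork modal above fetches /api/queryDorks/?scan_id=...&type=...&format=json and walks data['dorks']. The server side of that call is not part of this diff; given the Dork model seeded in the fixtures (one "type" and one "url" per row) and the dorks many-to-many on ScanHistory, a plausible shape for it is sketched below — the view name and response keys are inferred, not confirmed:

    from rest_framework.decorators import api_view
    from rest_framework.response import Response
    from startScan.models import ScanHistory

    @api_view(['GET'])
    def query_dorks(request):
        # scan_id and type arrive as GET params, mirroring the $.getJSON call
        scan = ScanHistory.objects.get(id=request.GET.get('scan_id'))
        dorks = scan.dorks.filter(type=request.GET.get('type'))
        return Response({'dorks': list(dorks.values('type', 'url'))})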
$(".add-scan-history-todo").click(function(){ .then(function (response) { if (response.status) { Snackbar.show({ - text: 'Todo Added.', + text: 'To-do Added.', pos: 'top-right', duration: 1500, }); @@ -1125,12 +1142,12 @@ $(".add-scan-history-todo").click(function(){ }); -function add_note_for_subdomain(subdomain_id, subdomain_name){ +function add_note_for_subdomain(subdomain_id, subdomain_name, current_project){ $('#todo-modal-subdomain-name').html(subdomain_name); $("#subdomainTodoTitle").val(''); $("#subdomainTodoDescription").val(''); - $('#add-todo-subdomain-submit-button').attr('onClick', `add_note_for_subdomain_handler(${subdomain_id});`); + $('#add-todo-subdomain-submit-button').attr('onClick', `add_note_for_subdomain_handler(${subdomain_id}, '${current_project}');`); $('#addSubdomainTaskModal').modal('show'); @@ -1138,17 +1155,16 @@ function add_note_for_subdomain(subdomain_id, subdomain_name){ } -function add_note_for_subdomain_handler(subdomain_id){ +function add_note_for_subdomain_handler(subdomain_id, current_project){ var title = document.getElementById('subdomainTodoTitle').value; var description = document.getElementById('subdomainTodoDescription').value; - var project = document.querySelector('input[name="current_project"]').value; var scan_id = parseInt(document.getElementById('summary_identifier_val').value); data = { 'title': title, 'description': description, 'subdomain_id': subdomain_id, - 'project': project, + 'project': current_project, 'scan_history_id': scan_id } @@ -1164,7 +1180,7 @@ function add_note_for_subdomain_handler(subdomain_id){ if (response.status) { Snackbar.show({ - text: 'Todo Added.', + text: 'To-do Added.', pos: 'top-right', duration: 1500, }); @@ -1201,7 +1217,7 @@ function download_subdomains(scan_id=null, domain_id=null, domain_name=null){ $('.modal-title').html(count + ' Subdomains'); } - $('.modal-text').empty(); $('#modal-footer').empty(); + $('.modal-text').empty(); $('#modal_dialog .modal-footer').empty(); $('.modal-text').append(``); // query subdomains $.getJSON(url, function(data) { @@ -1209,17 +1225,17 @@ function download_subdomains(scan_id=null, domain_id=null, domain_name=null){ if (data['subdomains'].length) { $('#modal_dialog').modal('show'); $('.modal_count').html(data['subdomains'].length); - $('#modal-content').empty(); + $('#modal_dialog .modal-text').empty(); subdomains = ''; - $('#modal-content').append(``); + $('#modal_dialog .modal-text').append(``); for (subdomain in data['subdomains']){ subdomain_obj = data['subdomains'][subdomain]; subdomains += subdomain_obj['name'] + '\n' } $('#all_subdomains_text_area').append(subdomains); - $("#modal-footer").empty(); - $("#modal-footer").append(` Download Subdomains as txt`); - $("#modal-footer").append(` Copy Subdomains`); + $("#modal_dialog .modal-footer").empty(); + $("#modal_dialog .modal-footer").append(` Download Subdomains as txt`); + $("#modal_dialog .modal-footer").append(` Copy Subdomains`); } else{ swal.fire("No Subdomains", "Could not find any subdomains.", "warning", { @@ -1253,23 +1269,23 @@ function download_interesting_subdomains(project, scan_id=null, domain_id=null, else{ $('.modal-title').html( count + ' Interesting Subdomains'); } - $('.modal-text').empty(); $('#modal-footer').empty(); + $('.modal-text').empty(); $('#modal_dialog .modal-footer').empty(); // query subdomains $.getJSON(url, function(data) { swal.close() if (data.length) { $('#modal_dialog').modal('show'); $('.modal_count').html(data.length); - $('#modal-content').empty(); + $('#modal_dialog 
.modal-text').empty(); subdomains = ''; - $('#modal-content').append(``); + $('#modal_dialog .modal-text').append(``); for (subdomain in data){ subdomains += data[subdomain]['name'] + '\n' } $('#interesting_subdomains_text_area').append(subdomains); - $("#modal-footer").empty(); - $("#modal-footer").append(` Download Subdomains as txt`); - $("#modal-footer").append(` Copy Subdomains`); + $("#modal_dialog .modal-footer").empty(); + $("#modal_dialog .modal-footer").append(` Download Subdomains as txt`); + $("#modal_dialog .modal-footer").append(` Copy Subdomains`); } else{ swal.fire("No Interesting Subdomains", "Could not find any interesting subdomains.", "warning", { @@ -1302,23 +1318,23 @@ function download_interesting_endpoints(scan_id, domain_name){ else{ $('.modal-title').html( count + ' Interesting Endpoints'); } - $('.modal-text').empty(); $('#modal-footer').empty(); + $('.modal-text').empty(); $('#modal_dialog .modal-footer').empty(); // query subdomains $.getJSON(url, function(data) { swal.close(); if (data.length) { $('#modal_dialog').modal('show'); $('.modal_count').html(data.length); - $('#modal-content').empty(); + $('#modal_dialog .modal-text').empty(); endpoints = ''; - $('#modal-content').append(``); + $('#modal_dialog .modal-text').append(``); for (endpoint in data){ endpoints += data[endpoint]['http_url'] + '\n' } $('#interesting_endpoints_text_area').append(endpoints); - $("#modal-footer").empty(); - $("#modal-footer").append(` Download Endpoints as txt`); - $("#modal-footer").append(` Copy Endpoints`); + $("#modal_dialog .modal-footer").empty(); + $("#modal_dialog .modal-footer").append(` Download Endpoints as txt`); + $("#modal_dialog .modal-footer").append(` Copy Endpoints`); } else{ swal.fire("No Interesting Endpoints", "Could not find any interesting Endpoints.", "warning", { @@ -1353,24 +1369,24 @@ function download_important_subdomains(scan_id=null, domain_id=null, domain_name else{ $('.modal-title').html(count + ' Subdomains marked as important'); } - $('.modal-text').empty(); $('#modal-footer').empty(); + $('.modal-text').empty(); $('#modal_dialog .modal-footer').empty(); // query subdomains $.getJSON(url, function(data) { swal.close(); if (data['subdomains'].length) { $('#modal_dialog').modal('show'); $('.modal_count').html(data['subdomains'].length); - $('#modal-content').empty(); + $('#modal_dialog .modal-text').empty(); subdomains = ''; - $('#modal-content').append(``); + $('#modal_dialog .modal-text').append(``); for (subdomain in data['subdomains']){ subdomain_obj = data['subdomains'][subdomain]; subdomains += subdomain_obj['name'] + '\n' } $('#all_subdomains_text_area').append(subdomains); - $("#modal-footer").empty(); - $("#modal-footer").append(` Download Subdomains as txt`); - $("#modal-footer").append(` Copy Subdomains`); + $("#modal_dialog .modal-footer").empty(); + $("#modal_dialog .modal-footer").append(` Download Subdomains as txt`); + $("#modal_dialog .modal-footer").append(` Copy Subdomains`); } else{ swal.fire("No Important Endpoints", "No subdomains has been marked as important.", "warning", { @@ -1410,28 +1426,28 @@ function download_endpoints(scan_id=null, domain_id=null, domain_name='', patter else{ $('.modal-title').html(count + ' Endpoints'); } - $('.modal-text').empty(); $('#modal-footer').empty(); + $('.modal-text').empty(); $('#modal_dialog .modal-footer').empty(); // query subdomains $.getJSON(url, function(data) { swal.close(); $('#modal_dialog').modal('show'); $('.modal_count').html(data['endpoints'].length); - 
$('#modal-content').empty(); + $('#modal_dialog .modal-text').empty(); endpoints = ''; - $('#modal-content').append(``); + $('#modal_dialog .modal-text').append(``); for (endpoint in data['endpoints']){ endpoint_obj = data['endpoints'][endpoint]; endpoints += endpoint_obj['http_url'] + '\n' } $('#all_endpoints_text_area').append(endpoints); - $("#modal-footer").empty(); + $("#modal_dialog .modal-footer").empty(); if (domain_name) { - $("#modal-footer").append(` Download Endpoints as txt`); + $("#modal_dialog .modal-footer").append(` Download Endpoints as txt`); } else{ - $("#modal-footer").append(` Download Endpoints as txt`); + $("#modal_dialog .modal-footer").append(` Download Endpoints as txt`); } - $("#modal-footer").append(` Copy Endpoints`); + $("#modal_dialog .modal-footer").append(` Copy Endpoints`); }).fail(function(){ }); } @@ -1585,17 +1601,17 @@ function downloadSelectedSubdomains(domain_name){ if (response['status']) { $('#modal_dialog').modal('show'); $('.modal_count').html(response['results'].length); - $('#modal-content').empty(); + $('#modal_dialog .modal-text').empty(); subdomains = ''; - $('#modal-content').append(``); + $('#modal_dialog .modal-text').append(``); for (subdomain in response['results']){ subdomain_obj = response['results'][subdomain]; subdomains += subdomain_obj + '\n' } $('#selected_subdomains_text_area').append(subdomains); - $("#modal-footer").empty(); - $("#modal-footer").append(` Download Subdomains as txt`); - $("#modal-footer").append(` Copy Subdomains`); + $("#modal_dialog .modal-footer").empty(); + $("#modal_dialog .modal-footer").append(` Download Subdomains as txt`); + $("#modal_dialog .modal-footer").append(` Copy Subdomains`); } else{ Swal.fire({ @@ -1685,7 +1701,10 @@ function initiateMultipleSubscan(){ } -function detect_subdomain_cms(http_url, http_status){ +$(document).on('click', '.detect_subdomain_cms_link', function(){ + var url = $(this).data('cms-url'); + var http_status = $(this).data('http-status'); + var cmsDetectorUrl = $(this).data('url'); if (http_status == 0) { var message = `reNgine has earlier identified that this subdomain did not return any HTTP status and likely the subdomain is not alive. reNgine may not be able to detect any CMS, would you still like to continue?`; } @@ -1693,6 +1712,7 @@ function detect_subdomain_cms(http_url, http_status){ var message = `reNgine has earlier identified that this subdomain has HTTP status as ${http_status} and likely that reNgine will not detect any CMS, would you still like to continue?`; } + var cmsDetectorUrl = $(this).data('url'); if (http_status != 200 || http_status == 0) { Swal.fire({ showCancelButton: true, @@ -1702,11 +1722,11 @@ function detect_subdomain_cms(http_url, http_status){ confirmButtonText: 'Detect CMS', }).then((result) => { if (result.isConfirmed) { - cms_detector_api_call(http_url); + cms_detector_api_call(cmsDetectorUrl, url); } }); } else{ - cms_detector_api_call(http_url); + cms_detector_api_call(cmsDetectorUrl,url); } -} +}); diff --git a/web/startScan/templates/organization/schedule_scan_ui.html b/web/startScan/templates/organization/schedule_scan_ui.html index 931c0ae46..df64f1226 100644 --- a/web/startScan/templates/organization/schedule_scan_ui.html +++ b/web/startScan/templates/organization/schedule_scan_ui.html @@ -82,7 +82,7 @@

      Select Scan Engine

      {% if custom_engine_count == 0 %} {% endif %} {% include "startScan/_items/scanEngine_select.html" %} diff --git a/web/startScan/templates/organization/start_scan.html b/web/startScan/templates/organization/start_scan.html index 96c932832..44b407776 100644 --- a/web/startScan/templates/organization/start_scan.html +++ b/web/startScan/templates/organization/start_scan.html @@ -37,7 +37,7 @@

      Select Scan Engine

      {% if custom_engine_count == 0 %} {% endif %} {% include "startScan/_items/scanEngine_select.html" %} diff --git a/web/startScan/templates/startScan/detail_scan.html b/web/startScan/templates/startScan/detail_scan.html index b24d1bb86..67d725c50 100644 --- a/web/startScan/templates/startScan/detail_scan.html +++ b/web/startScan/templates/startScan/detail_scan.html @@ -1,7 +1,7 @@ {% extends 'base/base.html' %} {% load static %} {% load humanize %} -{% load custom_tags %} +{% load custom_filters %} {% load mathfilters %} {% block title %} Detailed Scan @@ -239,7 +239,7 @@
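detail_scan.html now loads the custom_filters tag library instead of custom_tags. The module itself is not shown in this diff, so the snippet below is only a generic sketch of what a Django filter library of that kind looks like, with a made-up example filter:

    from django import template

    register = template.Library()

    @register.filter
    def split(value, sep=','):
        # Example only: lets templates write {{ some_string|split:"," }}.
        return value.split(sep)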
        {% for scan in most_recent_scans %}
        {% if history.id == scan.id %} @@ -410,7 +410,7 @@
         

         Target Information

        @@ -461,7 +461,7 @@

        TXT Records

        {% if not history.domain.domain_info %} {% endif %}
        @@ -1543,7 +1543,7 @@

        \nc

        `; + return `
        \nc
        `; } else{ - return `
        \nc
        `; + return `
        \nc
        `; } }, "targets": 0, @@ -1658,7 +1658,7 @@

        ${cve.name.toUpperCase()}`; + cve_cwe_badge += `${cve.name.toUpperCase()}`; }); } if (row['cwe_ids']) { @@ -1730,8 +1730,8 @@

        `; @@ -1827,7 +1827,7 @@

        '; + tech_badge = '
        ' + parse_technology('{% url 'api:querySubdomains' %}', row['technologies'], "primary", scan_id={{scan_history_id}}, domain_id=null) + '
        '; } if(row['is_interesting']) { @@ -2214,7 +2214,7 @@

        ${row['endpoint_count']} `; + endpoint_count_badge = `${row['endpoint_count']} `; } if(row['waf'].length){ @@ -2227,12 +2227,12 @@

        ${row['info_count'] + row['low_count'] + row['high_count'] + row['medium_count'] + row['critical_count']} `; - info_badge = `${row['info_count']} Info`; - low_badge = `${row['low_count']} Low`; - medium_badge = `${row['medium_count']} Med`; - high_badge = `${row['high_count']} High`; - critical_badge = `${row['critical_count']} Critical`; + total_vuln_badge = `${row['info_count'] + row['low_count'] + row['high_count'] + row['medium_count'] + row['critical_count']} `; + info_badge = `${row['info_count']} Info`; + low_badge = `${row['low_count']} Low`; + medium_badge = `${row['medium_count']} Med`; + high_badge = `${row['high_count']} High`; + critical_badge = `${row['critical_count']} Critical`; vuln_count_badge = `
        ` + (row['info_count'] > 0? info_badge : '') + (row['low_count'] > 0? low_badge : '') + (row['medium_count'] > 0? medium_badge : '') + (row['high_count'] > 0? high_badge : '') + (row['critical_count'] > 0? critical_badge : '') + `
        `; vuln_count_badge = (row['info_count'] + row['low_count'] + row['high_count'] + row['medium_count'] + row['critical_count'] > 0? total_vuln_badge : '') + vuln_count_badge; @@ -2243,12 +2243,12 @@

        ${row['directories_count']} `; + directory_count_badge = `${row['directories_count']} `; } var subscan_count_badge = ''; if (row['subscan_count']){ - subscan_count_badge = `${row['subscan_count']} `; + subscan_count_badge = `${row['subscan_count']} `; } tech_badge += content_type; @@ -2304,10 +2304,10 @@

        ${value.address}` + ip_badge += `${value.address}` } else{ - ip_badge += `${value.address}` + ip_badge += `${value.address}` } }); return ip_badge; @@ -2327,7 +2327,7 @@

        ${port_obj['number']}/${port_obj['service_name']}` + port_badge += `${port_obj['number']}/${port_obj['service_name']}` } } return port_badge; @@ -2373,13 +2373,13 @@

        @@ -2438,6 +2438,10 @@

        $(document).ready(function() { const ps = new PerfectScrollbar(document.querySelector('.endpoint-search')); - get_endpoints('{{current_project.slug}}', null, null, null); + get_endpoints('{% url 'api:endpoints-list' %}', '{% url 'api:subdomains-list' %}', '{{current_project.slug}}', null, null, null); var filter_cols = [ 'end_http_status_filter_checkbox', 'end_page_title_filter_checkbox', diff --git a/web/startScan/templates/startScan/history.html b/web/startScan/templates/startScan/history.html index 4af7877e6..e246df2cb 100644 --- a/web/startScan/templates/startScan/history.html +++ b/web/startScan/templates/startScan/history.html @@ -59,7 +59,7 @@
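The template now resolves API endpoints with {% url 'api:endpoints-list' %} and {% url 'api:subdomains-list' %} instead of hardcoding /api/listEndpoints/-style paths into the JavaScript. Those '-list' names are what a DRF DefaultRouter derives from its basenames, which suggests registrations along the lines below; the actual router module is outside this diff, so the serializer and viewset are illustrative:

    from rest_framework import serializers, viewsets
    from rest_framework.routers import DefaultRouter
    from startScan.models import EndPoint

    class EndPointSerializer(serializers.ModelSerializer):
        class Meta:
            model = EndPoint
            fields = ['id', 'http_url', 'http_status']

    class EndPointViewSet(viewsets.ReadOnlyModelViewSet):
        queryset = EndPoint.objects.all()
        serializer_class = EndPointSerializer

    router = DefaultRouter()
    # basename 'endpoints' yields route names 'endpoints-list'/'endpoints-detail';
    # the 'api:' prefix comes from the namespace these urls are included under.
    router.register(r'endpoints', EndPointViewSet, basename='endpoints')

    app_name = 'api'
    urlpatterns = router.urls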

        Filters

        @@ -80,6 +80,7 @@

        Filters

        Summary Scan Engine Used Last Scan + Initiated By Status Progress Action @@ -108,6 +109,9 @@
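The new Initiated By column is backed by the two user foreign keys added to ScanHistory earlier in this diff, and the asymmetry in their on_delete behavior is worth noting: deleting a user cascades away the scans they initiated, while scans they merely aborted survive with aborted_by set to NULL. With the related_name values from the diff, the column data reduces to simple reverse lookups (illustrative queries, not reNgine-ng code):

    from django.contrib.auth.models import User

    user = User.objects.get(username='rengine')
    started = user.initiated_scans.all()  # ScanHistory rows this user launched
    aborted = user.aborted_scans.all()    # ScanHistory rows this user aborted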

        Filters

        {{scan_history.start_scan_date|naturaltime}} + + {{scan_history.initiated_by.username}} + {% if scan_history.scan_status == -1 %} Pending @@ -120,6 +124,8 @@

        Filters

        Successful {% elif scan_history.scan_status == 3 %} Aborted +
        + Aborted by: {{scan_history.aborted_by}} {% else %} Unknown {% endif %} @@ -158,7 +164,7 @@

        Filters

        - View Results + View Results
        + - +
        @@ -349,6 +349,10 @@ subdomain_datatables.search($("#subdomains-search").val()).draw() ; }); + $('#load_important_subdomain_table_btn').click(function() { + subdomain_datatables.search('is_important=true').draw(); + }); + $('input[name=sub_http_status_filter_checkbox]').change(function() { if ($(this).is(':checked')) { subdomain_datatables.column(4).visible(true); diff --git a/web/startScan/templates/startScan/subscan_history.html b/web/startScan/templates/startScan/subscan_history.html index add9e21bd..9abca3013 100644 --- a/web/startScan/templates/startScan/subscan_history.html +++ b/web/startScan/templates/startScan/subscan_history.html @@ -97,7 +97,7 @@
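The new button simply pushes the literal token 'is_important=true' into the DataTables search box, which the server-side endpoint receives as its search[value] parameter; the matching API-side handling is not part of this diff. One plausible way such a token gets honored, assuming Subdomain carries an is_important flag (the helper below is illustrative):

    def filter_important_subdomains(queryset, search_value):
        # Special-case the token injected by the button; anything else is
        # left to the normal DataTables search handling.
        if 'is_important=true' in (search_value or ''):
            return queryset.filter(is_important=True)
        return queryset

    # e.g. inside a viewset's get_queryset():
    #   qs = filter_important_subdomains(qs, request.query_params.get('search[value]'))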

        Filters

        {{ organization.name }} {% endfor %}
        - Recent Scan + Recent Scan

        Filters

        - View Results + View Results
        diff --git a/web/startScan/templates/startScan/vulnerabilities.html b/web/startScan/templates/startScan/vulnerabilities.html index 65c339228..76717a0bd 100644 --- a/web/startScan/templates/startScan/vulnerabilities.html +++ b/web/startScan/templates/startScan/vulnerabilities.html @@ -1,7 +1,7 @@ {% extends 'base/base.html' %} {% load static %} {% load humanize %} -{% load custom_tags %} +{% load custom_filters %} {% block title %} All Vulnerabilities {% endblock title %} @@ -28,13 +28,13 @@ var is_vuln_grouping = false; var vuln_grouping_col = 3; {% if request.GET.domain %} - var vuln_ajax_url = '/api/listVulnerability/?project={{current_project.slug}}&format=datatables&domain={{request.GET.domain}}'; + var vuln_ajax_url = '{% url 'api:vulnerabilities-list' %}?project={{current_project.slug}}&format=datatables&domain={{request.GET.domain}}'; {% elif request.GET.vulnerability_name %} - var vuln_ajax_url = '/api/listVulnerability/?project={{current_project.slug}}&format=datatables&vulnerability_name={{request.GET.vulnerability_name}}'; + var vuln_ajax_url = '{% url 'api:vulnerabilities-list' %}?project={{current_project.slug}}&format=datatables&vulnerability_name={{request.GET.vulnerability_name}}'; {% elif request.GET.subdomain %} - var vuln_ajax_url = '/api/listVulnerability/?project={{current_project.slug}}&format=datatables&subdomain={{request.GET.subdomain}}'; + var vuln_ajax_url = '{% url 'api:vulnerabilities-list' %}?project={{current_project.slug}}&format=datatables&subdomain={{request.GET.subdomain}}'; {% else %} - var vuln_ajax_url = '/api/listVulnerability/?project={{current_project.slug}}&format=datatables'; + var vuln_ajax_url = '{% url 'api:vulnerabilities-list' %}?project={{current_project.slug}}&format=datatables'; {% endif %} var vulnerability_table = $('#vulnerability_results').DataTable({ "destroy": true, @@ -77,20 +77,13 @@ }, {"className": "text-center", "targets": []}, { - "render": function ( data, type, row ) { - if(row['open_status']){ - return `
        \nc
        `; - } - else{ - return `
        \nc
        `; - } - }, - "targets": 0, + "targets":0, "width":"15px", "className":"", "orderable":!1, render:function(e, a, t, n) { + return `
        \nc
        `; }, }, { "render": function ( data, type, row ) { if (data) { - return `  ${data.toUpperCase()}  `; + return `${data.toUpperCase()}`; } }, "targets": 1, @@ -98,7 +91,7 @@ { "render": function ( data, type, row ) { if (data) { - return `  ${data.toUpperCase()}  `; + return `${data.toUpperCase()}`; } }, "targets": 2, @@ -150,7 +143,7 @@ } if (row['cve_ids']) { row['cve_ids'].forEach(cve => { - cve_cwe_badge += `${cve.name.toUpperCase()}`; + cve_cwe_badge += `${cve.name.toUpperCase()}`; }); } if (row['cwe_ids']) { @@ -205,10 +198,10 @@ { "render": function ( data, type, row ) { if (data){ - return '  OPEN  ' + return 'OPEN' } else{ - return '  RESOLVED  ' + return 'RESOLVED' } }, "targets": 15, @@ -222,9 +215,9 @@
        `; } @@ -297,8 +290,7 @@ }); $('#vulnerability_results').on('click', 'tr' , function (e) { - console.log(e.target); - if ($(e.target).is('input[type="checkbox"]') || $(e.target).is('svg') || $(e.target).is('a') || $(e.target).is('th')) { + if ($(e.target).is('input[type="checkbox"]') || $(e.target).is('svg') || $(e.target).is('a') || $(e.target).is('th') || $(e.target).is('span')) { return; } var data = vulnerability_table.row(this).data(); diff --git a/web/startScan/tests.py b/web/startScan/tests.py deleted file mode 100644 index 7ce503c2d..000000000 --- a/web/startScan/tests.py +++ /dev/null @@ -1,3 +0,0 @@ -from django.test import TestCase - -# Create your tests here. diff --git a/web/startScan/tests/__init__.py b/web/startScan/tests/__init__.py new file mode 100644 index 000000000..cfbb5ef82 --- /dev/null +++ b/web/startScan/tests/__init__.py @@ -0,0 +1,2 @@ +from utils.test_base import * +from .test_start_scan import * diff --git a/web/startScan/tests/test_start_scan.py b/web/startScan/tests/test_start_scan.py new file mode 100644 index 000000000..fff4db232 --- /dev/null +++ b/web/startScan/tests/test_start_scan.py @@ -0,0 +1,227 @@ +""" +This file contains the test cases for the startScan views and models. +""" +import json +from unittest.mock import patch +from django.urls import reverse +from django.utils import timezone +from django.test import override_settings +from utils.test_base import BaseTestCase +from utils.test_utils import MockTemplate +from startScan.models import ScanHistory, Subdomain, EndPoint, Vulnerability, ScanActivity + +__all__ = [ + 'TestStartScanViews', + 'TestStartScanModels', +] + +class TestStartScanViews(BaseTestCase): + """Test cases for startScan views.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + @override_settings(CELERY_TASK_ALWAYS_EAGER=True) + def test_start_scan_view(self): + """Test the start scan view.""" + data = { + 'domain_name': self.data_generator.domain.name, + 'scan_mode': self.data_generator.engine_type.id, + 'importSubdomainTextArea': "www.example.com\nmail.example.com", + 'outOfScopeSubdomainTextarea': "www.example.com\nmail.example.com", + 'filterPath': "www.example.com", + } + response = self.client.post(reverse('start_scan', kwargs={ + 'slug': self.data_generator.project.slug, + 'domain_id': self.data_generator.domain.id + }), data) + self.assertEqual(response.status_code, 302) + self.assertEqual(response.url, f"/scan/{self.data_generator.project.slug}/history") + + scan = ScanHistory.objects.latest('id') + self.assertEqual(scan.domain, self.data_generator.domain) + self.assertEqual(scan.scan_type.id, self.data_generator.engine_type.id) + + def test_scan_history_view(self): + """Test the scan history view.""" + response = self.client.get(reverse('scan_history', kwargs={ + 'slug': self.data_generator.project.slug, + })) + self.assertEqual(response.status_code, 200) + self.assertIn('scan_history', response.context) + + def test_detail_scan_view(self): + """Test the detail scan view.""" + response = self.client.get(reverse('detail_scan', kwargs={ + 'slug': self.data_generator.project.slug, + 'id': self.data_generator.scan_history.id + })) + self.assertEqual(response.status_code, 200) + #self.assertIn('scan_history', response.context) + + @patch('startScan.views.delete_scan') + def test_delete_scan_view(self, mock_delete_scan): + """Test the delete scan view.""" + mock_delete_scan.return_value = True + response = self.client.post(reverse('delete_scan', 
kwargs={ + 'slug': self.data_generator.project.slug, + 'id': self.data_generator.scan_history.id, + })) + self.assertEqual(response.status_code, 200) + self.assertEqual(json.loads(response.content), {'status': 'true'}) + + @patch('startScan.views.delete_scan') + @MockTemplate.mock_template('base/_items/top_bar.html') + def test_delete_scan_view_failure(self, mock_delete_scan): + """Test the delete scan view when deletion fails.""" + mock_delete_scan.return_value = False + response = self.client.post(reverse('delete_scan', kwargs={ + 'slug': self.data_generator.project.slug, + 'id': 999, + })) + self.assertEqual(response.status_code, 404) + + def test_stop_scan_view(self): + """Test the stop scan view.""" + response = self.client.post(reverse('stop_scan', kwargs={ + 'id': self.data_generator.scan_history.id, + 'slug': self.data_generator.project.slug, + })) + self.assertEqual(response.status_code, 200) + self.assertIn('status', json.loads(response.content)) + + def test_export_subdomains_view(self): + """Test the export subdomains view.""" + response = self.client.get(reverse('export_subdomains', kwargs={ + 'scan_id': self.data_generator.scan_history.id, + 'slug': self.data_generator.project.slug, + })) + self.assertEqual(response.status_code, 200) + self.assertEqual(response['Content-Type'], 'text/plain') + + def test_export_empty_subdomains_view(self): + """Test the export subdomains view when there are no subdomains.""" + Subdomain.objects.all().delete() + + response = self.client.get(reverse('export_subdomains', kwargs={ + 'scan_id': self.data_generator.scan_history.id, + 'slug': self.data_generator.project.slug, + })) + self.assertEqual(response.status_code, 200) + self.assertEqual(response['Content-Type'], 'text/plain') + self.assertEqual(response.content.decode(), '') + + def test_export_endpoints_view(self): + """Test the export endpoints view.""" + response = self.client.get(reverse('export_endpoints', kwargs={ + 'scan_id': self.data_generator.scan_history.id, + 'slug': self.data_generator.project.slug, + })) + self.assertEqual(response.status_code, 200) + self.assertEqual(response['Content-Type'], 'text/plain') + + def test_export_empty_endpoints_view(self): + """Test the export endpoints view when there are no endpoints.""" + # Delete all endpoints + EndPoint.objects.all().delete() + + response = self.client.get(reverse('export_endpoints', kwargs={ + 'scan_id': self.data_generator.scan_history.id, + 'slug': self.data_generator.project.slug, + })) + self.assertEqual(response.status_code, 200) + self.assertEqual(response['Content-Type'], 'text/plain') + self.assertEqual(response.content.decode(), '') + +class TestStartScanModels(BaseTestCase): + """Test cases for startScan models.""" + + def setUp(self): + """Set up test environment.""" + super().setUp() + self.data_generator.create_project_full() + + def test_scan_history_model(self): + """Test the ScanHistory model.""" + self.assertIsInstance(self.data_generator.scan_history, ScanHistory) + self.assertEqual(str(self.data_generator.scan_history), self.data_generator.domain.name) + + def test_scan_history_model_with_missing_fields(self): + """Test the ScanHistory model with missing fields.""" + minimal_scan_history = ScanHistory.objects.create( + domain=self.data_generator.domain, + scan_type=self.data_generator.engine_type, + start_scan_date=timezone.now(), + ) + self.assertIsInstance(minimal_scan_history, ScanHistory) + self.assertEqual(str(minimal_scan_history), f"{self.data_generator.domain.name}") + 
self.assertIsNone(minimal_scan_history.initiated_by) + self.assertIsNone(minimal_scan_history.tasks) + + def test_subdomain_model(self): + """Test the Subdomain model.""" + self.assertIsInstance(self.data_generator.subdomain, Subdomain) + self.assertEqual(str(self.data_generator.subdomain), self.data_generator.subdomain.name) + + def test_subdomain_model_with_missing_fields(self): + """Test the Subdomain model with missing fields.""" + minimal_subdomain = Subdomain.objects.create( + name='test.example.com', + target_domain=self.data_generator.domain + ) + self.assertIsInstance(minimal_subdomain, Subdomain) + self.assertEqual(str(minimal_subdomain), 'test.example.com') + self.assertIsNone(minimal_subdomain.http_url) + self.assertIsNone(minimal_subdomain.discovered_date) + + def test_endpoint_model(self): + """Test the EndPoint model.""" + self.assertIsInstance(self.data_generator.endpoint, EndPoint) + self.assertEqual(str(self.data_generator.endpoint), self.data_generator.endpoint.http_url) + + def test_endpoint_model_with_missing_fields(self): + """Test the EndPoint model with missing fields.""" + minimal_endpoint = EndPoint.objects.create( + target_domain=self.data_generator.domain, + http_url='http://test.example.com' + ) + self.assertIsInstance(minimal_endpoint, EndPoint) + self.assertEqual(str(minimal_endpoint), 'http://test.example.com') + self.assertIsNone(minimal_endpoint.response_time) + self.assertIsNone(minimal_endpoint.discovered_date) + + def test_vulnerability_model(self): + """Test the Vulnerability model.""" + self.assertIsInstance(self.data_generator.vulnerabilities[0], Vulnerability) + self.assertEqual(str(self.data_generator.vulnerabilities[0].name), self.data_generator.vulnerabilities[0].name) + + def test_vulnerability_model_with_missing_fields(self): + """Test the Vulnerability model with missing fields.""" + minimal_vulnerability = Vulnerability.objects.create( + name='Test Vulnerability', + target_domain=self.data_generator.domain, + severity=1 + ) + self.assertIsInstance(minimal_vulnerability, Vulnerability) + self.assertEqual(str(minimal_vulnerability.name), 'Test Vulnerability') + self.assertIsNone(minimal_vulnerability.source) + self.assertIsNone(minimal_vulnerability.description) + + def test_scan_activity_model(self): + """Test the ScanActivity model.""" + self.assertIsInstance(self.data_generator.scan_activity, ScanActivity) + self.assertEqual(str(self.data_generator.scan_activity), "Test Type") + + def test_scan_activity_model_with_missing_fields(self): + """Test the ScanActivity model with missing fields.""" + minimal_scan_activity = ScanActivity.objects.create( + scan_of=self.data_generator.scan_history, + name="Test Type", + time=timezone.now(), + status=1 + ) + self.assertIsInstance(minimal_scan_activity, ScanActivity) + self.assertEqual(minimal_scan_activity.name, "Test Type") + self.assertIsNone(minimal_scan_activity.error_message) diff --git a/web/startScan/urls.py b/web/startScan/urls.py index 31054f069..55a06b1b8 100644 --- a/web/startScan/urls.py +++ b/web/startScan/urls.py @@ -3,107 +3,107 @@ urlpatterns = [ path( - '/history/scan', - views.scan_history, - name="scan_history"), - path( - '/history/subscan', - views.subscan_history, - name="subscan_history"), - path( - '/scheduled/', - views.scheduled_scan_view, - name="scheduled_scan_view"), - path( - '/detail/', + '/', views.detail_scan, name='detail_scan'), path( - 'create_report/', + '//create_report', views.create_report, name='create_report'), path( - '/all/subdomains', - views.all_subdomains, 
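The new test module exercises the start-scan flow under @override_settings(CELERY_TASK_ALWAYS_EAGER=True), which makes initiate_scan.apply_async() run synchronously in-process, so the view's side effects (the created ScanHistory row) can be asserted without a worker or broker. A standalone illustration of what that setting does:

    from celery import Celery

    app = Celery('demo')
    app.conf.task_always_eager = True  # the switch the test override flips

    @app.task
    def add(x, y):
        return x + y

    assert add.delay(2, 3).get() == 5  # executed inline, no worker needed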
diff --git a/web/startScan/urls.py b/web/startScan/urls.py
index 31054f069..55a06b1b8 100644
--- a/web/startScan/urls.py
+++ b/web/startScan/urls.py
@@ -3,107 +3,107 @@
 urlpatterns = [
     path(
-        '/history/scan',
-        views.scan_history,
-        name="scan_history"),
-    path(
-        '/history/subscan',
-        views.subscan_history,
-        name="subscan_history"),
-    path(
-        '/scheduled/',
-        views.scheduled_scan_view,
-        name="scheduled_scan_view"),
-    path(
-        '/detail/',
+        '/',
         views.detail_scan,
         name='detail_scan'),
     path(
-        'create_report/',
+        '//create_report',
         views.create_report,
         name='create_report'),
     path(
-        '/all/subdomains',
-        views.all_subdomains,
-        name='all_subdomains'),
+        '//delete',
+        views.delete_scan,
+        name='delete_scan'),
     path(
-        'detail/vuln/',
-        views.detail_vuln_scan,
-        name='detail_vuln_scan'),
+        '/endpoints',
+        views.all_endpoints,
+        name='all_endpoints'),
     path(
-        '/detail/vuln',
-        views.detail_vuln_scan,
-        name='all_vulns'),
+        '//endpoints/export',
+        views.export_endpoints,
+        name='export_endpoints'),
     path(
-        'visualise/',
+        '//stop',
+        views.stop_scan,
+        name='stop_scan'),
+    path(
+        '//subdomains/export',
+        views.export_subdomains,
+        name='export_subdomains'),
+    path(
+        '//urls/export',
+        views.export_urls,
+        name='export_http_urls'),
+    path(
+        '//visualise',
         views.visualise,
         name='visualise'),
     path(
-        '/detail/all/endpoint',
-        views.all_endpoints,
-        name='all_endpoints'),
+        '/history',
+        views.scan_history,
+        name="scan_history"),
     path(
-        '/start/',
-        views.start_scan_ui,
-        name='start_scan'),
+        '/multiple/start',
+        views.start_multiple_scan,
+        name='start_multiple_scan'),
     path(
-        '/schedule/target/',
-        views.schedule_scan,
-        name='schedule_scan'),
+        '/multiple/delete',
+        views.delete_scans,
+        name='delete_multiple_scans'),
     path(
-        '/schedule/organization/',
+        '/organization/schedule/',
         views.schedule_organization_scan,
         name='schedule_organization_scan'),
     path(
-        'export/subdomains/',
-        views.export_subdomains,
-        name='export_subdomains'),
-    path(
-        'export/endpoints/',
-        views.export_endpoints,
-        name='export_endpoints'),
+        '/organization/start/',
+        views.start_organization_scan,
+        name='start_organization_scan'),
     path(
-        'export/urls/',
-        views.export_urls,
-        name='export_http_urls'),
+        '/target/start/',
+        views.start_scan_ui,
+        name='start_scan'),
     path(
-        'delete/scan/',
-        views.delete_scan,
-        name='delete_scan'),
+        '/target/schedule/',
+        views.schedule_scan,
+        name='schedule_scan'),
     path(
-        'stop/scan/',
-        views.stop_scan,
-        name='stop_scan'),
+        '/scheduled',
+        views.scheduled_scan_view,
+        name="scheduled_scan_view"),
     path(
-        'delete/scheduled_task/',
+        '/scheduled_task/delete/',
         views.delete_scheduled_task,
         name='delete_scheduled_task'),
     path(
-        'toggle/scheduled_task/',
+        '/scheduled_task/toggle/',
         views.change_scheduled_task_status,
         name='change_scheduled_task_status'),
     path(
-        'toggle/vuln_status/',
-        views.change_vuln_status,
-        name='change_vuln_status'),
-    path(
-        '/start/multiple/',
-        views.start_multiple_scan,
-        name='start_multiple_scan'),
-    path(
-        '/start/organization/',
-        views.start_organization_scan,
-        name='start_organization_scan'),
-    path(
-        'delete/scan_results/',
+        '/scan_results/delete',
         views.delete_all_scan_results,
         name='delete_all_scan_results'),
     path(
-        'delete/screenshots/',
+        '/screenshots/delete',
         views.delete_all_screenshots,
         name='delete_all_screenshots'),
     path(
-        '/delete/multiple',
-        views.delete_scans,
-        name='delete_multiple_scans'),
+        '/subdomains',
+        views.all_subdomains,
+        name='all_subdomains'),
+    path(
+        '/subscan/history',
+        views.subscan_history,
+        name="subscan_history"),
+    path(
+        '/vulnerabilities',
+        views.detail_vuln_scan,
+        name='all_vulns'),
+    path(
+        '/vulnerability/',
+        views.detail_vuln_scan,
+        name='detail_vuln_scan'),
+    path(
+        '/vulnerability/toggle/',
+        views.change_vuln_status,
+        name='change_vuln_status'),
 ]
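With the urlpatterns reordered, every route is scoped to a project slug and each view receives that slug explicitly. Assuming Django path converters such as <slug:slug> and <int:id> in the patterns above (the view signatures in views.py below take slug first, e.g. def delete_scan(request, slug, id)), a single route/view pair presumably lines up like this sketch:

    from django.http import HttpResponse
    from django.urls import path

    # Hypothetical reconstruction of one entry from the hunk above:
    # the project slug comes first, then the scan id.
    def delete_scan(request, slug, id):
        return HttpResponse(f'would delete scan {id} of project {slug}')

    urlpatterns = [
        path('<slug:slug>/<int:id>/delete', delete_scan, name='delete_scan'),
    ]

Reversing such a route then needs both parameters, e.g. reverse('delete_scan', kwargs={'slug': 'default', 'id': 42}).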
diff --git a/web/startScan/views.py b/web/startScan/views.py
index 4cd0e82b5..d83e06076 100644
--- a/web/startScan/views.py
+++ b/web/startScan/views.py
@@ -1,8 +1,9 @@
-import markdown
+import markdown, json
 from celery import group
+from pathlib import Path
 from weasyprint import HTML
-from datetime import datetime
+from datetime import datetime, timedelta
 from django.contrib import messages
 from django.db.models import Count
 from django.http import HttpResponse, HttpResponseRedirect, JsonResponse
@@ -10,16 +11,17 @@
 from django.template.loader import get_template
 from django.urls import reverse
 from django.utils import timezone
-from django_celery_beat.models import (ClockedSchedule, IntervalSchedule, PeriodicTask)
+from django_celery_beat.models import ClockedSchedule, IntervalSchedule, PeriodicTask
 from rolepermissions.decorators import has_permission_decorator
 from reNgine.celery import app
-from reNgine.common_func import *
-from reNgine.definitions import ABORTED_TASK, SUCCESS_TASK
+from reNgine.common_func import logger, get_interesting_subdomains, create_scan_object, safe_int_cast
+from reNgine.settings import RENGINE_RESULTS
+from reNgine.definitions import ABORTED_TASK, SUCCESS_TASK, RUNNING_TASK, LIVE_SCAN, SCHEDULED_SCAN, PERM_INITATE_SCANS_SUBSCANS, PERM_MODIFY_SCAN_RESULTS, PERM_MODIFY_SCAN_REPORT, PERM_MODIFY_SYSTEM_CONFIGURATIONS, FOUR_OH_FOUR_URL
 from reNgine.tasks import create_scan_activity, initiate_scan, run_command
-from scanEngine.models import EngineType
-from startScan.models import *
-from targetApp.models import *
+from scanEngine.models import EngineType, VulnerabilityReportSetting
+from startScan.models import ScanHistory, SubScan, Email, Employee, Subdomain, EndPoint, Vulnerability, VulnerabilityTags, IpAddress, CountryISO, ScanActivity, CveId, CweId
+from targetApp.models import Domain, Organization
 
 
 def scan_history(request, slug):
@@ -33,13 +35,12 @@ def subscan_history(request, slug):
     context = {'scan_history_active': 'active', "subscans": subscans}
     return render(request, 'startScan/subscan_history.html', context)
 
-
 def detail_scan(request, id, slug):
     ctx = {}
     # Get scan objects
     scan = get_object_or_404(ScanHistory, id=id)
-    domain_id = scan.domain.id
+    domain_id = safe_int_cast(scan.domain.id)
     scan_engines = EngineType.objects.order_by('engine_name').all()
     recent_scans = ScanHistory.objects.filter(domain__id=domain_id)
     last_scans = (
@@ -237,13 +238,11 @@ def all_subdomains(request, slug):
 def detail_vuln_scan(request, slug, id=None):
     if id:
         history = get_object_or_404(ScanHistory, id=id)
-        history.filter(domain__project__slug=slug)
         context = {'scan_history_id': id, 'history': history}
     else:
         context = {'vuln_scan_active': 'true'}
     return render(request, 'startScan/vulnerabilities.html', context)
 
-
 def all_endpoints(request, slug):
     context = {
         'scan_history_active': 'active'
@@ -267,10 +266,14 @@ def start_scan_ui(request, slug, domain_id):
         filterPath = ''
     # Get engine type
-    engine_id = request.POST['scan_mode']
+    engine_id = safe_int_cast(request.POST['scan_mode'])
     # Create ScanHistory object
-    scan_history_id = create_scan_object(domain_id, engine_id)
+    scan_history_id = create_scan_object(
+        host_id=domain_id,
+        engine_id=engine_id,
+        initiated_by_id=request.user.id
+    )
     scan = ScanHistory.objects.get(pk=scan_history_id)
 
     # Start the celery task
@@ -279,10 +282,11 @@
         'domain_id': domain.id,
         'engine_id': engine_id,
         'scan_type': LIVE_SCAN,
-        'results_dir': '/usr/src/scan_results',
+        'results_dir': RENGINE_RESULTS,
         'imported_subdomains': subdomains_in,
         'out_of_scope_subdomains': subdomains_out,
-        'url_filter': filterPath
+        'url_filter': filterPath,
+        'initiated_by_id': request.user.id
     }
     initiate_scan.apply_async(kwargs=kwargs)
     scan.save()
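detail_scan and start_scan_ui now funnel user-controlled identifiers through safe_int_cast, imported from reNgine.common_func. Its body is not part of this diff; a minimal sketch of what such a helper presumably does:

    def safe_int_cast(value, default=None):
        # Hypothetical sketch -- the real implementation lives in
        # reNgine/common_func.py and is not shown in this diff.
        # Coerce form input such as request.POST['scan_mode'] to int
        # instead of letting a tampered string reach the ORM.
        try:
            return int(value)
        except (TypeError, ValueError):
            return default

    safe_int_cast('3')        # 3
    safe_int_cast('evil', 0)  # 0 instead of a ValueError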
@@ -316,14 +320,18 @@ def start_multiple_scan(request, slug):
     if request.POST.get('scan_mode', 0):
         # if scan mode is available, then start the scan
         # get engine type
-        engine_id = request.POST['scan_mode']
+        engine_id = safe_int_cast(request.POST['scan_mode'])
         list_of_domains = request.POST['list_of_domain_id']
 
         grouped_scans = []
 
         for domain_id in list_of_domains.split(","):
             # Start the celery task
-            scan_history_id = create_scan_object(domain_id, engine_id)
+            scan_history_id = create_scan_object(
+                host_id=domain_id,
+                engine_id=engine_id,
+                initiated_by_id=request.user.id
+            )
 
             # domain = get_object_or_404(Domain, id=domain_id)
             kwargs = {
@@ -331,7 +339,8 @@
                 'domain_id': domain_id,
                 'engine_id': engine_id,
                 'scan_type': LIVE_SCAN,
-                'results_dir': '/usr/src/scan_results',
+                'results_dir': RENGINE_RESULTS,
+                'initiated_by_id': request.user.id
                 # TODO: Add this to multiple scan view
                 # 'imported_subdomains': subdomains_in,
                 # 'out_of_scope_subdomains': subdomains_out
@@ -379,7 +388,7 @@
     }
     return render(request, 'startScan/start_multiple_scan_ui.html', context)
 
-def export_subdomains(request, scan_id):
+def export_subdomains(request, slug, scan_id):
     subdomain_list = Subdomain.objects.filter(scan_history__id=scan_id)
     scan = ScanHistory.objects.get(id=scan_id)
     response_body = ""
@@ -394,7 +403,7 @@
     return response
 
-def export_endpoints(request, scan_id):
+def export_endpoints(request, slug, scan_id):
     endpoint_list = EndPoint.objects.filter(scan_history__id=scan_id)
     scan = ScanHistory.objects.get(id=scan_id)
     response_body = ""
@@ -409,7 +418,7 @@
     return response
 
-def export_urls(request, scan_id):
+def export_urls(request, slug, scan_id):
     urls_list = Subdomain.objects.filter(scan_history__id=scan_id)
     scan = ScanHistory.objects.get(id=scan_id)
     response_body = ""
@@ -426,7 +435,7 @@
 @has_permission_decorator(PERM_MODIFY_SCAN_RESULTS, redirect_url=FOUR_OH_FOUR_URL)
-def delete_scan(request, id):
+def delete_scan(request, slug, id):
     obj = get_object_or_404(ScanHistory, id=id)
     if request.method == "POST":
         delete_dir = obj.results_dir
@@ -449,7 +458,7 @@
 @has_permission_decorator(PERM_INITATE_SCANS_SUBSCANS, redirect_url=FOUR_OH_FOUR_URL)
-def stop_scan(request, id):
+def stop_scan(request, slug, id):
     if request.method == "POST":
         scan = get_object_or_404(ScanHistory, id=id)
         scan.scan_status = ABORTED_TASK
@@ -527,29 +536,42 @@ def schedule_scan(request, host_id, slug):
             'scan_history_id': 1,
             'scan_type': SCHEDULED_SCAN,
             'imported_subdomains': subdomains_in,
-            'out_of_scope_subdomains': subdomains_out
+            'out_of_scope_subdomains': subdomains_out,
+            'initiated_by_id': request.user.id
         }
-        PeriodicTask.objects.create(interval=schedule,
-                                    name=task_name,
-                                    task='reNgine.tasks.initiate_scan',
-                                    kwargs=json.dumps(kwargs))
+        PeriodicTask.objects.create(
+            interval=schedule,
+            name=task_name,
+            task='initiate_scan',
+            kwargs=json.dumps(kwargs)
+        )
     elif scheduled_mode == 'clocked':
         schedule_time = request.POST['scheduled_time']
+        timezone_offset = int(request.POST.get('timezone_offset', 0))
+        # Parse the submitted local time
+        local_time = datetime.strptime(schedule_time, '%Y-%m-%d %H:%M')
+        # Shift it to UTC using the browser-reported offset
+        utc_time = local_time + timedelta(minutes=timezone_offset)
+        # Make the datetime timezone-aware in UTC
+        utc_time = timezone.make_aware(utc_time, timezone.utc)
         clock, _ = ClockedSchedule.objects.get_or_create(
-            clocked_time=schedule_time)
+            clocked_time=utc_time)
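The clocked branch above converts the browser-local schedule time to UTC before handing it to ClockedSchedule, because JavaScript's getTimezoneOffset() reports the minutes to add to local time to reach UTC. A worked instance of the same arithmetic using only the standard library (the offset and timestamps are invented for illustration):

    from datetime import datetime, timedelta, timezone

    # A user in UTC-5 submits '2024-01-15 09:30'; the browser reports
    # a timezone_offset of +300 minutes.
    local_time = datetime.strptime('2024-01-15 09:30', '%Y-%m-%d %H:%M')
    utc_time = local_time + timedelta(minutes=300)
    # Stdlib equivalent of Django's timezone.make_aware(utc_time, timezone.utc)
    utc_time = utc_time.replace(tzinfo=timezone.utc)
    print(utc_time)  # 2024-01-15 14:30:00+00:00 -- when Celery Beat fires

Storing the aware UTC instant keeps the one_off PeriodicTask independent of the server's local timezone.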
        kwargs = {
            'scan_history_id': 0,
            'domain_id': host_id,
            'engine_id': engine.id,
            'scan_type': SCHEDULED_SCAN,
            'imported_subdomains': subdomains_in,
-            'out_of_scope_subdomains': subdomains_out
+            'out_of_scope_subdomains': subdomains_out,
+            'initiated_by_id': request.user.id
        }
-        PeriodicTask.objects.create(clocked=clock,
-                                    one_off=True,
-                                    name=task_name,
-                                    task='reNgine.tasks.initiate_scan',
-                                    kwargs=json.dumps(kwargs))
+        PeriodicTask.objects.create(
+            clocked=clock,
+            one_off=True,
+            name=task_name,
+            task='initiate_scan',
+            kwargs=json.dumps(kwargs)
+        )
     messages.add_message(
         request,
         messages.INFO,
@@ -586,7 +608,7 @@ def scheduled_scan_view(request, slug):
 @has_permission_decorator(PERM_MODIFY_SCAN_RESULTS, redirect_url=FOUR_OH_FOUR_URL)
-def delete_scheduled_task(request, id):
+def delete_scheduled_task(request, slug, id):
     task_object = get_object_or_404(PeriodicTask, id=id)
     if request.method == "POST":
         task_object.delete()
@@ -605,7 +627,7 @@
 @has_permission_decorator(PERM_MODIFY_SCAN_RESULTS, redirect_url=FOUR_OH_FOUR_URL)
-def change_scheduled_task_status(request, id):
+def change_scheduled_task_status(request, slug, id):
     if request.method == 'POST':
         task = PeriodicTask.objects.get(id=id)
         task.enabled = not task.enabled
@@ -613,7 +635,7 @@
     return HttpResponse('')
 
-def change_vuln_status(request, id):
+def change_vuln_status(request, slug, id):
     if request.method == 'POST':
         vuln = Vulnerability.objects.get(id=id)
         vuln.open_status = not vuln.open_status
@@ -621,32 +643,10 @@
     return HttpResponse('')
 
-def create_scan_object(host_id, engine_id):
-    '''
-    create task with pending status so that celery task will execute when
-    threads are free
-    '''
-    # get current time
-    current_scan_time = timezone.now()
-    # fetch engine and domain object
-    engine = EngineType.objects.get(pk=engine_id)
-    domain = Domain.objects.get(pk=host_id)
-    scan = ScanHistory()
-    scan.scan_status = INITIATED_TASK
-    scan.domain = domain
-    scan.scan_type = engine
-    scan.start_scan_date = current_scan_time
-    scan.save()
-    # save last scan date for domain model
-    domain.start_scan_date = current_scan_time
-    domain.save()
-    return scan.id
-
-
 @has_permission_decorator(PERM_MODIFY_SYSTEM_CONFIGURATIONS, redirect_url=FOUR_OH_FOUR_URL)
-def delete_all_scan_results(request):
+def delete_all_scan_results(request, slug):
     if request.method == 'POST':
-        ScanHistory.objects.all().delete()
+        ScanHistory.objects.filter(project__slug=slug).delete()
         messageData = {'status': 'true'}
         messages.add_message(
             request,
@@ -656,9 +656,11 @@
 @has_permission_decorator(PERM_MODIFY_SYSTEM_CONFIGURATIONS, redirect_url=FOUR_OH_FOUR_URL)
-def delete_all_screenshots(request):
+def delete_all_screenshots(request, slug):
     if request.method == 'POST':
-        run_command('rm -rf /usr/src/scan_results/*')
+        domains = Domain.objects.filter(project__slug=slug)
+        for domain in domains:
+            run_command(f'rm -rf {str(Path(RENGINE_RESULTS) / domain.name)}')
         messageData = {'status': 'true'}
         messages.add_message(
             request,
@@ -680,11 +682,15 @@ def visualise(request, id):
 def start_organization_scan(request, id, slug):
     organization = get_object_or_404(Organization, id=id)
     if request.method == "POST":
-        engine_id = request.POST['scan_mode']
+        engine_id = safe_int_cast(request.POST['scan_mode'])
         # Start Celery task for each organization's domains
         for domain in organization.get_domains():
-            scan_history_id = create_scan_object(domain.id, engine_id)
+            scan_history_id = 
create_scan_object( + host_id=domain.id, + engine_id=engine_id, + initiated_by_id=request.user.id + ) scan = ScanHistory.objects.get(pk=scan_history_id) kwargs = { @@ -692,7 +698,8 @@ def start_organization_scan(request, id, slug): 'domain_id': domain.id, 'engine_id': engine_id, 'scan_type': LIVE_SCAN, - 'results_dir': '/usr/src/scan_results', + 'results_dir': RENGINE_RESULTS, + 'initiated_by_id': request.user.id, # TODO: Add this to multiple scan view # 'imported_subdomains': subdomains_in, # 'out_of_scope_subdomains': subdomains_out @@ -760,12 +767,13 @@ def schedule_organization_scan(request, slug, id): 'engine_id': engine.id, 'scan_history_id': 0, 'scan_type': SCHEDULED_SCAN, - 'imported_subdomains': None + 'imported_subdomains': None, + 'initiated_by_id': request.user.id }) PeriodicTask.objects.create( interval=schedule, name=task_name, - task='reNgine.tasks.initiate_scan', + task='initiate_scan', kwargs=_kwargs ) @@ -780,12 +788,13 @@ def schedule_organization_scan(request, slug, id): 'engine_id': engine.id, 'scan_history_id': 0, 'scan_type': LIVE_SCAN, - 'imported_subdomains': None + 'imported_subdomains': None, + 'initiated_by_id': request.user.id }) PeriodicTask.objects.create(clocked=clock, one_off=True, name=task_name, - task='reNgine.tasks.initiate_scan', + task='initiate_scan', kwargs=_kwargs ) @@ -796,7 +805,7 @@ def schedule_organization_scan(request, slug, id): messages.INFO, f'Scan started for {ndomains} domains in organization {organization.name}' ) - return HttpResponseRedirect(reverse('scheduled_scan_view', kwargs={'slug': slug, 'id': id})) + return HttpResponseRedirect(reverse('scheduled_scan_view', kwargs={'slug': slug})) # GET request engine = EngineType.objects @@ -839,7 +848,7 @@ def customize_report(request, id): @has_permission_decorator(PERM_MODIFY_SCAN_REPORT, redirect_url=FOUR_OH_FOUR_URL) -def create_report(request, id): +def create_report(request, slug, id): primary_color = '#FFB74D' secondary_color = '#212121' # get report type diff --git a/web/static/assets/js/app.min.js b/web/static/assets/js/app.min.js index d7719723a..30ceffaea 100644 --- a/web/static/assets/js/app.min.js +++ b/web/static/assets/js/app.min.js @@ -493,4 +493,3 @@ })(), Waves.init(), feather.replace(); -//# sourceMappingURL=app.min.js.map diff --git a/web/static/assets/js/vendor.min.js b/web/static/assets/js/vendor.min.js index 2b9577a68..b894a7164 100644 --- a/web/static/assets/js/vendor.min.js +++ b/web/static/assets/js/vendor.min.js @@ -1,2 +1 @@ !function(e,t){"use strict";"object"==typeof module&&"object"==typeof module.exports?module.exports=e.document?t(e,!0):function(e){if(!e.document)throw new Error("jQuery requires a window with a document");return t(e)}:t(e)}("undefined"!=typeof window?window:this,function(e,t){"use strict";var n=[],i=Object.getPrototypeOf,r=n.slice,o=n.flat?function(e){return n.flat.call(e)}:function(e){return n.concat.apply([],e)},s=n.push,a=n.indexOf,l={},c=l.toString,u=l.hasOwnProperty,h=u.toString,p=h.call(Object),f={},d=function(e){return"function"==typeof e&&"number"!=typeof e.nodeType&&"function"!=typeof e.item},y=function(e){return null!=e&&e===e.window},g=e.document,m={type:!0,src:!0,nonce:!0,noModule:!0};function v(e,t,n){var i,r,o=(n=n||g).createElement("script");if(o.text=e,t)for(i in m)(r=t[i]||t.getAttribute&&t.getAttribute(i))&&o.setAttribute(i,r);n.head.appendChild(o).parentNode.removeChild(o)}function x(e){return null==e?e+"":"object"==typeof e||"function"==typeof e?l[c.call(e)]||"object":typeof e}var b="3.6.0",w=function(e,t){return new 
w.fn.init(e,t)};function _(e){var t=!!e&&"length"in e&&e.length,n=x(e);return!d(e)&&!y(e)&&("array"===n||0===t||"number"==typeof t&&0+~]|"+P+")"+P+"*"),$=new RegExp(P+"|>"),U=new RegExp(W),X=new RegExp("^"+I+"$"),Y={ID:new RegExp("^#("+I+")"),CLASS:new RegExp("^\\.("+I+")"),TAG:new RegExp("^("+I+"|[*])"),ATTR:new RegExp("^"+R),PSEUDO:new RegExp("^"+W),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+P+"*(even|odd|(([+-]|)(\\d*)n|)"+P+"*(?:([+-]|)"+P+"*(\\d+)|))"+P+"*\\)|)","i"),bool:new RegExp("^(?:"+H+")$","i"),needsContext:new RegExp("^"+P+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+P+"*((?:-\\d)?\\d*)"+P+"*\\)|)(?=[^-]|$)","i")},Q=/HTML$/i,G=/^(?:input|select|textarea|button)$/i,K=/^h\d$/i,J=/^[^{]+\{\s*\[native \w/,Z=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,ee=/[+~]/,te=new RegExp("\\\\[\\da-fA-F]{1,6}"+P+"?|\\\\([^\\r\\n\\f])","g"),ne=function(e,t){var n="0x"+e.slice(1)-65536;return t||(n<0?String.fromCharCode(n+65536):String.fromCharCode(n>>10|55296,1023&n|56320))},ie=/([\0-\x1f\x7f]|^-?\d)|^-$|[^\0-\x1f\x7f-\uFFFF\w-]/g,re=function(e,t){return t?"\0"===e?"�":e.slice(0,-1)+"\\"+e.charCodeAt(e.length-1).toString(16)+" ":"\\"+e},oe=function(){p()},se=be(function(e){return!0===e.disabled&&"fieldset"===e.nodeName.toLowerCase()},{dir:"parentNode",next:"legend"});try{N.apply(M=D.call(w.childNodes),w.childNodes),M[w.childNodes.length].nodeType}catch(t){N={apply:M.length?function(e,t){j.apply(e,D.call(t))}:function(e,t){for(var n=e.length,i=0;e[n++]=t[i++];);e.length=n-1}}}function ae(e,t,i,r){var o,a,c,u,h,d,m,v=t&&t.ownerDocument,w=t?t.nodeType:9;if(i=i||[],"string"!=typeof e||!e||1!==w&&9!==w&&11!==w)return i;if(!r&&(p(t),t=t||f,y)){if(11!==w&&(h=Z.exec(e)))if(o=h[1]){if(9===w){if(!(c=t.getElementById(o)))return i;if(c.id===o)return i.push(c),i}else if(v&&(c=v.getElementById(o))&&x(t,c)&&c.id===o)return i.push(c),i}else{if(h[2])return N.apply(i,t.getElementsByTagName(e)),i;if((o=h[3])&&n.getElementsByClassName&&t.getElementsByClassName)return N.apply(i,t.getElementsByClassName(o)),i}if(n.qsa&&!O[e+" "]&&(!g||!g.test(e))&&(1!==w||"object"!==t.nodeName.toLowerCase())){if(m=e,v=t,1===w&&($.test(e)||F.test(e))){for((v=ee.test(e)&&me(t.parentNode)||t)===t&&n.scope||((u=t.getAttribute("id"))?u=u.replace(ie,re):t.setAttribute("id",u=b)),a=(d=s(e)).length;a--;)d[a]=(u?"#"+u:":scope")+" "+xe(d[a]);m=d.join(",")}try{return N.apply(i,v.querySelectorAll(m)),i}catch(t){O(e,!0)}finally{u===b&&t.removeAttribute("id")}}}return l(e.replace(V,"$1"),t,i,r)}function le(){var e=[];return function t(n,r){return e.push(n+" ")>i.cacheLength&&delete t[e.shift()],t[n+" "]=r}}function ce(e){return e[b]=!0,e}function ue(e){var t=f.createElement("fieldset");try{return!!e(t)}catch(e){return!1}finally{t.parentNode&&t.parentNode.removeChild(t),t=null}}function he(e,t){for(var n=e.split("|"),r=n.length;r--;)i.attrHandle[n[r]]=t}function pe(e,t){var n=t&&e,i=n&&1===e.nodeType&&1===t.nodeType&&e.sourceIndex-t.sourceIndex;if(i)return i;if(n)for(;n=n.nextSibling;)if(n===t)return-1;return e?1:-1}function fe(e){return function(t){return"input"===t.nodeName.toLowerCase()&&t.type===e}}function de(e){return function(t){var n=t.nodeName.toLowerCase();return("input"===n||"button"===n)&&t.type===e}}function ye(e){return function(t){return"form"in t?t.parentNode&&!1===t.disabled?"label"in t?"label"in t.parentNode?t.parentNode.disabled===e:t.disabled===e:t.isDisabled===e||t.isDisabled!==!e&&se(t)===e:t.disabled===e:"label"in t&&t.disabled===e}}function ge(e){return ce(function(t){return 
t=+t,ce(function(n,i){for(var r,o=e([],n.length,t),s=o.length;s--;)n[r=o[s]]&&(n[r]=!(i[r]=n[r]))})})}function me(e){return e&&void 0!==e.getElementsByTagName&&e}for(t in n=ae.support={},o=ae.isXML=function(e){var t=e&&e.namespaceURI,n=e&&(e.ownerDocument||e).documentElement;return!Q.test(t||n&&n.nodeName||"HTML")},p=ae.setDocument=function(e){var t,r,s=e?e.ownerDocument||e:w;return s!=f&&9===s.nodeType&&s.documentElement&&(d=(f=s).documentElement,y=!o(f),w!=f&&(r=f.defaultView)&&r.top!==r&&(r.addEventListener?r.addEventListener("unload",oe,!1):r.attachEvent&&r.attachEvent("onunload",oe)),n.scope=ue(function(e){return d.appendChild(e).appendChild(f.createElement("div")),void 0!==e.querySelectorAll&&!e.querySelectorAll(":scope fieldset div").length}),n.attributes=ue(function(e){return e.className="i",!e.getAttribute("className")}),n.getElementsByTagName=ue(function(e){return e.appendChild(f.createComment("")),!e.getElementsByTagName("*").length}),n.getElementsByClassName=J.test(f.getElementsByClassName),n.getById=ue(function(e){return d.appendChild(e).id=b,!f.getElementsByName||!f.getElementsByName(b).length}),n.getById?(i.filter.ID=function(e){var t=e.replace(te,ne);return function(e){return e.getAttribute("id")===t}},i.find.ID=function(e,t){if(void 0!==t.getElementById&&y){var n=t.getElementById(e);return n?[n]:[]}}):(i.filter.ID=function(e){var t=e.replace(te,ne);return function(e){var n=void 0!==e.getAttributeNode&&e.getAttributeNode("id");return n&&n.value===t}},i.find.ID=function(e,t){if(void 0!==t.getElementById&&y){var n,i,r,o=t.getElementById(e);if(o){if((n=o.getAttributeNode("id"))&&n.value===e)return[o];for(r=t.getElementsByName(e),i=0;o=r[i++];)if((n=o.getAttributeNode("id"))&&n.value===e)return[o]}return[]}}),i.find.TAG=n.getElementsByTagName?function(e,t){return void 0!==t.getElementsByTagName?t.getElementsByTagName(e):n.qsa?t.querySelectorAll(e):void 0}:function(e,t){var n,i=[],r=0,o=t.getElementsByTagName(e);if("*"===e){for(;n=o[r++];)1===n.nodeType&&i.push(n);return i}return o},i.find.CLASS=n.getElementsByClassName&&function(e,t){if(void 0!==t.getElementsByClassName&&y)return t.getElementsByClassName(e)},m=[],g=[],(n.qsa=J.test(f.querySelectorAll))&&(ue(function(e){var t;d.appendChild(e).innerHTML="",e.querySelectorAll("[msallowcapture^='']").length&&g.push("[*^$]="+P+"*(?:''|\"\")"),e.querySelectorAll("[selected]").length||g.push("\\["+P+"*(?:value|"+H+")"),e.querySelectorAll("[id~="+b+"-]").length||g.push("~="),(t=f.createElement("input")).setAttribute("name",""),e.appendChild(t),e.querySelectorAll("[name='']").length||g.push("\\["+P+"*name"+P+"*="+P+"*(?:''|\"\")"),e.querySelectorAll(":checked").length||g.push(":checked"),e.querySelectorAll("a#"+b+"+*").length||g.push(".#.+[+~]"),e.querySelectorAll("\\\f"),g.push("[\\r\\n\\f]")}),ue(function(e){e.innerHTML="";var t=f.createElement("input");t.setAttribute("type","hidden"),e.appendChild(t).setAttribute("name","D"),e.querySelectorAll("[name=d]").length&&g.push("name"+P+"*[*^$|!~]?="),2!==e.querySelectorAll(":enabled").length&&g.push(":enabled",":disabled"),d.appendChild(e).disabled=!0,2!==e.querySelectorAll(":disabled").length&&g.push(":enabled",":disabled"),e.querySelectorAll("*,:x"),g.push(",.*:")})),(n.matchesSelector=J.test(v=d.matches||d.webkitMatchesSelector||d.mozMatchesSelector||d.oMatchesSelector||d.msMatchesSelector))&&ue(function(e){n.disconnectedMatch=v.call(e,"*"),v.call(e,"[s!='']:x"),m.push("!=",W)}),g=g.length&&new RegExp(g.join("|")),m=m.length&&new 
RegExp(m.join("|")),t=J.test(d.compareDocumentPosition),x=t||J.test(d.contains)?function(e,t){var n=9===e.nodeType?e.documentElement:e,i=t&&t.parentNode;return e===i||!(!i||1!==i.nodeType||!(n.contains?n.contains(i):e.compareDocumentPosition&&16&e.compareDocumentPosition(i)))}:function(e,t){if(t)for(;t=t.parentNode;)if(t===e)return!0;return!1},S=t?function(e,t){if(e===t)return h=!0,0;var i=!e.compareDocumentPosition-!t.compareDocumentPosition;return i||(1&(i=(e.ownerDocument||e)==(t.ownerDocument||t)?e.compareDocumentPosition(t):1)||!n.sortDetached&&t.compareDocumentPosition(e)===i?e==f||e.ownerDocument==w&&x(w,e)?-1:t==f||t.ownerDocument==w&&x(w,t)?1:u?z(u,e)-z(u,t):0:4&i?-1:1)}:function(e,t){if(e===t)return h=!0,0;var n,i=0,r=e.parentNode,o=t.parentNode,s=[e],a=[t];if(!r||!o)return e==f?-1:t==f?1:r?-1:o?1:u?z(u,e)-z(u,t):0;if(r===o)return pe(e,t);for(n=e;n=n.parentNode;)s.unshift(n);for(n=t;n=n.parentNode;)a.unshift(n);for(;s[i]===a[i];)i++;return i?pe(s[i],a[i]):s[i]==w?-1:a[i]==w?1:0}),f},ae.matches=function(e,t){return ae(e,null,null,t)},ae.matchesSelector=function(e,t){if(p(e),n.matchesSelector&&y&&!O[t+" "]&&(!m||!m.test(t))&&(!g||!g.test(t)))try{var i=v.call(e,t);if(i||n.disconnectedMatch||e.document&&11!==e.document.nodeType)return i}catch(e){O(t,!0)}return 0":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace(te,ne),e[3]=(e[3]||e[4]||e[5]||"").replace(te,ne),"~="===e[2]&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),"nth"===e[1].slice(0,3)?(e[3]||ae.error(e[0]),e[4]=+(e[4]?e[5]+(e[6]||1):2*("even"===e[3]||"odd"===e[3])),e[5]=+(e[7]+e[8]||"odd"===e[3])):e[3]&&ae.error(e[0]),e},PSEUDO:function(e){var t,n=!e[6]&&e[2];return Y.CHILD.test(e[0])?null:(e[3]?e[2]=e[4]||e[5]||"":n&&U.test(n)&&(t=s(n,!0))&&(t=n.indexOf(")",n.length-t)-n.length)&&(e[0]=e[0].slice(0,t),e[2]=n.slice(0,t)),e.slice(0,3))}},filter:{TAG:function(e){var t=e.replace(te,ne).toLowerCase();return"*"===e?function(){return!0}:function(e){return e.nodeName&&e.nodeName.toLowerCase()===t}},CLASS:function(e){var t=A[e+" "];return t||(t=new RegExp("(^|"+P+")"+e+"("+P+"|$)"))&&A(e,function(e){return t.test("string"==typeof e.className&&e.className||void 0!==e.getAttribute&&e.getAttribute("class")||"")})},ATTR:function(e,t,n){return function(i){var r=ae.attr(i,e);return null==r?"!="===t:!t||(r+="","="===t?r===n:"!="===t?r!==n:"^="===t?n&&0===r.indexOf(n):"*="===t?n&&-1:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i;function C(e,t,n){return d(t)?w.grep(e,function(e,i){return!!t.call(e,i,e)!==n}):t.nodeType?w.grep(e,function(e){return e===t!==n}):"string"!=typeof t?w.grep(e,function(e){return-1)[^>]*|#([\w-]+))$/;(w.fn.init=function(e,t,n){var i,r;if(!e)return this;if(n=n||M,"string"==typeof e){if(!(i="<"===e[0]&&">"===e[e.length-1]&&3<=e.length?[null,e,null]:L.exec(e))||!i[1]&&t)return!t||t.jquery?(t||n).find(e):this.constructor(t).find(e);if(i[1]){if(t=t instanceof w?t[0]:t,w.merge(this,w.parseHTML(i[1],t&&t.nodeType?t.ownerDocument||t:g,!0)),S.test(i[1])&&w.isPlainObject(t))for(i in t)d(this[i])?this[i](t[i]):this.attr(i,t[i]);return this}return(r=g.getElementById(i[2]))&&(this[0]=r,this.length=1),this}return e.nodeType?(this[0]=e,this.length=1,this):d(e)?void 0!==n.ready?n.ready(e):e(w):w.makeArray(e,this)}).prototype=w.fn,M=w(g);var j=/^(?:parents|prev(?:Until|All))/,N={children:!0,contents:!0,next:!0,prev:!0};function D(e,t){for(;(e=e[t])&&1!==e.nodeType;);return 
e}w.fn.extend({has:function(e){var t=w(e,this),n=t.length;return this.filter(function(){for(var e=0;e\x20\t\r\n\f]*)/i,de=/^$|^module$|\/(?:java|ecma)script/i;ue=g.createDocumentFragment().appendChild(g.createElement("div")),(he=g.createElement("input")).setAttribute("type","radio"),he.setAttribute("checked","checked"),he.setAttribute("name","t"),ue.appendChild(he),f.checkClone=ue.cloneNode(!0).cloneNode(!0).lastChild.checked,ue.innerHTML="",f.noCloneChecked=!!ue.cloneNode(!0).lastChild.defaultValue,ue.innerHTML="",f.option=!!ue.lastChild;var ye={thead:[1,"","
        "],col:[2,"","
        "],tr:[2,"","
        "],td:[3,"","
        "],_default:[0,"",""]};function ge(e,t){var n;return n=void 0!==e.getElementsByTagName?e.getElementsByTagName(t||"*"):void 0!==e.querySelectorAll?e.querySelectorAll(t||"*"):[],void 0===t||t&&O(e,t)?w.merge([e],n):n}function me(e,t){for(var n=0,i=e.length;n",""]);var ve=/<|&#?\w+;/;function xe(e,t,n,i,r){for(var o,s,a,l,c,u,h=t.createDocumentFragment(),p=[],f=0,d=e.length;f\s*$/g;function Ce(e,t){return O(e,"table")&&O(11!==t.nodeType?t:t.firstChild,"tr")&&w(e).children("tbody")[0]||e}function Me(e){return e.type=(null!==e.getAttribute("type"))+"/"+e.type,e}function Le(e){return"true/"===(e.type||"").slice(0,5)?e.type=e.type.slice(5):e.removeAttribute("type"),e}function je(e,t){var n,i,r,o,s,a;if(1===t.nodeType){if(Q.hasData(e)&&(a=Q.get(e).events))for(r in Q.remove(t,"handle events"),a)for(n=0,i=a[r].length;n").attr(e.scriptAttrs||{}).prop({charset:e.scriptCharset,src:e.url}).on("load error",n=function(e){t.remove(),n=null,e&&r("error"===e.type?404:200,e.type)}),g.head.appendChild(t[0])},abort:function(){n&&n()}}});var Bt,Ft=[],$t=/(=)\?(?=&|$)|\?\?/;w.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=Ft.pop()||w.expando+"_"+wt.guid++;return this[e]=!0,e}}),w.ajaxPrefilter("json jsonp",function(t,n,i){var r,o,s,a=!1!==t.jsonp&&($t.test(t.url)?"url":"string"==typeof t.data&&0===(t.contentType||"").indexOf("application/x-www-form-urlencoded")&&$t.test(t.data)&&"data");if(a||"jsonp"===t.dataTypes[0])return r=t.jsonpCallback=d(t.jsonpCallback)?t.jsonpCallback():t.jsonpCallback,a?t[a]=t[a].replace($t,"$1"+r):!1!==t.jsonp&&(t.url+=(_t.test(t.url)?"&":"?")+t.jsonp+"="+r),t.converters["script json"]=function(){return s||w.error(r+" was not called"),s[0]},t.dataTypes[0]="json",o=e[r],e[r]=function(){s=arguments},i.always(function(){void 0===o?w(e).removeProp(r):e[r]=o,t[r]&&(t.jsonpCallback=n.jsonpCallback,Ft.push(r)),s&&d(o)&&o(s[0]),s=o=void 0}),"script"}),f.createHTMLDocument=((Bt=g.implementation.createHTMLDocument("").body).innerHTML="
        ",2===Bt.childNodes.length),w.parseHTML=function(e,t,n){return"string"!=typeof e?[]:("boolean"==typeof t&&(n=t,t=!1),t||(f.createHTMLDocument?((i=(t=g.implementation.createHTMLDocument("")).createElement("base")).href=g.location.href,t.head.appendChild(i)):t=g),o=!n&&[],(r=S.exec(e))?[t.createElement(r[1])]:(r=xe([e],t,o),o&&o.length&&w(o).remove(),w.merge([],r.childNodes)));var i,r,o},w.fn.load=function(e,t,n){var i,r,o,s=this,a=e.indexOf(" ");return-1").append(w.parseHTML(e)).find(i):e)}).always(n&&function(e,t){s.each(function(){n.apply(this,o||[e.responseText,t,e])})}),this},w.expr.pseudos.animated=function(e){return w.grep(w.timers,function(t){return e===t.elem}).length},w.offset={setOffset:function(e,t,n){var i,r,o,s,a,l,c=w.css(e,"position"),u=w(e),h={};"static"===c&&(e.style.position="relative"),a=u.offset(),o=w.css(e,"top"),l=w.css(e,"left"),("absolute"===c||"fixed"===c)&&-1<(o+l).indexOf("auto")?(s=(i=u.position()).top,r=i.left):(s=parseFloat(o)||0,r=parseFloat(l)||0),d(t)&&(t=t.call(e,n,w.extend({},a))),null!=t.top&&(h.top=t.top-a.top+s),null!=t.left&&(h.left=t.left-a.left+r),"using"in t?t.using.call(e,h):u.css(h)}},w.fn.extend({offset:function(e){if(arguments.length)return void 0===e?this:this.each(function(t){w.offset.setOffset(this,e,t)});var t,n,i=this[0];return i?i.getClientRects().length?(t=i.getBoundingClientRect(),n=i.ownerDocument.defaultView,{top:t.top+n.pageYOffset,left:t.left+n.pageXOffset}):{top:0,left:0}:void 0},position:function(){if(this[0]){var e,t,n,i=this[0],r={top:0,left:0};if("fixed"===w.css(i,"position"))t=i.getBoundingClientRect();else{for(t=this.offset(),n=i.ownerDocument,e=i.offsetParent||n.documentElement;e&&(e===n.body||e===n.documentElement)&&"static"===w.css(e,"position");)e=e.parentNode;e&&e!==i&&1===e.nodeType&&((r=w(e).offset()).top+=w.css(e,"borderTopWidth",!0),r.left+=w.css(e,"borderLeftWidth",!0))}return{top:t.top-r.top-w.css(i,"marginTop",!0),left:t.left-r.left-w.css(i,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){for(var e=this.offsetParent;e&&"static"===w.css(e,"position");)e=e.offsetParent;return e||ie})}}),w.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(e,t){var n="pageYOffset"===t;w.fn[e]=function(i){return V(this,function(e,i,r){var o;if(y(e)?o=e:9===e.nodeType&&(o=e.defaultView),void 0===r)return o?o[t]:e[i];o?o.scrollTo(n?o.pageXOffset:r,n?r:o.pageYOffset):e[i]=r},e,i,arguments.length)}}),w.each(["top","left"],function(e,t){w.cssHooks[t]=We(f.pixelPosition,function(e,n){if(n)return n=Re(e,t),ze.test(n)?w(e).position()[t]+"px":n})}),w.each({Height:"height",Width:"width"},function(e,t){w.each({padding:"inner"+e,content:t,"":"outer"+e},function(n,i){w.fn[i]=function(r,o){var s=arguments.length&&(n||"boolean"!=typeof r),a=n||(!0===r||!0===o?"margin":"border");return V(this,function(t,n,r){var o;return y(t)?0===i.indexOf("outer")?t["inner"+e]:t.document.documentElement["client"+e]:9===t.nodeType?(o=t.documentElement,Math.max(t.body["scroll"+e],o["scroll"+e],t.body["offset"+e],o["offset"+e],o["client"+e])):void 0===r?w.css(t,n,a):w.style(t,n,r,a)},t,s?r:void 0,s)}})}),w.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(e,t){w.fn[t]=function(e){return this.on(t,e)}}),w.fn.extend({bind:function(e,t,n){return this.on(e,null,t,n)},unbind:function(e,t){return this.off(e,null,t)},delegate:function(e,t,n,i){return this.on(t,e,n,i)},undelegate:function(e,t,n){return 1===arguments.length?this.off(e,"**"):this.off(t,e||"**",n)},hover:function(e,t){return 
this.mouseenter(e).mouseleave(t||e)}}),w.each("blur focus focusin focusout resize scroll click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup contextmenu".split(" "),function(e,t){w.fn[t]=function(e,n){return 0{let t=e.getAttribute("data-bs-target");if(!t||"#"===t){let n=e.getAttribute("href");if(!n||!n.includes("#")&&!n.startsWith("."))return null;n.includes("#")&&!n.startsWith("#")&&(n="#"+n.split("#")[1]),t=n&&"#"!==n?n.trim():null}return t},t=t=>{const n=e(t);return n&&document.querySelector(n)?n:null},n=t=>{const n=e(t);return n?document.querySelector(n):null},i=e=>{e.dispatchEvent(new Event("transitionend"))},r=e=>!(!e||"object"!=typeof e)&&(void 0!==e.jquery&&(e=e[0]),void 0!==e.nodeType),o=e=>r(e)?e.jquery?e[0]:e:"string"==typeof e&&e.length>0?document.querySelector(e):null,s=(e,t,n)=>{Object.keys(n).forEach(i=>{const o=n[i],s=t[i],a=s&&r(s)?"element":null==(l=s)?""+l:{}.toString.call(l).match(/\s([a-z]+)/i)[1].toLowerCase();var l;if(!new RegExp(o).test(a))throw new TypeError(`${e.toUpperCase()}: Option "${i}" provided type "${a}" but expected type "${o}".`)})},a=e=>!(!r(e)||0===e.getClientRects().length)&&"visible"===getComputedStyle(e).getPropertyValue("visibility"),l=e=>!e||e.nodeType!==Node.ELEMENT_NODE||!!e.classList.contains("disabled")||(void 0!==e.disabled?e.disabled:e.hasAttribute("disabled")&&"false"!==e.getAttribute("disabled")),c=e=>{if(!document.documentElement.attachShadow)return null;if("function"==typeof e.getRootNode){const t=e.getRootNode();return t instanceof ShadowRoot?t:null}return e instanceof ShadowRoot?e:e.parentNode?c(e.parentNode):null},u=()=>{},h=e=>{e.offsetHeight},p=()=>{const{jQuery:e}=window;return e&&!document.body.hasAttribute("data-bs-no-jquery")?e:null},f=[],d=()=>"rtl"===document.documentElement.dir,y=e=>{var t;t=(()=>{const t=p();if(t){const n=e.NAME,i=t.fn[n];t.fn[n]=e.jQueryInterface,t.fn[n].Constructor=e,t.fn[n].noConflict=(()=>(t.fn[n]=i,e.jQueryInterface))}}),"loading"===document.readyState?(f.length||document.addEventListener("DOMContentLoaded",()=>{f.forEach(e=>e())}),f.push(t)):t()},g=e=>{"function"==typeof e&&e()},m=(e,t,n=!0)=>{if(!n)return void g(e);const r=(e=>{if(!e)return 0;let{transitionDuration:t,transitionDelay:n}=window.getComputedStyle(e);const i=Number.parseFloat(t),r=Number.parseFloat(n);return i||r?(t=t.split(",")[0],n=n.split(",")[0],1e3*(Number.parseFloat(t)+Number.parseFloat(n))):0})(t)+5;let o=!1;const s=({target:n})=>{n===t&&(o=!0,t.removeEventListener("transitionend",s),g(e))};t.addEventListener("transitionend",s),setTimeout(()=>{o||i(t)},r)},v=(e,t,n,i)=>{let r=e.indexOf(t);if(-1===r)return e[!n&&i?e.length-1:0];const o=e.length;return r+=n?1:-1,i&&(r=(r+o)%o),e[Math.max(0,Math.min(r,o-1))]},x=/[^.]*(?=\..*)\.|.*/,b=/\..*/,w=/::\d+$/,_={};let E=1;const A={mouseenter:"mouseover",mouseleave:"mouseout"},T=/^(mouseenter|mouseleave)/i,k=new Set(["click","dblclick","mouseup","mousedown","contextmenu","mousewheel","DOMMouseScroll","mouseover","mouseout","mousemove","selectstart","selectend","keydown","keypress","keyup","orientationchange","touchstart","touchmove","touchend","touchcancel","pointerdown","pointermove","pointerup","pointerleave","pointercancel","gesturestart","gesturechange","gestureend","focus","blur","change","reset","select","submit","focusin","focusout","load","unload","beforeunload","resize","move","DOMContentLoaded","readystatechange","error","abort","scroll"]);function O(e,t){return t&&`${t}::${E++}`||e.uidEvent||E++}function 
S(e){const t=O(e);return e.uidEvent=t,_[t]=_[t]||{},_[t]}function C(e,t,n=null){const i=Object.keys(e);for(let r=0,o=i.length;r(function(t){if(!t.relatedTarget||t.relatedTarget!==t.delegateTarget&&!t.delegateTarget.contains(t.relatedTarget))return e.call(this,t)});i?i=e(i):n=e(n)}const[o,s,a]=M(t,n,i),l=S(e),c=l[a]||(l[a]={}),u=C(c,s,o?n:null);if(u)return void(u.oneOff=u.oneOff&&r);const h=O(s,t.replace(x,"")),p=o?function(e,t,n){return function i(r){const o=e.querySelectorAll(t);for(let{target:s}=r;s&&s!==this;s=s.parentNode)for(let a=o.length;a--;)if(o[a]===s)return r.delegateTarget=s,i.oneOff&&D.off(e,r.type,t,n),n.apply(s,[r]);return null}}(e,n,i):function(e,t){return function n(i){return i.delegateTarget=e,n.oneOff&&D.off(e,i.type,t),t.apply(e,[i])}}(e,n);p.delegationSelector=o?n:null,p.originalHandler=s,p.oneOff=r,p.uidEvent=h,c[h]=p,e.addEventListener(a,p,o)}function j(e,t,n,i,r){const o=C(t[n],i,r);o&&(e.removeEventListener(n,o,Boolean(r)),delete t[n][o.uidEvent])}function N(e){return e=e.replace(b,""),A[e]||e}const D={on(e,t,n,i){L(e,t,n,i,!1)},one(e,t,n,i){L(e,t,n,i,!0)},off(e,t,n,i){if("string"!=typeof t||!e)return;const[r,o,s]=M(t,n,i),a=s!==t,l=S(e),c=t.startsWith(".");if(void 0!==o){if(!l||!l[s])return;return void j(e,l,s,o,r?n:null)}c&&Object.keys(l).forEach(n=>{!function(e,t,n,i){const r=t[n]||{};Object.keys(r).forEach(o=>{if(o.includes(i)){const i=r[o];j(e,t,n,i.originalHandler,i.delegationSelector)}})}(e,l,n,t.slice(1))});const u=l[s]||{};Object.keys(u).forEach(n=>{const i=n.replace(w,"");if(!a||t.includes(i)){const t=u[n];j(e,l,s,t.originalHandler,t.delegationSelector)}})},trigger(e,t,n){if("string"!=typeof t||!e)return null;const i=p(),r=N(t),o=t!==r,s=k.has(r);let a,l=!0,c=!0,u=!1,h=null;return o&&i&&(a=i.Event(t,n),i(e).trigger(a),l=!a.isPropagationStopped(),c=!a.isImmediatePropagationStopped(),u=a.isDefaultPrevented()),s?(h=document.createEvent("HTMLEvents")).initEvent(r,l,!0):h=new CustomEvent(t,{bubbles:l,cancelable:!0}),void 0!==n&&Object.keys(n).forEach(e=>{Object.defineProperty(h,e,{get:()=>n[e]})}),u&&h.preventDefault(),c&&e.dispatchEvent(h),h.defaultPrevented&&void 0!==a&&a.preventDefault(),h}},z=new Map;var H={set(e,t,n){z.has(e)||z.set(e,new Map);const i=z.get(e);i.has(t)||0===i.size?i.set(t,n):console.error(`Bootstrap doesn't allow more than one instance per element. 
Bound instance: ${Array.from(i.keys())[0]}.`)},get:(e,t)=>z.has(e)&&z.get(e).get(t)||null,remove(e,t){if(!z.has(e))return;const n=z.get(e);n.delete(t),0===n.size&&z.delete(e)}};class P{constructor(e){(e=o(e))&&(this._element=e,H.set(this._element,this.constructor.DATA_KEY,this))}dispose(){H.remove(this._element,this.constructor.DATA_KEY),D.off(this._element,this.constructor.EVENT_KEY),Object.getOwnPropertyNames(this).forEach(e=>{this[e]=null})}_queueCallback(e,t,n=!0){m(e,t,n)}static getInstance(e){return H.get(o(e),this.DATA_KEY)}static getOrCreateInstance(e,t={}){return this.getInstance(e)||new this(e,"object"==typeof t?t:null)}static get VERSION(){return"5.1.0"}static get NAME(){throw new Error('You have to implement the static method "NAME", for each component!')}static get DATA_KEY(){return"bs."+this.NAME}static get EVENT_KEY(){return"."+this.DATA_KEY}}const I=(e,t="hide")=>{const i="click.dismiss"+e.EVENT_KEY,r=e.NAME;D.on(document,i,`[data-bs-dismiss="${r}"]`,function(i){if(["A","AREA"].includes(this.tagName)&&i.preventDefault(),l(this))return;const o=n(this)||this.closest("."+r);e.getOrCreateInstance(o)[t]()})};class R extends P{static get NAME(){return"alert"}close(){if(D.trigger(this._element,"close.bs.alert").defaultPrevented)return;this._element.classList.remove("show");const e=this._element.classList.contains("fade");this._queueCallback(()=>this._destroyElement(),this._element,e)}_destroyElement(){this._element.remove(),D.trigger(this._element,"closed.bs.alert"),this.dispose()}static jQueryInterface(e){return this.each(function(){const t=R.getOrCreateInstance(this);if("string"==typeof e){if(void 0===t[e]||e.startsWith("_")||"constructor"===e)throw new TypeError(`No method named "${e}"`);t[e](this)}})}}I(R,"close"),y(R);class W extends P{static get NAME(){return"button"}toggle(){this._element.setAttribute("aria-pressed",this._element.classList.toggle("active"))}static jQueryInterface(e){return this.each(function(){const t=W.getOrCreateInstance(this);"toggle"===e&&t[e]()})}}function q(e){return"true"===e||"false"!==e&&(e===Number(e).toString()?Number(e):""===e||"null"===e?null:e)}function V(e){return e.replace(/[A-Z]/g,e=>"-"+e.toLowerCase())}D.on(document,"click.bs.button.data-api",'[data-bs-toggle="button"]',e=>{e.preventDefault();const t=e.target.closest('[data-bs-toggle="button"]');W.getOrCreateInstance(t).toggle()}),y(W);const B={setDataAttribute(e,t,n){e.setAttribute("data-bs-"+V(t),n)},removeDataAttribute(e,t){e.removeAttribute("data-bs-"+V(t))},getDataAttributes(e){if(!e)return{};const t={};return Object.keys(e.dataset).filter(e=>e.startsWith("bs")).forEach(n=>{let i=n.replace(/^bs/,"");i=i.charAt(0).toLowerCase()+i.slice(1,i.length),t[i]=q(e.dataset[n])}),t},getDataAttribute:(e,t)=>q(e.getAttribute("data-bs-"+V(t))),offset(e){const t=e.getBoundingClientRect();return{top:t.top+window.pageYOffset,left:t.left+window.pageXOffset}},position:e=>({top:e.offsetTop,left:e.offsetLeft})},F={find:(e,t=document.documentElement)=>[].concat(...Element.prototype.querySelectorAll.call(t,e)),findOne:(e,t=document.documentElement)=>Element.prototype.querySelector.call(t,e),children:(e,t)=>[].concat(...e.children).filter(e=>e.matches(t)),parents(e,t){const n=[];let i=e.parentNode;for(;i&&i.nodeType===Node.ELEMENT_NODE&&3!==i.nodeType;)i.matches(t)&&n.push(i),i=i.parentNode;return n},prev(e,t){let n=e.previousElementSibling;for(;n;){if(n.matches(t))return[n];n=n.previousElementSibling}return[]},next(e,t){let 
n=e.nextElementSibling;for(;n;){if(n.matches(t))return[n];n=n.nextElementSibling}return[]},focusableChildren(e){const t=["a","button","input","textarea","select","details","[tabindex]",'[contenteditable="true"]'].map(e=>e+':not([tabindex^="-"])').join(", ");return this.find(t,e).filter(e=>!l(e)&&a(e))}},$={interval:5e3,keyboard:!0,slide:!1,pause:"hover",wrap:!0,touch:!0},U={interval:"(number|boolean)",keyboard:"boolean",slide:"(boolean|string)",pause:"(string|boolean)",wrap:"boolean",touch:"boolean"},X="next",Y="prev",Q="left",G="right",K={ArrowLeft:G,ArrowRight:Q};class J extends P{constructor(e,t){super(e),this._items=null,this._interval=null,this._activeElement=null,this._isPaused=!1,this._isSliding=!1,this.touchTimeout=null,this.touchStartX=0,this.touchDeltaX=0,this._config=this._getConfig(t),this._indicatorsElement=F.findOne(".carousel-indicators",this._element),this._touchSupported="ontouchstart"in document.documentElement||navigator.maxTouchPoints>0,this._pointerEvent=Boolean(window.PointerEvent),this._addEventListeners()}static get Default(){return $}static get NAME(){return"carousel"}next(){this._slide(X)}nextWhenVisible(){!document.hidden&&a(this._element)&&this.next()}prev(){this._slide(Y)}pause(e){e||(this._isPaused=!0),F.findOne(".carousel-item-next, .carousel-item-prev",this._element)&&(i(this._element),this.cycle(!0)),clearInterval(this._interval),this._interval=null}cycle(e){e||(this._isPaused=!1),this._interval&&(clearInterval(this._interval),this._interval=null),this._config&&this._config.interval&&!this._isPaused&&(this._updateInterval(),this._interval=setInterval((document.visibilityState?this.nextWhenVisible:this.next).bind(this),this._config.interval))}to(e){this._activeElement=F.findOne(".active.carousel-item",this._element);const t=this._getItemIndex(this._activeElement);if(e>this._items.length-1||e<0)return;if(this._isSliding)return void D.one(this._element,"slid.bs.carousel",()=>this.to(e));if(t===e)return this.pause(),void this.cycle();const n=e>t?X:Y;this._slide(n,this._items[e])}_getConfig(e){return e={...$,...B.getDataAttributes(this._element),..."object"==typeof e?e:{}},s("carousel",e,U),e}_handleSwipe(){const e=Math.abs(this.touchDeltaX);if(e<=40)return;const t=e/this.touchDeltaX;this.touchDeltaX=0,t&&this._slide(t>0?G:Q)}_addEventListeners(){this._config.keyboard&&D.on(this._element,"keydown.bs.carousel",e=>this._keydown(e)),"hover"===this._config.pause&&(D.on(this._element,"mouseenter.bs.carousel",e=>this.pause(e)),D.on(this._element,"mouseleave.bs.carousel",e=>this.cycle(e))),this._config.touch&&this._touchSupported&&this._addTouchEventListeners()}_addTouchEventListeners(){const e=e=>{!this._pointerEvent||"pen"!==e.pointerType&&"touch"!==e.pointerType?this._pointerEvent||(this.touchStartX=e.touches[0].clientX):this.touchStartX=e.clientX},t=e=>{this.touchDeltaX=e.touches&&e.touches.length>1?0:e.touches[0].clientX-this.touchStartX},n=e=>{!this._pointerEvent||"pen"!==e.pointerType&&"touch"!==e.pointerType||(this.touchDeltaX=e.clientX-this.touchStartX),this._handleSwipe(),"hover"===this._config.pause&&(this.pause(),this.touchTimeout&&clearTimeout(this.touchTimeout),this.touchTimeout=setTimeout(e=>this.cycle(e),500+this._config.interval))};F.find(".carousel-item 
img",this._element).forEach(e=>{D.on(e,"dragstart.bs.carousel",e=>e.preventDefault())}),this._pointerEvent?(D.on(this._element,"pointerdown.bs.carousel",t=>e(t)),D.on(this._element,"pointerup.bs.carousel",e=>n(e)),this._element.classList.add("pointer-event")):(D.on(this._element,"touchstart.bs.carousel",t=>e(t)),D.on(this._element,"touchmove.bs.carousel",e=>t(e)),D.on(this._element,"touchend.bs.carousel",e=>n(e)))}_keydown(e){if(/input|textarea/i.test(e.target.tagName))return;const t=K[e.key];t&&(e.preventDefault(),this._slide(t))}_getItemIndex(e){return this._items=e&&e.parentNode?F.find(".carousel-item",e.parentNode):[],this._items.indexOf(e)}_getItemByOrder(e,t){const n=e===X;return v(this._items,t,n,this._config.wrap)}_triggerSlideEvent(e,t){const n=this._getItemIndex(e),i=this._getItemIndex(F.findOne(".active.carousel-item",this._element));return D.trigger(this._element,"slide.bs.carousel",{relatedTarget:e,direction:t,from:i,to:n})}_setActiveIndicatorElement(e){if(this._indicatorsElement){const t=F.findOne(".active",this._indicatorsElement);t.classList.remove("active"),t.removeAttribute("aria-current");const n=F.find("[data-bs-target]",this._indicatorsElement);for(let t=0;t{D.trigger(this._element,"slid.bs.carousel",{relatedTarget:o,direction:p,from:r,to:s})};if(this._element.classList.contains("slide")){o.classList.add(u),h(o),i.classList.add(c),o.classList.add(c);const e=()=>{o.classList.remove(c,u),o.classList.add("active"),i.classList.remove("active",u,c),this._isSliding=!1,setTimeout(f,0)};this._queueCallback(e,i,!0)}else i.classList.remove("active"),o.classList.add("active"),this._isSliding=!1,f();a&&this.cycle()}_directionToOrder(e){return[G,Q].includes(e)?d()?e===Q?Y:X:e===Q?X:Y:e}_orderToDirection(e){return[X,Y].includes(e)?d()?e===Y?Q:G:e===Y?G:Q:e}static carouselInterface(e,t){const n=J.getOrCreateInstance(e,t);let{_config:i}=n;"object"==typeof t&&(i={...i,...t});const r="string"==typeof t?t:i.slide;if("number"==typeof t)n.to(t);else if("string"==typeof r){if(void 0===n[r])throw new TypeError(`No method named "${r}"`);n[r]()}else i.interval&&i.ride&&(n.pause(),n.cycle())}static jQueryInterface(e){return this.each(function(){J.carouselInterface(this,e)})}static dataApiClickHandler(e){const t=n(this);if(!t||!t.classList.contains("carousel"))return;const i={...B.getDataAttributes(t),...B.getDataAttributes(this)},r=this.getAttribute("data-bs-slide-to");r&&(i.interval=!1),J.carouselInterface(t,i),r&&J.getInstance(t).to(r),e.preventDefault()}}D.on(document,"click.bs.carousel.data-api","[data-bs-slide], [data-bs-slide-to]",J.dataApiClickHandler),D.on(window,"load.bs.carousel.data-api",()=>{const e=F.find('[data-bs-ride="carousel"]');for(let t=0,n=e.length;te===this._element);null!==r&&o.length&&(this._selector=r,this._triggerArray.push(n))}this._initializeChildren(),this._config.parent||this._addAriaAndCollapsedClass(this._triggerArray,this._isShown()),this._config.toggle&&this.toggle()}static get Default(){return Z}static get NAME(){return"collapse"}toggle(){this._isShown()?this.hide():this.show()}show(){if(this._isTransitioning||this._isShown())return;let e,t=[];if(this._config.parent){const e=F.find(".collapse .collapse",this._config.parent);t=F.find(".show, .collapsing",this._config.parent).filter(t=>!e.includes(t))}const n=F.findOne(this._selector);if(t.length){const 
[... minified vendor JavaScript omitted: the Bootstrap 5 bundle (Alert, Button, Carousel, Collapse, Dropdown, Modal, Offcanvas, Popover, ScrollSpy, Tab, Toast, Tooltip) with Popper.js inlined, followed by the SimpleBar UMD bundle and its core-js polyfills; the extracted text of this asset is truncated and unrecoverable ...]

@@ -106,12 +108,20 @@

        {% block page_title %}{% endblock page_title %}