Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Dev RELEASE: v0.19.0 #120

Merged
merged 18 commits into from
Jun 25, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 0 additions & 2 deletions .github/workflows/pypi-publish.yml
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,4 @@ jobs:
- name: Publish package
uses: pypa/gh-action-pypi-publish@v1.8.10
with:
user: __token__
password: ${{ secrets.PYPI_API_TOKEN }}
packages_dir: src/dist
8 changes: 4 additions & 4 deletions src/offat/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -115,8 +115,8 @@ def start():
)
parser.add_argument(
'-s',
'--ssl',
dest='ssl',
'--ssl-verify',
dest='ssl_verify',
required=False,
action='store_true',
help='Enable SSL Verification',
Expand Down Expand Up @@ -153,7 +153,8 @@ def start():

# parse args and run tests
api_parser: SwaggerParser | OpenAPIv3Parser = create_parser(
args.fpath, server_url=args.server_url
args.fpath, server_url=args.server_url,
ssl_verify=args.ssl_verify
)

generate_and_run_tests(
Expand All @@ -165,7 +166,6 @@ def start():
rate_limit=rate_limit,
test_data_config=test_data_config,
proxies=args.proxies_list,
ssl=args.ssl,
capture_failed=args.capture_failed,
)

Expand Down
69 changes: 40 additions & 29 deletions src/offat/api/app.py
Original file line number Diff line number Diff line change
@@ -1,86 +1,97 @@
from os import uname, environ
from textwrap import dedent

from fastapi import status, Request, Response
from offat.api.config import app, task_queue, task_timeout, auth_secret_key
from offat.api.jobs import scan_api
from offat.api.models import CreateScanModel
from offat.api.schema import CreateScanSchema
from offat.logger import logger

# from os import uname, environ


logger.info("Secret Key: %s", auth_secret_key)
logger.info('Secret Key: %s', auth_secret_key)
Dismissed Show dismissed Hide dismissed


# if uname().sysname == 'Darwin' and environ.get('OBJC_DISABLE_INITIALIZE_FORK_SAFETY') != 'YES':
# logger.warning('Mac Users might need to configure OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES in env\nVisit StackOverFlow link for more info: https://stackoverflow.com/questions/50168647/multiprocessing-causes-python-to-crash-and-gives-an-error-may-have-been-in-progr')
if (
uname().sysname == 'Darwin'
and environ.get('OBJC_DISABLE_INITIALIZE_FORK_SAFETY') != 'YES'
):
logger.warning(
dedent(
'''Mac Users might need to configure OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES in env
Visit StackOverFlow link for more info:
https://stackoverflow.com/questions/50168647/multiprocessing-causes-python-to-crash-and-gives-an-error-may-have-been-in-progr
'''
)
)


@app.get("/", status_code=status.HTTP_200_OK)
@app.get('/', status_code=status.HTTP_200_OK)
async def root():
return {
"name": "OFFAT API",
"project": "https://github.com/OWASP/offat",
"license": "https://github.com/OWASP/offat/blob/main/LICENSE",
'name': 'OFFAT API',
'project': 'https://github.com/OWASP/offat',
'license': 'https://github.com/OWASP/offat/blob/main/LICENSE',
}


@app.post("/api/v1/scan", status_code=status.HTTP_201_CREATED)
@app.post('/api/v1/scan', status_code=status.HTTP_201_CREATED)
async def add_scan_task(
scan_data: CreateScanModel, request: Request, response: Response
scan_data: CreateScanSchema, request: Request, response: Response
):
# for auth
client_ip = request.client.host
secret_key = request.headers.get("SECRET-KEY", None)
secret_key = request.headers.get('SECRET-KEY', None)
if secret_key != auth_secret_key:
# return 404 for better endpoint security
response.status_code = status.HTTP_401_UNAUTHORIZED
logger.warning("INTRUSION: %s tried to create a new scan job", client_ip)
return {"message": "Unauthorized"}
logger.warning('INTRUSION: %s tried to create a new scan job', client_ip)
return {'message': 'Unauthorized'}

msg = {"msg": "Scan Task Created", "job_id": None}
msg = {'msg': 'Scan Task Created', 'job_id': None}

job = task_queue.enqueue(scan_api, scan_data, job_timeout=task_timeout)
msg["job_id"] = job.id
msg['job_id'] = job.id

logger.info("SUCCESS: %s created new scan job - %s", client_ip, job.id)
logger.info('SUCCESS: %s created new scan job - %s', client_ip, job.id)

return msg


@app.get("/api/v1/scan/{job_id}/result")
@app.get('/api/v1/scan/{job_id}/result')
async def get_scan_task_result(job_id: str, request: Request, response: Response):
# for auth
client_ip = request.client.host
secret_key = request.headers.get("SECRET-KEY", None)
secret_key = request.headers.get('SECRET-KEY', None)
if secret_key != auth_secret_key:
# return 404 for better endpoint security
response.status_code = status.HTTP_401_UNAUTHORIZED
logger.warning(
"INTRUSION: %s tried to access %s job scan results", client_ip, job_id
'INTRUSION: %s tried to access %s job scan results', client_ip, job_id
)
return {"message": "Unauthorized"}
return {'message': 'Unauthorized'}

scan_results_job = task_queue.fetch_job(job_id=job_id)

logger.info("SUCCESS: %s accessed %s job scan results", client_ip, job_id)
logger.info('SUCCESS: %s accessed %s job scan results', client_ip, job_id)

msg = "Task Remaining or Invalid Job Id"
msg = 'Task Remaining or Invalid Job Id'
results = None
response.status_code = status.HTTP_202_ACCEPTED

if scan_results_job and scan_results_job.is_started:
msg = "Job In Progress"
msg = 'Job In Progress'

elif scan_results_job and scan_results_job.is_finished:
msg = "Task Completed"
msg = 'Task Completed'
results = scan_results_job.result
response.status_code = status.HTTP_200_OK

elif scan_results_job and scan_results_job.is_failed:
msg = "Task Failed. Try Creating Task Again."
msg = 'Task Failed. Try Creating Task Again.'
response.status_code = status.HTTP_200_OK

msg = {
"msg": msg,
"results": results,
'msg': msg,
'results': results,
}
return msg
15 changes: 11 additions & 4 deletions src/offat/api/jobs.py
Original file line number Diff line number Diff line change
@@ -1,23 +1,30 @@
from sys import exc_info
from offat.api.models import CreateScanModel
from offat.utils import is_valid_url
from offat.api.schema import CreateScanSchema
from offat.tester.handler import generate_and_run_tests
from offat.parsers import create_parser
from offat.logger import logger


def scan_api(body_data: CreateScanModel):
def scan_api(body_data: CreateScanSchema, ssl_verify: bool = True):
try:
api_parser = create_parser(fpath_or_url=None, spec=body_data.openAPI)
url = body_data.openapi if is_valid_url(body_data.openapi) else None
spec = None if url else body_data.openapi

api_parser = create_parser(fpath_or_url=url, spec=spec, ssl_verify=ssl_verify)

results = generate_and_run_tests(
api_parser=api_parser,
regex_pattern=body_data.regex_pattern,
req_headers=body_data.req_headers,
rate_limit=body_data.rate_limit,
test_data_config=body_data.test_data_config,
proxies=body_data.proxies,
capture_failed=body_data.capture_failed,
remove_unused_data=body_data.remove_unused_data,
)
return results
except Exception as e:
logger.error('Error occurred while creating a job: %s', repr(e))
logger.debug('Debug Data:', exc_info=exc_info())
logger.error('Debug Data:', exc_info=exc_info())
return [{'error': str(e)}]
11 changes: 0 additions & 11 deletions src/offat/api/models.py

This file was deleted.

13 changes: 13 additions & 0 deletions src/offat/api/schema.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
from typing import Optional
from pydantic import BaseModel


class CreateScanSchema(BaseModel):
    """Request body schema for creating a new OFFAT scan job.

    Mirrors the keyword arguments accepted by the test runner: everything
    except the OpenAPI document itself is optional and falls back to the
    same defaults the CLI uses.
    """

    # OpenAPI/Swagger document: either a URL to fetch or the raw spec content.
    openapi: str
    # Regex used to filter which endpoint paths get tested.
    regex_pattern: str | None = None
    # Extra HTTP headers sent with every test request.
    req_headers: dict | None = {'User-Agent': 'offat-api'}
    # Maximum requests per second issued by the scanner.
    rate_limit: int | None = 60
    # User-supplied test-data configuration passed through to the tester.
    test_data_config: dict | None = None
    # Proxy URLs to route test traffic through.
    proxies: list[str] | None = None
    # Include failed test cases in the generated report when True.
    capture_failed: bool | None = False
    # Strip unused immune/request data from results when True.
    remove_unused_data: bool | None = True
8 changes: 4 additions & 4 deletions src/offat/http.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ def __init__(
proxies: list[str] | None = [],
allow_redirects: bool = True,
timeout: float = 60,
ssl: bool = False,
ssl_verify: bool = True,
) -> None:
"""AsyncRequests class constructor

Expand All @@ -74,7 +74,7 @@ def __init__(
headers (dict): overrides default headers while sending HTTP requests
proxy (str): proxy URL to be used while sending requests
timeout (float): total timeout parameter of aiohttp.ClientTimeout
ssl (bool): enforces tls/ssl verification if True
ssl_verify (bool): enforces tls/ssl verification if True

Returns:
None
Expand All @@ -84,7 +84,7 @@ def __init__(
self._allow_redirects = allow_redirects
self._limiter = AsyncLimiter(max_rate=rate_limit, time_period=1)
self._timeout = ClientTimeout(total=timeout)
self._ssl = ssl
self._ssl_verify = ssl_verify

@retry(
stop=stop_after_attempt(3),
Expand Down Expand Up @@ -130,7 +130,7 @@ async def request(self, url: str, *args, method: str = "GET", **kwargs) -> dict:
url,
allow_redirects=self._allow_redirects,
proxy=self._proxy.get_random_proxy(),
ssl=self._ssl,
ssl=self._ssl_verify,
*args,
**kwargs,
) as response:
Expand Down
3 changes: 2 additions & 1 deletion src/offat/parsers/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,10 +11,11 @@ def create_parser(
fpath_or_url: str,
spec: dict | None = None,
server_url: str | None = None,
ssl_verify: bool = True,
) -> SwaggerParser | OpenAPIv3Parser:
"""returns parser based on doc file"""
if fpath_or_url and is_valid_url(fpath_or_url):
res = http_get(fpath_or_url, timeout=3)
res = http_get(fpath_or_url, timeout=3, verify=ssl_verify)
if res.status_code != 200:
logger.error(
'server returned status code %d offat expects 200 status code',
Expand Down
6 changes: 1 addition & 5 deletions src/offat/tester/handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,6 @@ def generate_and_run_tests(
req_headers: dict | None = None,
proxies: list[str] | None = None,
test_data_config: dict | None = None,
ssl: bool = False,
capture_failed: bool = False,
remove_unused_data: bool = True,
):
Expand All @@ -49,8 +48,6 @@ def generate_and_run_tests(
(optional).
test_data_config: A dictionary representing the configuration
for user-provided test data (optional).
ssl: A boolean indicating whether to use SSL for the requests
(default: False).
capture_failed: A boolean indicating whether to capture failed
tests in the report (default: False).
remove_unused_data: A boolean indicating whether to remove
Expand All @@ -61,7 +58,7 @@ def generate_and_run_tests(
"""
if not is_host_up(openapi_parser=api_parser):
logger.error(
'Stopping tests due to unavailibility of host: %s', api_parser.host
'Stopping tests due to unavailability of host: %s', api_parser.host
)
return

Expand All @@ -71,7 +68,6 @@ def generate_and_run_tests(
rate_limit=rate_limit,
headers=req_headers,
proxies=proxies,
ssl=ssl,
)

results: list = []
Expand Down
7 changes: 5 additions & 2 deletions src/offat/tester/runner.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,10 +24,13 @@ def __init__(
rate_limit: float = 60,
headers: dict | None = None,
proxies: list[str] | None = None,
ssl: bool = False,
ssl_verify: bool = True,
) -> None:
self._client = AsyncRequests(
rate_limit=rate_limit, headers=headers, proxies=proxies, ssl=ssl
rate_limit=rate_limit,
headers=headers,
proxies=proxies,
ssl_verify=ssl_verify
)
self.progress = Progress(console=console)
self.progress_task_id: TaskID | None = None
Expand Down
17 changes: 15 additions & 2 deletions src/offat/tester/tester_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
OWASP OFFAT Tester Utils Module
"""
from http import client as http_client
import ssl
from sys import exc_info
from typing import Optional
from asyncio import run
Expand All @@ -15,10 +16,11 @@
from ..parsers import SwaggerParser, OpenAPIv3Parser


def is_host_up(openapi_parser: SwaggerParser | OpenAPIv3Parser) -> bool:
def is_host_up(openapi_parser: SwaggerParser | OpenAPIv3Parser, ssl_verify: bool = True) -> bool:
'''checks whether the host from openapi doc is available or not.
Returns True is host is available else returns False'''
tokens = openapi_parser.host.split(':')
use_ssl = False
match len(tokens):
case 1:
host = tokens[0]
Expand All @@ -34,13 +36,24 @@ def is_host_up(openapi_parser: SwaggerParser | OpenAPIv3Parser) -> bool:

match port:
case 443:
use_ssl = True
proto = http_client.HTTPSConnection
case _:
proto = http_client.HTTPConnection

logger.info('Checking whether host %s:%s is available', host, port)
try:
conn = proto(host=host, port=port, timeout=5)
if not use_ssl:
conn = proto(host=host, port=port, timeout=5)
else:
if ssl_verify:
conn = proto(host=host, port=port, timeout=5)
else:
conn = proto(
host=host,
port=port,
timeout=5,
context = ssl._create_unverified_context())
conn.request('GET', '/')
res = conn.getresponse()
logger.info('Host returned status code: %d', res.status)
Expand Down
Empty file.
Loading
Loading