
Commit

Merge pull request #44 from OWASP/dev
RELEASE v0.14.0
dmdhrumilmistry authored Feb 1, 2024
2 parents 1446c15 + c83aa8a commit 16e9ab6
Showing 7 changed files with 256 additions and 295 deletions.
14 changes: 2 additions & 12 deletions src/offat/__main__.py
@@ -37,9 +37,7 @@ def start():
parser.add_argument('-v', '--version', action='version',
version=f'%(prog)s {get_package_version()}')
parser.add_argument('-rl', '--rate-limit', dest='rate_limit',
help='API requests rate limit. -dr should be passed in order to use this option', type=int, default=None, required=False)
parser.add_argument('-dr', '--delay-rate', dest='delay_rate',
help='API requests delay rate in seconds. -rl should be passed in order to use this option', type=float, default=None, required=False)
help='API requests rate limit per second', type=float, default=60, required=False)
parser.add_argument('-pr', '--path-regex', dest='path_regex_pattern', type=str,
help='run tests for paths matching given regex pattern', required=False, default=None)
parser.add_argument('-o', '--output', dest='output_file', type=str,
@@ -52,21 +50,14 @@ def start():
help='YAML file containing user test data for tests', required=False, type=str)
parser.add_argument('-p', '--proxy', dest='proxy',
help='Proxy server URL to route HTTP requests through (e.g., "http://proxyserver:port")', required=False, type=str)
parser.add_argument('-ns', '--no-ssl', dest='no_ssl', help='Ignores SSL verification when enabled',
action='store_true', required=False) # False -> ignore SSL, True -> enforce SSL check
args = parser.parse_args()

# convert req headers str to dict
headers_dict: dict = headers_list_to_dict(args.headers)

# handle rate limiting options
# TODO: allow user to opt out of rate limit
rate_limit = args.rate_limit
delay_rate = args.delay_rate

# if any is not set, then set both to None
if (rate_limit and not delay_rate) or (not rate_limit and delay_rate):
rate_limit = None
delay_rate = None

# handle test user data config file
test_data_config = args.test_data_config
@@ -83,7 +74,6 @@ def start():
output_file_format=args.output_format,
req_headers=headers_dict,
rate_limit=rate_limit,
delay=delay_rate,
test_data_config=test_data_config,
proxy=args.proxy,
ssl=args.no_ssl,
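
The __main__.py change above replaces the paired -rl/--rate-limit and -dr/--delay-rate options with a single requests-per-second flag. A minimal sketch of just that option in isolation (the real OFFAT parser defines many more arguments; the prog name here is only illustrative):

from argparse import ArgumentParser

# Sketch of the simplified rate-limit option: one float value, default 60 req/s,
# with no companion --delay-rate flag required.
parser = ArgumentParser(prog='offat')
parser.add_argument('-rl', '--rate-limit', dest='rate_limit',
                    help='API requests rate limit per second',
                    type=float, default=60, required=False)

print(parser.parse_args([]).rate_limit)              # 60 (default)
print(parser.parse_args(['-rl', '100']).rate_limit)  # 100.0
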
104 changes: 42 additions & 62 deletions src/offat/http.py
@@ -1,4 +1,5 @@
from aiohttp import ClientSession, ClientResponse, TCPConnector
from aiohttp import ClientSession, ClientTimeout, TCPConnector
from aiolimiter import AsyncLimiter
from os import name as os_name
from typing import Optional

@@ -16,88 +17,67 @@ class AsyncRequests:
AsyncRequests class helps to send HTTP requests with rate limiting options.
'''

def __init__(self, rate_limit: Optional[int] = None, delay: Optional[float] = None, headers: Optional[dict] = None, proxy: Optional[str] = None, ssl: Optional[bool] = True, allow_redirects: Optional[bool] = True) -> None:
def __init__(self, rate_limit: float = 50, headers: dict | None = None, proxy: str | None = None, allow_redirects: bool = True, timeout: float = 60) -> None:
'''AsyncRequests class constructor
Args:
rate_limit (int): number of concurrent requests at the same time
rate_limit (int): number of requests per seconds
delay (float): delay between consecutive requests
headers (dict): overrides default headers while sending HTTP requests
proxy (str): proxy URL to be used while sending requests
ssl (bool): ignores few SSL errors if value is False
timeout (float): total timeout parameter of aiohttp.ClientTimeout
Returns:
None
'''
self._rate_limit = rate_limit
self._delay = delay
self._headers = headers
self._proxy = proxy if proxy else None
self._ssl = ssl if ssl else None
self._allow_redirects = allow_redirects
self._limiter = AsyncLimiter(max_rate=rate_limit, time_period=1)
self._timeout = ClientTimeout(total=timeout)

async def request(self, url: str, method: str = 'GET', session: ClientSession = None, *args, **kwargs) -> ClientResponse:
async def request(self, url: str, method: str = 'GET', *args, **kwargs) -> dict:
'''Send HTTP requests asynchronously
Args:
url (str): URL of the webpage/endpoint
method (str): HTTP methods (default: GET) supports GET, POST,
PUT, HEAD, OPTIONS, DELETE
session (aiohttp.ClientSession): aiohttp Client Session for sending requests
Returns:
dict: returns request and response data as dict
'''
is_new_session = False

if not session:
connector = TCPConnector(ssl=self._ssl, limit=self._rate_limit,)
session = ClientSession(headers=self._headers, connector=connector)
is_new_session = True

method = str(method).upper()
match method:
case 'GET':
sent_req = session.get(
url, proxy=self._proxy, allow_redirects=self._allow_redirects, *args, **kwargs)
case 'POST':
sent_req = session.post(
url, proxy=self._proxy, allow_redirects=self._allow_redirects, *args, **kwargs)
case 'PUT':
sent_req = session.put(
url, proxy=self._proxy, allow_redirects=self._allow_redirects, *args, **kwargs)
case 'PATCH':
sent_req = session.patch(
url, proxy=self._proxy, allow_redirects=self._allow_redirects, *args, **kwargs)
case 'HEAD':
sent_req = session.head(
url, proxy=self._proxy, allow_redirects=self._allow_redirects, *args, **kwargs)
case 'OPTIONS':
sent_req = session.options(
url, proxy=self._proxy, allow_redirects=self._allow_redirects, *args, **kwargs)
case 'DELETE':
sent_req = session.delete(
url, proxy=self._proxy, allow_redirects=self._allow_redirects, *args, **kwargs)

resp_data = None
async with sent_req as response:
resp_data = {
"status": response.status,
"req_url": str(response.request_info.real_url),
"query_url": str(response.url),
"req_method": response.request_info.method,
"req_headers": dict(**response.request_info.headers),
"res_redirection": str(response.history),
"res_headers": dict(response.headers),
"res_body": await response.text(),
}

if is_new_session:
await session.close()
del session

if self._delay:
await asyncio.sleep(self._delay)

return resp_data
async with self._limiter:
async with ClientSession(headers=self._headers, timeout=self._timeout) as session:
method = str(method).upper()
match method:
case 'GET':
req_method = session.get
case 'POST':
req_method = session.post
case 'PUT':
req_method = session.put
case 'PATCH':
req_method = session.patch
case 'HEAD':
req_method = session.head
case 'OPTIONS':
req_method = session.options
case 'DELETE':
req_method = session.delete
case _:
req_method = session.get

async with req_method(url, proxy=self._proxy, allow_redirects=self._allow_redirects, *args, **kwargs) as response:
resp_data = {
"status": response.status,
"req_url": str(response.request_info.real_url),
"query_url": str(response.url),
"req_method": response.request_info.method,
"req_headers": dict(**response.request_info.headers),
"res_redirection": str(response.history),
"res_headers": dict(response.headers),
"res_body": await response.text(),
}

return resp_data
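
The rewritten AsyncRequests drops the TCPConnector connection limit and fixed per-request delay in favour of aiolimiter plus a client timeout. A minimal sketch of that throttling pattern, assuming a reachable URL (example.com is only a placeholder):

import asyncio
from aiohttp import ClientSession, ClientTimeout
from aiolimiter import AsyncLimiter

# At most 60 entries into the limiter block per second; each request is also
# bounded by a total timeout, mirroring ClientTimeout(total=60) above.
limiter = AsyncLimiter(max_rate=60, time_period=1)
timeout = ClientTimeout(total=60)

async def fetch(url: str) -> int:
    async with limiter:  # waits here once the per-second budget is spent
        async with ClientSession(timeout=timeout) as session:
            async with session.get(url) as response:
                await response.text()
                return response.status

async def main():
    statuses = await asyncio.gather(*(fetch('https://example.com') for _ in range(5)))
    print(statuses)

asyncio.run(main())
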
1 change: 1 addition & 0 deletions src/offat/report/generator.py
@@ -64,6 +64,7 @@ def handle_report_format(results: list[dict], report_format: str | None) -> str
'results': results,
})
case _: # default: CLI table
# TODO: filter failed requests first and then create new table for failed requests
report_format = 'table'
results_table = TestResultTable().generate_result_table(
deepcopy(results))
43 changes: 23 additions & 20 deletions src/offat/tester/test_runner.py
@@ -1,9 +1,7 @@
from asyncio import ensure_future, gather
from aiohttp.client_exceptions import ClientProxyConnectionError
from enum import Enum
from rich.progress import Progress, TaskID
from traceback import print_exc
from typing import Optional


from ..http import AsyncRequests
@@ -17,11 +15,11 @@ class PayloadFor(Enum):


class TestRunner:
def __init__(self, rate_limit: Optional[int] = None, delay: Optional[float] = None, headers: Optional[dict] = None, proxy: Optional[str] = None, ssl: Optional[bool] = True) -> None:
def __init__(self, rate_limit: float = 60, headers: dict | None = None, proxy: str | None = None, ssl: bool = True) -> None:
self._client = AsyncRequests(
rate_limit=rate_limit, delay=delay, headers=headers, proxy=proxy, ssl=ssl)
rate_limit=rate_limit, headers=headers, proxy=proxy, ssl=ssl)
self.progress = Progress(console=console)
self.progress_task_id: Optional[TaskID] = None
self.progress_task_id: TaskID | None = None

def _generate_payloads(self, params: list[dict], payload_for: PayloadFor = PayloadFor.BODY):
'''Generate body payload from passed data for HTTP body and query.
@@ -85,24 +83,29 @@ async def send_request(self, test_task):
kwargs['params'] = self._generate_payloads(
query_params, payload_for=PayloadFor.QUERY)

test_result = test_task
try:
response = await self._client.request(url=url, method=http_method, *args, **kwargs)
except ConnectionRefusedError:
logger.error('Connection Failed! Server refused Connection!!')
except ClientProxyConnectionError as e:
logger.error(f'Proxy Connection Error: {e}')
# TODO: handle exception here
# add request headers to result
test_result['request_headers'] = response.get('req_headers', [])
# append response headers and body for analyzing data leak
res_body = response.get('res_body', 'No Response Body Found')
test_result['response_headers'] = response.get('res_headers')
test_result['response_body'] = res_body
test_result['response_status_code'] = response.get('status')
test_result['redirection'] = response.get('res_redirection', '')
test_result['error'] = False

test_result = test_task
except Exception as e:
test_result['request_headers'] = []
test_result['response_headers'] = []
test_result['response_body'] = 'No Response Body Found'
test_result['response_status_code'] = -1
test_result['redirection'] = ''
test_result['error'] = True

# add request headers to result
test_result['request_headers'] = response.get('req_headers', [])
# append response headers and body for analyzing data leak
res_body = response.get('res_body', 'No Response Body Found')
test_result['response_headers'] = response.get('res_headers')
test_result['response_body'] = res_body
test_result['response_status_code'] = response.get('status')
test_result['redirection'] = response.get('res_redirection', '')
logger.error(f'Unable to send request due to error: {e}')
logger.error(locals())

# advance progress bar
if self.progress_task_id:
@@ -115,7 +118,7 @@ async def send_request(self, test_task):

return test_result

async def run_tests(self, test_tasks: list, description: Optional[str]):
async def run_tests(self, test_tasks: list, description: str | None):
'''run tests generated from test generator module'''
self.progress.start()
self.progress_task_id = self.progress.add_task(
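
The send_request change above wraps the request and result population in a try/except so that a failed request still yields a result row with error=True instead of aborting the run. A minimal sketch of that pattern, using a hypothetical send() stand-in for AsyncRequests.request:

import asyncio

async def send(url: str) -> dict:
    # Stand-in for AsyncRequests.request; simulate a connection failure.
    raise ConnectionRefusedError('server refused connection')

async def run_task(test_task: dict) -> dict:
    test_result = dict(test_task)
    try:
        response = await send(test_task['url'])
        test_result['response_status_code'] = response.get('status')
        test_result['response_body'] = response.get('res_body', 'No Response Body Found')
        test_result['error'] = False
    except Exception as e:
        # Failed requests are recorded, not raised, so the test run continues.
        test_result['response_status_code'] = -1
        test_result['response_body'] = 'No Response Body Found'
        test_result['error'] = True
        print(f'Unable to send request due to error: {e}')
    return test_result

print(asyncio.run(run_task({'url': 'http://localhost:8000/api'})))
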
1 change: 0 additions & 1 deletion src/offat/tester/tester_utils.py
@@ -86,7 +86,6 @@ def generate_and_run_tests(api_parser: OpenAPIParser, regex_pattern: Optional[st

test_runner = TestRunner(
rate_limit=rate_limit,
delay=delay,
headers=req_headers,
proxy=proxy,
ssl=ssl,