From 77004366ffc1ccb121e7652a4ca63b3d548b83aa Mon Sep 17 00:00:00 2001
From: nadiraikido <166383531+nadiraikido@users.noreply.github.com>
Date: Wed, 11 Feb 2026 20:39:33 +0100
Subject: [PATCH 1/4] Use more lenient urllib3.util's parse_url

---
 aikido_zen/helpers/get_port_from_url.py | 4 ++--
 aikido_zen/helpers/get_port_from_url_test.py | 16 ++++++++--------
 aikido_zen/helpers/get_subdomains_from_url.py | 5 ++---
 aikido_zen/helpers/path_to_string.py | 10 ++++++++--
 aikido_zen/helpers/try_parse_url.py | 5 ++---
 aikido_zen/helpers/try_parse_url_path.py | 5 +++--
 aikido_zen/helpers/urls/normalize_url.py | 5 +++--
 .../path_traversal/parse_as_file_url.py | 7 ++++---
 .../ssrf/get_redirect_origin_test.py | 4 ++--
 .../ssrf/is_redirect_to_private_ip_test.py | 4 ++--
 10 files changed, 36 insertions(+), 29 deletions(-)

diff --git a/aikido_zen/helpers/get_port_from_url.py b/aikido_zen/helpers/get_port_from_url.py
index 08cae74cc..717d08e5f 100644
--- a/aikido_zen/helpers/get_port_from_url.py
+++ b/aikido_zen/helpers/get_port_from_url.py
@@ -2,7 +2,7 @@
 Helper function file, see function docstring
 """
 
-from urllib.parse import urlparse
+from urllib3.util import parse_url
 
 
 def get_port_from_url(url, parsed=False):
@@ -10,7 +10,7 @@
     Tries to retrieve a port number from the given url
     """
     if not parsed:
-        parsed_url = urlparse(url)
+        parsed_url = parse_url(url)
     else:
         parsed_url = url
 
diff --git a/aikido_zen/helpers/get_port_from_url_test.py b/aikido_zen/helpers/get_port_from_url_test.py
index 601002bcc..559e1543d 100644
--- a/aikido_zen/helpers/get_port_from_url_test.py
+++ b/aikido_zen/helpers/get_port_from_url_test.py
@@ -1,6 +1,6 @@
 import pytest
 from .get_port_from_url import get_port_from_url
-from urllib.parse import urlparse
+from urllib3.util import parse_url
 
 
 def test_get_port_from_url():
@@ -14,14 +14,14 @@
 
 def test_get_port_from_parsed_url():
-    assert get_port_from_url(urlparse("http://localhost:4000"), True) == 4000
-    assert get_port_from_url(urlparse("http://localhost"), True) == 80
+    assert get_port_from_url(parse_url("http://localhost:4000"), True) == 4000
+    assert get_port_from_url(parse_url("http://localhost"), True) == 80
     assert (
-        get_port_from_url(urlparse("https://test.com:8080/test?abc=123"), True) == 8080
+        get_port_from_url(parse_url("https://test.com:8080/test?abc=123"), True) == 8080
     )
-    assert get_port_from_url(urlparse("https://localhost"), True) == 443
-    assert get_port_from_url(urlparse("ftp://localhost"), True) is None
+    assert get_port_from_url(parse_url("https://localhost"), True) == 443
+    assert get_port_from_url(parse_url("ftp://localhost"), True) is None
     assert (
-        get_port_from_url(urlparse("http://localhost:1337\\u0000asd.php"), True) is None
+        get_port_from_url(parse_url("http://localhost:1337\\u0000asd.php"), True) is None
     )
-    assert get_port_from_url(urlparse("http://localhost:123123/asd.php"), True) is None
+    assert get_port_from_url(parse_url("http://localhost:123123/asd.php"), True) is None
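Worth noting alongside these test changes: the two parsers mostly agree once a scheme and an explicit port are present, and the leniency difference only shows up on scheme-less input. A rough comparison (behaviour of CPython's urllib.parse and urllib3 1.26/2.x as I understand it; worth re-checking against the versions pinned for this project):

    from urllib.parse import urlparse
    from urllib3.util import parse_url

    # With a scheme and an explicit port, both parsers recover the same values.
    print(urlparse("http://localhost:4000").hostname)  # localhost
    print(urlparse("http://localhost:4000").port)      # 4000
    print(parse_url("http://localhost:4000").host)     # localhost
    print(parse_url("http://localhost:4000").port)     # 4000

    # Scheme-less input is where parse_url is more forgiving: urlparse sees a
    # scheme (or just a path, depending on the Python version) and finds no
    # host, while parse_url still recognises an authority and a port.
    print(urlparse("localhost:4000").hostname)  # None
    print(parse_url("localhost:4000").host)     # localhost
    print(parse_url("localhost:4000").port)     # 4000

As far as I know neither parser fills in scheme default ports, so the 80/443 fallbacks asserted in the tests above come from get_port_from_url itself.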
diff --git a/aikido_zen/helpers/get_subdomains_from_url.py b/aikido_zen/helpers/get_subdomains_from_url.py
index c43e02765..512160b03 100644
--- a/aikido_zen/helpers/get_subdomains_from_url.py
+++ b/aikido_zen/helpers/get_subdomains_from_url.py
@@ -1,8 +1,7 @@
 """
 Helper function file, see function docstring
 """
-
-from urllib.parse import urlparse
+from urllib3.util import parse_url
 
 
 def get_subdomains_from_url(url):
@@ -11,7 +10,7 @@
     """
     if not isinstance(url, str):
         return []
-    host = urlparse(url).hostname
+    host = parse_url(url).hostname
    if not host:
         return []
     parts = host.split(".")
diff --git a/aikido_zen/helpers/path_to_string.py b/aikido_zen/helpers/path_to_string.py
index 0e96fe2a3..efbeaf1c8 100644
--- a/aikido_zen/helpers/path_to_string.py
+++ b/aikido_zen/helpers/path_to_string.py
@@ -1,6 +1,6 @@
 """Helper function file"""
 
-from urllib.parse import urlparse
+from urllib3.util import parse_url
 from pathlib import PurePath
 
 
@@ -8,20 +8,26 @@ def path_to_string(path):
     """Converts an obj that represents a path into a string"""
     if isinstance(path, str):
         try:
-            parsed_url = urlparse(path)
+            parsed_url = parse_url(path)
             if parsed_url and parsed_url.scheme == "file":
                 return parsed_url.path
         except Exception:
+            print("can't parse thsi shit! bye")
             return None
         return path
+    print("can't parse thsi shit! bye - 9")
     if isinstance(path, bytes):
         try:
             return path.decode("utf-8")
         except UnicodeDecodeError:
+            print("can't parse thsi shit! by - 4 e")
             return None
     if isinstance(path, PurePath):
         # Stringify PurePath. This can still allow path traversal but in extremely
         # limited cases so it's safe to just stringify for now.
         return str(path)
+
+
+    print("can't parse thsi shit! bye - 1")
     return None
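The path_to_string change only touches the file-URL branch, and both result objects expose .scheme and .path, so the surrounding logic stays the same. A small sketch of what that branch sees (hypothetical paths; same version caveat as above):

    from urllib.parse import urlparse
    from urllib3.util import parse_url

    file_url = "file:///etc/hosts"
    print(urlparse(file_url).scheme, urlparse(file_url).path)    # file /etc/hosts
    print(parse_url(file_url).scheme, parse_url(file_url).path)  # file /etc/hosts

    # A plain path carries no "file" scheme, so path_to_string() falls through
    # and returns the string unchanged.
    print(urlparse("/tmp/report.txt").scheme)   # '' (empty string)
    print(parse_url("/tmp/report.txt").scheme)  # None

The "no scheme" representation differs ('' versus None), but the == "file" comparison in path_to_string behaves the same either way.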
diff --git a/aikido_zen/helpers/try_parse_url.py b/aikido_zen/helpers/try_parse_url.py
index c4a15be47..a0b8def70 100644
--- a/aikido_zen/helpers/try_parse_url.py
+++ b/aikido_zen/helpers/try_parse_url.py
@@ -1,12 +1,11 @@
 """Helper function file"""
 
-from urllib.parse import urlparse
-
+from urllib3.util import parse_url
 
 def try_parse_url(url):
     """Tries to parse the url using urlparse"""
     try:
-        parsed_url = urlparse(url)
+        parsed_url = parse_url(url)
         if parsed_url.scheme and parsed_url.netloc:
             return parsed_url
         return None
diff --git a/aikido_zen/helpers/try_parse_url_path.py b/aikido_zen/helpers/try_parse_url_path.py
index b1654d3bd..def0b0de5 100644
--- a/aikido_zen/helpers/try_parse_url_path.py
+++ b/aikido_zen/helpers/try_parse_url_path.py
@@ -4,15 +4,16 @@
 Includes try_parse_url_path
 """
 
-from urllib.parse import urlparse
+from urllib3.util import parse_url
 import regex as re
 
 
 def try_parse_url(url):
     """try to parse Url with urlparse"""
     try:
-        return urlparse(url)
+        return parse_url(url)
     except ValueError:
+        print("value error! can't parse this shit!")
         return None
diff --git a/aikido_zen/helpers/urls/normalize_url.py b/aikido_zen/helpers/urls/normalize_url.py
index 35f01c22f..18cb54120 100644
--- a/aikido_zen/helpers/urls/normalize_url.py
+++ b/aikido_zen/helpers/urls/normalize_url.py
@@ -1,12 +1,13 @@
 """Helper function file, exports normalize_url"""
 
-from urllib.parse import urlparse, urlunparse
+from urllib3.util import parse_url
+from urllib.parse import urlunparse
 
 
 def normalize_url(url):
     """Normalizes the url"""
     # Parse the URL
-    parsed_url = urlparse(url)
+    parsed_url = parse_url(url)
 
     # Normalize components
     scheme = parsed_url.scheme.lower()  # Lowercase scheme
diff --git a/aikido_zen/vulnerabilities/path_traversal/parse_as_file_url.py b/aikido_zen/vulnerabilities/path_traversal/parse_as_file_url.py
index 9623682bf..2272a2fa0 100644
--- a/aikido_zen/vulnerabilities/path_traversal/parse_as_file_url.py
+++ b/aikido_zen/vulnerabilities/path_traversal/parse_as_file_url.py
@@ -2,21 +2,22 @@
 Mainly exports `parse_as_file_url`
 """
 
-from urllib.parse import urlparse, urlunparse
+from urllib3.util import parse_url
+from urllib.parse import urlunparse
 from pathlib import Path
 
 
 def parse_as_file_url(path):
     """Convert a file path as a URL to a file path."""
     if path.startswith("file:"):
-        parsed_url = urlparse(path)
+        parsed_url = parse_url(path)
         file_path = Path(parsed_url.path)
     else:
         if not path.startswith("/"):
             path = f"/{path}"
         file_path = Path(path)
         file_url = urlunparse(("file", "", str(file_path), "", "", ""))
-        parsed_url = urlparse(file_url)
+        parsed_url = parse_url(file_url)
 
     normalized_path = Path(parsed_url.path).resolve()
diff --git a/aikido_zen/vulnerabilities/ssrf/get_redirect_origin_test.py b/aikido_zen/vulnerabilities/ssrf/get_redirect_origin_test.py
index fc4f51148..5618fecfc 100644
--- a/aikido_zen/vulnerabilities/ssrf/get_redirect_origin_test.py
+++ b/aikido_zen/vulnerabilities/ssrf/get_redirect_origin_test.py
@@ -1,11 +1,11 @@
 import pytest
-from urllib.parse import urlparse, urlunparse
+from urllib3.util import parse_url
 from .get_redirect_origin import get_redirect_origin
 
 
 # Helper function to create URL objects
 def create_url(href):
-    return urlparse(href)
+    return parse_url(href)
 
 
 # Test cases
diff --git a/aikido_zen/vulnerabilities/ssrf/is_redirect_to_private_ip_test.py b/aikido_zen/vulnerabilities/ssrf/is_redirect_to_private_ip_test.py
index a79caf03f..cfdf82b19 100644
--- a/aikido_zen/vulnerabilities/ssrf/is_redirect_to_private_ip_test.py
+++ b/aikido_zen/vulnerabilities/ssrf/is_redirect_to_private_ip_test.py
@@ -1,12 +1,12 @@
 import pytest
 from unittest.mock import MagicMock, patch
 from .is_redirect_to_private_ip import is_redirect_to_private_ip
-from urllib.parse import urlparse, urlunparse
+from urllib3.util import parse_url
 
 
 # Helper function to create URL objects
 def create_url(href):
-    return urlparse(href)
+    return parse_url(href)
 
 
 def test_is_redirect_to_private_ip_success():
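One side effect of this wholesale swap is the type of the parsed object: urlparse returns a ParseResult, parse_url returns urllib3's Url, and they do not share every accessor. A quick reference for the attributes used in this codebase (based on the documented urllib3 API; verify against the pinned version):

    from urllib.parse import urlparse
    from urllib3.util import parse_url

    std = urlparse("https://test.com:8080/test?abc=123")
    print(std.hostname, std.port)  # test.com 8080
    print(std.geturl())            # https://test.com:8080/test?abc=123

    url = parse_url("https://test.com:8080/test?abc=123")
    print(url.host, url.port)  # test.com 8080
    print(url.hostname)        # test.com (compatibility alias for .host)
    print(url.url)             # https://test.com:8080/test?abc=123 (property, not a method)
    # url.geturl() would raise AttributeError: urllib3's Url has no geturl().

Call sites that reassemble the URL, such as find_hostname_in_userinput later in this series, therefore need .url instead of .geturl().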
From 9740144ad4654ef753b613ab189583f51d4511fa Mon Sep 17 00:00:00 2001
From: nadiraikido <166383531+nadiraikido@users.noreply.github.com>
Date: Thu, 12 Feb 2026 14:12:15 +0100
Subject: [PATCH 2/4] Revert "Use more lenient urllib3.util's parse_url"

This reverts commit 77004366ffc1ccb121e7652a4ca63b3d548b83aa.
---
 aikido_zen/helpers/get_port_from_url.py | 4 ++--
 aikido_zen/helpers/get_port_from_url_test.py | 16 ++++++++--------
 aikido_zen/helpers/get_subdomains_from_url.py | 5 +++--
 aikido_zen/helpers/path_to_string.py | 10 ++--------
 aikido_zen/helpers/try_parse_url.py | 5 +++--
 aikido_zen/helpers/try_parse_url_path.py | 5 ++---
 aikido_zen/helpers/urls/normalize_url.py | 5 ++---
 .../path_traversal/parse_as_file_url.py | 7 +++----
 .../ssrf/get_redirect_origin_test.py | 4 ++--
 .../ssrf/is_redirect_to_private_ip_test.py | 4 ++--
 10 files changed, 29 insertions(+), 36 deletions(-)

diff --git a/aikido_zen/helpers/get_port_from_url.py b/aikido_zen/helpers/get_port_from_url.py
index 717d08e5f..08cae74cc 100644
--- a/aikido_zen/helpers/get_port_from_url.py
+++ b/aikido_zen/helpers/get_port_from_url.py
@@ -2,7 +2,7 @@
 Helper function file, see function docstring
 """
 
-from urllib3.util import parse_url
+from urllib.parse import urlparse
 
 
 def get_port_from_url(url, parsed=False):
@@ -10,7 +10,7 @@
     Tries to retrieve a port number from the given url
     """
     if not parsed:
-        parsed_url = parse_url(url)
+        parsed_url = urlparse(url)
     else:
         parsed_url = url
 
diff --git a/aikido_zen/helpers/get_port_from_url_test.py b/aikido_zen/helpers/get_port_from_url_test.py
index 559e1543d..601002bcc 100644
--- a/aikido_zen/helpers/get_port_from_url_test.py
+++ b/aikido_zen/helpers/get_port_from_url_test.py
@@ -1,6 +1,6 @@
 import pytest
 from .get_port_from_url import get_port_from_url
-from urllib3.util import parse_url
+from urllib.parse import urlparse
 
 
 def test_get_port_from_url():
@@ -14,14 +14,14 @@
 
 def test_get_port_from_parsed_url():
-    assert get_port_from_url(parse_url("http://localhost:4000"), True) == 4000
-    assert get_port_from_url(parse_url("http://localhost"), True) == 80
+    assert get_port_from_url(urlparse("http://localhost:4000"), True) == 4000
+    assert get_port_from_url(urlparse("http://localhost"), True) == 80
     assert (
-        get_port_from_url(parse_url("https://test.com:8080/test?abc=123"), True) == 8080
+        get_port_from_url(urlparse("https://test.com:8080/test?abc=123"), True) == 8080
     )
-    assert get_port_from_url(parse_url("https://localhost"), True) == 443
-    assert get_port_from_url(parse_url("ftp://localhost"), True) is None
+    assert get_port_from_url(urlparse("https://localhost"), True) == 443
+    assert get_port_from_url(urlparse("ftp://localhost"), True) is None
     assert (
-        get_port_from_url(parse_url("http://localhost:1337\\u0000asd.php"), True) is None
+        get_port_from_url(urlparse("http://localhost:1337\\u0000asd.php"), True) is None
     )
-    assert get_port_from_url(parse_url("http://localhost:123123/asd.php"), True) is None
+    assert get_port_from_url(urlparse("http://localhost:123123/asd.php"), True) is None
diff --git a/aikido_zen/helpers/get_subdomains_from_url.py b/aikido_zen/helpers/get_subdomains_from_url.py
index 512160b03..c43e02765 100644
--- a/aikido_zen/helpers/get_subdomains_from_url.py
+++ b/aikido_zen/helpers/get_subdomains_from_url.py
@@ -1,7 +1,8 @@
 """
 Helper function file, see function docstring
 """
-from urllib3.util import parse_url
+
+from urllib.parse import urlparse
 
 
 def get_subdomains_from_url(url):
@@ -10,7 +11,7 @@
     """
     if not isinstance(url, str):
         return []
-    host = parse_url(url).hostname
+    host = urlparse(url).hostname
     if not host:
         return []
     parts = host.split(".")
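For the subdomain helper the only input that matters is the hostname string fed to split("."); on well-formed absolute URLs the two parsers agree, and the `if not host` guard covers inputs where no host is found. Illustrative sketch (hypothetical URLs, same version caveats as above):

    from urllib.parse import urlparse
    from urllib3.util import parse_url

    url = "https://api.v2.example.com/users?id=1"
    print(urlparse(url).hostname.split("."))   # ['api', 'v2', 'example', 'com']
    print(parse_url(url).hostname.split("."))  # ['api', 'v2', 'example', 'com']

    # A bare path yields no host with either parser, so the helper returns [].
    print(urlparse("/just/a/path").hostname)  # None
    print(parse_url("/just/a/path").host)     # None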
diff --git a/aikido_zen/helpers/path_to_string.py b/aikido_zen/helpers/path_to_string.py
index efbeaf1c8..0e96fe2a3 100644
--- a/aikido_zen/helpers/path_to_string.py
+++ b/aikido_zen/helpers/path_to_string.py
@@ -1,6 +1,6 @@
 """Helper function file"""
 
-from urllib3.util import parse_url
+from urllib.parse import urlparse
 from pathlib import PurePath
 
 
@@ -8,26 +8,20 @@ def path_to_string(path):
     """Converts an obj that represents a path into a string"""
     if isinstance(path, str):
         try:
-            parsed_url = parse_url(path)
+            parsed_url = urlparse(path)
             if parsed_url and parsed_url.scheme == "file":
                 return parsed_url.path
         except Exception:
-            print("can't parse thsi shit! bye")
             return None
         return path
-    print("can't parse thsi shit! bye - 9")
     if isinstance(path, bytes):
         try:
             return path.decode("utf-8")
         except UnicodeDecodeError:
-            print("can't parse thsi shit! by - 4 e")
             return None
     if isinstance(path, PurePath):
         # Stringify PurePath. This can still allow path traversal but in extremely
         # limited cases so it's safe to just stringify for now.
         return str(path)
-
-
-    print("can't parse thsi shit! bye - 1")
     return None
diff --git a/aikido_zen/helpers/try_parse_url.py b/aikido_zen/helpers/try_parse_url.py
index a0b8def70..c4a15be47 100644
--- a/aikido_zen/helpers/try_parse_url.py
+++ b/aikido_zen/helpers/try_parse_url.py
@@ -1,11 +1,12 @@
 """Helper function file"""
 
-from urllib3.util import parse_url
+from urllib.parse import urlparse
+
 
 def try_parse_url(url):
     """Tries to parse the url using urlparse"""
     try:
-        parsed_url = parse_url(url)
+        parsed_url = urlparse(url)
         if parsed_url.scheme and parsed_url.netloc:
             return parsed_url
         return None
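Both versions of try_parse_url wrap the parser in a broad try/except, and the two parsers fail at different points. Roughly (recent CPython and urllib3 behaviour as I understand it, not verified against the pinned versions):

    from urllib.parse import urlparse
    from urllib3.util import parse_url
    from urllib3.exceptions import LocationParseError

    bad_port = "http://localhost:123123/asd.php"

    # urlparse() accepts the string; the out-of-range port only surfaces
    # when the .port property is read (ValueError on current CPython).
    parsed = urlparse(bad_port)
    try:
        parsed.port
    except ValueError as exc:
        print("urlparse deferred failure:", exc)

    # parse_url() validates while parsing and may raise straight away.
    try:
        parse_url(bad_port)
    except LocationParseError as exc:
        print("parse_url immediate failure:", exc)

Either way, the except Exception in the helper keeps its contract: return a parsed object or None, never raise.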
diff --git a/aikido_zen/helpers/try_parse_url_path.py b/aikido_zen/helpers/try_parse_url_path.py
index def0b0de5..b1654d3bd 100644
--- a/aikido_zen/helpers/try_parse_url_path.py
+++ b/aikido_zen/helpers/try_parse_url_path.py
@@ -4,16 +4,15 @@
 Includes try_parse_url_path
 """
 
-from urllib3.util import parse_url
+from urllib.parse import urlparse
 import regex as re
 
 
 def try_parse_url(url):
     """try to parse Url with urlparse"""
     try:
-        return parse_url(url)
+        return urlparse(url)
     except ValueError:
-        print("value error! can't parse this shit!")
         return None
diff --git a/aikido_zen/helpers/urls/normalize_url.py b/aikido_zen/helpers/urls/normalize_url.py
index 18cb54120..35f01c22f 100644
--- a/aikido_zen/helpers/urls/normalize_url.py
+++ b/aikido_zen/helpers/urls/normalize_url.py
@@ -1,13 +1,12 @@
 """Helper function file, exports normalize_url"""
 
-from urllib3.util import parse_url
-from urllib.parse import urlunparse
+from urllib.parse import urlparse, urlunparse
 
 
 def normalize_url(url):
     """Normalizes the url"""
     # Parse the URL
-    parsed_url = parse_url(url)
+    parsed_url = urlparse(url)
 
     # Normalize components
     scheme = parsed_url.scheme.lower()  # Lowercase scheme
diff --git a/aikido_zen/vulnerabilities/path_traversal/parse_as_file_url.py b/aikido_zen/vulnerabilities/path_traversal/parse_as_file_url.py
index 2272a2fa0..9623682bf 100644
--- a/aikido_zen/vulnerabilities/path_traversal/parse_as_file_url.py
+++ b/aikido_zen/vulnerabilities/path_traversal/parse_as_file_url.py
@@ -2,22 +2,21 @@
 Mainly exports `parse_as_file_url`
 """
 
-from urllib3.util import parse_url
-from urllib.parse import urlunparse
+from urllib.parse import urlparse, urlunparse
 from pathlib import Path
 
 
 def parse_as_file_url(path):
     """Convert a file path as a URL to a file path."""
     if path.startswith("file:"):
-        parsed_url = parse_url(path)
+        parsed_url = urlparse(path)
         file_path = Path(parsed_url.path)
     else:
         if not path.startswith("/"):
             path = f"/{path}"
         file_path = Path(path)
         file_url = urlunparse(("file", "", str(file_path), "", "", ""))
-        parsed_url = parse_url(file_url)
+        parsed_url = urlparse(file_url)
 
     normalized_path = Path(parsed_url.path).resolve()
diff --git a/aikido_zen/vulnerabilities/ssrf/get_redirect_origin_test.py b/aikido_zen/vulnerabilities/ssrf/get_redirect_origin_test.py
index 5618fecfc..fc4f51148 100644
--- a/aikido_zen/vulnerabilities/ssrf/get_redirect_origin_test.py
+++ b/aikido_zen/vulnerabilities/ssrf/get_redirect_origin_test.py
@@ -1,11 +1,11 @@
 import pytest
-from urllib3.util import parse_url
+from urllib.parse import urlparse, urlunparse
 from .get_redirect_origin import get_redirect_origin
 
 
 # Helper function to create URL objects
 def create_url(href):
-    return parse_url(href)
+    return urlparse(href)
 
 
 # Test cases
diff --git a/aikido_zen/vulnerabilities/ssrf/is_redirect_to_private_ip_test.py b/aikido_zen/vulnerabilities/ssrf/is_redirect_to_private_ip_test.py
index cfdf82b19..a79caf03f 100644
--- a/aikido_zen/vulnerabilities/ssrf/is_redirect_to_private_ip_test.py
+++ b/aikido_zen/vulnerabilities/ssrf/is_redirect_to_private_ip_test.py
@@ -1,12 +1,12 @@
 import pytest
 from unittest.mock import MagicMock, patch
 from .is_redirect_to_private_ip import is_redirect_to_private_ip
-from urllib3.util import parse_url
+from urllib.parse import urlparse, urlunparse
 
 
 # Helper function to create URL objects
 def create_url(href):
-    return parse_url(href)
+    return urlparse(href)
 
 
 def test_is_redirect_to_private_ip_success():

From 3c584e4931da8b851218d75e6b39654c80dcfd0d Mon Sep 17 00:00:00 2001
From: nadiraikido <166383531+nadiraikido@users.noreply.github.com>
Date: Thu, 12 Feb 2026 14:17:13 +0100
Subject: [PATCH 3/4] Use more lenient urllib3.util.parse_url

---
 aikido_zen/helpers/try_parse_url.py | 4 ++--
 aikido_zen/vulnerabilities/ssrf/find_hostname_in_userinput.py | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/aikido_zen/helpers/try_parse_url.py b/aikido_zen/helpers/try_parse_url.py
index c4a15be47..6efef846d 100644
--- a/aikido_zen/helpers/try_parse_url.py
+++ b/aikido_zen/helpers/try_parse_url.py
@@ -1,12 +1,12 @@
 """Helper function file"""
 
-from urllib.parse import urlparse
+from urllib3.util import parse_url
 
 
 def try_parse_url(url):
     """Tries to parse the url using urlparse"""
     try:
-        parsed_url = urlparse(url)
+        parsed_url = parse_url(url)
         if parsed_url.scheme and parsed_url.netloc:
             return parsed_url
         return None
diff --git a/aikido_zen/vulnerabilities/ssrf/find_hostname_in_userinput.py b/aikido_zen/vulnerabilities/ssrf/find_hostname_in_userinput.py
index 07934b04c..2091f319c 100644
--- a/aikido_zen/vulnerabilities/ssrf/find_hostname_in_userinput.py
+++ b/aikido_zen/vulnerabilities/ssrf/find_hostname_in_userinput.py
@@ -30,7 +30,7 @@ def find_hostname_in_userinput(user_input, hostname_options: List[str], port=Non
     for variant in variants:
         user_input_url = try_parse_url(variant)
         if user_input_url and user_input_url.hostname in hostname_options:
-            user_port = get_port_from_url(user_input_url.geturl())
+            user_port = get_port_from_url(user_input_url.url)
 
             # We were unable to retrieve the port from the URL, likely because it contains an invalid port.
             # Let's assume we have found the hostname in the user input, even though it doesn't match on port.

From 657e403a4843d54b5f93db702e32658274f207a9 Mon Sep 17 00:00:00 2001
From: nadiraikido <166383531+nadiraikido@users.noreply.github.com>
Date: Thu, 12 Feb 2026 15:27:29 +0100
Subject: [PATCH 4/4] Add try_lenient_parse_url(...)

---
 aikido_zen/helpers/try_parse_url.py | 14 ++++++++++++--
 .../ssrf/find_hostname_in_userinput.py | 4 ++--
 .../vulnerabilities/ssrf/handle_http_response.py | 4 ++--
 3 files changed, 16 insertions(+), 6 deletions(-)

diff --git a/aikido_zen/helpers/try_parse_url.py b/aikido_zen/helpers/try_parse_url.py
index 6efef846d..f1bbff27e 100644
--- a/aikido_zen/helpers/try_parse_url.py
+++ b/aikido_zen/helpers/try_parse_url.py
@@ -1,14 +1,24 @@
 """Helper function file"""
 
+from urllib.parse import urlparse
 from urllib3.util import parse_url
-
 
 def try_parse_url(url):
     """Tries to parse the url using urlparse"""
     try:
-        parsed_url = parse_url(url)
+        parsed_url = urlparse(url)
         if parsed_url.scheme and parsed_url.netloc:
             return parsed_url
         return None
     except Exception:
         return None
+
+def try_lenient_parse_url(url):
+    """Tries to parse the url using parse_url, which is more lenient than urlparse"""
+    try:
+        parsed_url = parse_url(url)
+        if parsed_url.scheme and parsed_url.host:
+            return parsed_url
+        return None
+    except Exception:
+        return None
diff --git a/aikido_zen/vulnerabilities/ssrf/find_hostname_in_userinput.py b/aikido_zen/vulnerabilities/ssrf/find_hostname_in_userinput.py
index 2091f319c..9dbc02009 100644
--- a/aikido_zen/vulnerabilities/ssrf/find_hostname_in_userinput.py
+++ b/aikido_zen/vulnerabilities/ssrf/find_hostname_in_userinput.py
@@ -6,7 +6,7 @@
 from urllib.parse import unquote
 
 from aikido_zen.helpers.get_port_from_url import get_port_from_url
-from aikido_zen.helpers.try_parse_url import try_parse_url
+from aikido_zen.helpers.try_parse_url import try_lenient_parse_url
 
 
 def find_hostname_in_userinput(user_input, hostname_options: List[str], port=None):
@@ -28,7 +28,7 @@ def find_hostname_in_userinput(user_input, hostname_options: List[str], port=Non
     ]
 
     for variant in variants:
-        user_input_url = try_parse_url(variant)
+        user_input_url = try_lenient_parse_url(variant)
         if user_input_url and user_input_url.hostname in hostname_options:
             user_port = get_port_from_url(user_input_url.url)
diff --git a/aikido_zen/vulnerabilities/ssrf/handle_http_response.py b/aikido_zen/vulnerabilities/ssrf/handle_http_response.py
index 9806490c8..8d3b7c946 100644
--- a/aikido_zen/vulnerabilities/ssrf/handle_http_response.py
+++ b/aikido_zen/vulnerabilities/ssrf/handle_http_response.py
@@ -2,7 +2,7 @@
 from aikido_zen.context import get_current_context
 from aikido_zen.helpers.is_redirect_status_code import is_redirect_status_code
-from aikido_zen.helpers.try_parse_url import try_parse_url
+from aikido_zen.helpers.try_parse_url import try_lenient_parse_url
 from .find_hostname_in_context import find_hostname_in_context
 from .get_redirect_origin import get_redirect_origin
 
@@ -29,7 +29,7 @@ def handle_http_response(http_response, source):
     if not isinstance(location, str):
         return
 
-    parsed_location = try_parse_url(location)
+    parsed_location = try_lenient_parse_url(location)
     if not parsed_location:
         return
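To close the series off, a usage sketch of the two helpers as they now coexist, including the redirect cases handle_http_response cares about (illustrative inputs only; the Location values are hypothetical):

    from aikido_zen.helpers.try_parse_url import try_parse_url, try_lenient_parse_url

    # Both helpers accept a normal absolute URL but return different objects:
    # a ParseResult from urlparse versus urllib3's Url.
    strict = try_parse_url("https://app.example.com:8443/login")
    lenient = try_lenient_parse_url("https://app.example.com:8443/login")
    print(strict.hostname, strict.port)    # app.example.com 8443
    print(lenient.hostname, lenient.port)  # app.example.com 8443
    print(lenient.url)                     # reassembled URL, as used in find_hostname_in_userinput

    # An absolute Location header parses to a Url with a host, so the
    # redirect-origin and private-IP checks can run on it.
    print(try_lenient_parse_url("https://203.0.113.7/admin").host)  # 203.0.113.7

    # A relative redirect has no scheme/host; both helpers return None and
    # handle_http_response() bails out early, exactly as before this change.
    print(try_parse_url("/admin"))          # None
    print(try_lenient_parse_url("/admin"))  # None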