-
Notifications
You must be signed in to change notification settings - Fork 33
/
Copy pathidentify-header-connection-abuse.py
108 lines (91 loc) · 5.18 KB
/
identify-header-connection-abuse.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
#!/usr/bin/env python
import requests
import hashlib
import argparse
from termcolor import colored
from tabulate import tabulate
from http.client import HTTPConnection
"""
Script to detect the presence of cases for which the HTTP request header "Connection" can be abused.
The abuse aims to cause middlewares handling the HTTP request to remove "hop-by-hop" headers
added by previous middleware in the request processing pipeline.
The objective is to cause an unexpected behavior of the target web app due to the absence of some request headers.
Based on the following references:
- Article by Stéphane Duchemin in MISC n°127 (Exploit Corner):
- https://boutique.ed-diamond.com/en-kiosque/1658-misc-127.html
- https://www.linkedin.com/in/stephanedcm/
- https://nvd.nist.gov/vuln/detail/CVE-2022-1388
- https://github.com/Al1ex/CVE-2022-1388
- https://stackoverflow.com/a/68354944
Main reference used:
- https://datatracker.ietf.org/doc/html/rfc2616#section-14.10
"HTTP/1.1 proxies MUST parse the Connection header field before a message is forwarded and, for each connection-token in this field,
remove any header field(s) from the message with the same name as the connection-token."
- https://datatracker.ietf.org/doc/html/rfc2616#section-13.5.1
"Hop-by-hop headers, which are meaningful only for a single transport-level connection, and are not stored by caches or forwarded by proxies."
Dependencies:
pip3 install requests termcolor tabulate
"""
# Config
# Disable TLS warning when validation is disabled when requests is used
# (probes are sent with verify=False so self-signed targets can be tested).
requests.packages.urllib3.disable_warnings(requests.packages.urllib3.exceptions.InsecureRequestWarning)
# Constants
# Define proxy to debug request sent using requests
# (empty dict = direct connection; overridden by the -x command line option).
PROXIES = {}
# Realistic browser User-Agent so probes are not filtered as bot traffic.
USER_AGENT = "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:109.0) Gecko/20100101 Firefox/113.0"
# Per-request timeout in seconds for every probe.
TIMEOUT = 20
# Candidate request headers to strip, via the request "Connection" header, from
# the request finally received by the backend. Mostly client-identification and
# proxy/forwarding headers whose absence may change backend behavior.
# Kept sorted so the results table has a stable, readable order.
HEADERS_TO_REMOVE = sorted([
    "Origin", "Client-IP", "Forwarded", "Forwarded-For", "Forwarded-For-IP",
    "X-Client-IP", "X-Custom-IP-Authorization", "X-Forwarded", "X-Forwarded-By",
    "X-Forwarded-For", "X-Forwarded-For-Original", "X-Forwarded-Host",
    "X-Forwarder-For", "X-Originating-IP", "X-Remote-Addr", "X-Remote-IP",
    "CF-Connecting-Ip", "X-Real-IP", "True-Client-IP", "X-Host",
    "X-Forwarded-Server", "X-HTTP-Host-Override", "X-Original-Host",
    "X-Original-Remote-Addr", "Proxy-Authenticate", "Proxy-Authorization",
    "X-Forwarded-Proto",
])
class ResponseData:
    """Fingerprint of one HTTP response (status, body hash, headers hash).

    Two probe responses are considered equivalent when their fingerprints
    match; a divergence from the baseline suggests the stripped header
    influenced the backend's behavior.
    """

    def __init__(self, response, comment=""):
        # Headers whose values naturally vary between otherwise identical
        # responses; they are excluded so equivalent responses hash equal.
        volatile = ("date", "set-cookie", "expires", "etag", "last-modified", "content-length")
        self.return_code = response.status_code
        self.comment = comment
        body = response.text
        self.body_length = len(body)
        self.body_content_hash = hashlib.sha1(body.encode("utf-8")).hexdigest()
        stable_headers = "".join(
            f"{name}={value}"
            for name, value in response.headers.items()
            if name.lower() not in volatile
        )
        self.headers_hash = hashlib.sha1(stable_headers.encode("utf-8")).hexdigest()
def send_probe_request(full_url, header_to_remove=None):
    """Send one probe GET request and return its fingerprint as a one-item list.

    When header_to_remove is provided, the request carries a crafted
    "Connection" header naming it, so that an RFC 2616 compliant
    intermediary strips that header before forwarding the request.
    Without it, the request is the unmodified baseline.
    """
    probe_headers = {"User-Agent": USER_AGENT}
    label = "Baseline"
    if header_to_remove is not None:
        label = f"Remove header {header_to_remove}"
        probe_headers["Connection"] = f"keep-alive, {header_to_remove}"
    # Redirects are not followed and TLS is not verified: the raw first
    # response is what gets compared against the baseline.
    reply = requests.get(full_url, verify=False, proxies=PROXIES, headers=probe_headers, allow_redirects=False, timeout=TIMEOUT)
    return [ResponseData(reply, label)]
def main(full_url):
    """Probe full_url once per candidate header and print a comparison table.

    The first row is the baseline (no Connection abuse); every following row
    corresponds to one header from HEADERS_TO_REMOVE. Differing fingerprints
    versus the baseline indicate the stripped header mattered.
    """
    print(colored("[+] Execution context:", "yellow"))
    print(f"Full URL : {full_url}")
    print(f"Proxy : {PROXIES}")
    print(colored("[+] Force HTTP client to use HTTP/1.1 ...", "yellow"))
    # Pin the underlying http.client to HTTP/1.1 so the crafted Connection
    # header semantics apply end to end.
    HTTPConnection._http_vsn_str = "HTTP/1.1"
    print(colored("[+] Send crafted requests for the different cases ...", "yellow"))
    results = send_probe_request(full_url)
    for candidate in HEADERS_TO_REMOVE:
        results.extend(send_probe_request(full_url, candidate))
    print(colored("[+] Results:", "yellow"))
    table = [["Case", "HTTP Code", "Body length", "Body hash", "Headers hash"]]
    table.extend(
        [r.comment, r.return_code, r.body_length, r.body_content_hash, r.headers_hash]
        for r in results
    )
    print(tabulate(table, headers="firstrow", tablefmt="github"))
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Detect the presence of cases for which the HTTP request header 'Connection' can be abused.")
required_params = parser.add_argument_group("required named arguments")
required_params.add_argument("-b", action="store", dest="full_url", help="Base URL (ex: 'https://righettod.eu').", required=True)
parser.add_argument("-x", action="store", dest="proxy", help="Proxy to use for all probe requests (ex: 'http://127.0.0.1:8080', default to no proxy).", required=False, default=None)
args = parser.parse_args()
if args.proxy is not None:
PROXIES = {"http": args.proxy, "https": args.proxy}
main(args.full_url)