aboutsummaryrefslogtreecommitdiff
path: root/.venv/lib/python3.12/site-packages/gunicorn/http
diff options
context:
space:
mode:
Diffstat (limited to '.venv/lib/python3.12/site-packages/gunicorn/http')
-rw-r--r--.venv/lib/python3.12/site-packages/gunicorn/http/__init__.py9
-rw-r--r--.venv/lib/python3.12/site-packages/gunicorn/http/body.py262
-rw-r--r--.venv/lib/python3.12/site-packages/gunicorn/http/errors.py120
-rw-r--r--.venv/lib/python3.12/site-packages/gunicorn/http/message.py360
-rw-r--r--.venv/lib/python3.12/site-packages/gunicorn/http/parser.py52
-rw-r--r--.venv/lib/python3.12/site-packages/gunicorn/http/unreader.py79
-rw-r--r--.venv/lib/python3.12/site-packages/gunicorn/http/wsgi.py393
7 files changed, 1275 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/gunicorn/http/__init__.py b/.venv/lib/python3.12/site-packages/gunicorn/http/__init__.py
new file mode 100644
index 00000000..1da6f3ec
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/gunicorn/http/__init__.py
@@ -0,0 +1,9 @@
+# -*- coding: utf-8 -
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+
+from gunicorn.http.message import Message, Request
+from gunicorn.http.parser import RequestParser
+
+__all__ = ['Message', 'Request', 'RequestParser']
diff --git a/.venv/lib/python3.12/site-packages/gunicorn/http/body.py b/.venv/lib/python3.12/site-packages/gunicorn/http/body.py
new file mode 100644
index 00000000..aa1af2cb
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/gunicorn/http/body.py
@@ -0,0 +1,262 @@
+# -*- coding: utf-8 -
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+
+import io
+import sys
+
+from gunicorn.http.errors import (NoMoreData, ChunkMissingTerminator,
+ InvalidChunkSize)
+
+
class ChunkedReader(object):
    """Body reader for requests using Transfer-Encoding: chunked.

    Chunk payloads are produced lazily by the ``parse_chunked``
    generator and staged in an internal BytesIO buffer so callers can
    read arbitrary byte counts.
    """

    def __init__(self, req, unreader):
        self.req = req
        # Generator over decoded chunk-payload fragments.
        self.parser = self.parse_chunked(unreader)
        self.buf = io.BytesIO()

    def read(self, size):
        """Return up to ``size`` bytes of decoded body data."""
        if not isinstance(size, int):
            raise TypeError("size must be an integer type")
        if size < 0:
            raise ValueError("Size must be positive.")
        if size == 0:
            return b""

        if self.parser:
            # Pull fragments until the buffer can satisfy the request or
            # the generator is exhausted (last chunk seen).
            while self.buf.tell() < size:
                try:
                    self.buf.write(next(self.parser))
                except StopIteration:
                    self.parser = None
                    break

        data = self.buf.getvalue()
        ret, rest = data[:size], data[size:]
        # Keep any excess bytes for the next read() call.
        self.buf = io.BytesIO()
        self.buf.write(rest)
        return ret

    def parse_trailers(self, unreader, data):
        """Consume optional trailer headers that follow the last chunk.

        Parsed trailers are stored on ``self.req.trailers``; unconsumed
        bytes are pushed back onto ``unreader``.
        """
        buf = io.BytesIO()
        buf.write(data)

        idx = buf.getvalue().find(b"\r\n\r\n")
        # A leading CRLF means there are no trailers at all.
        done = buf.getvalue()[:2] == b"\r\n"
        while idx < 0 and not done:
            self.get_data(unreader, buf)
            idx = buf.getvalue().find(b"\r\n\r\n")
            done = buf.getvalue()[:2] == b"\r\n"
        if done:
            unreader.unread(buf.getvalue()[2:])
            return b""
        self.req.trailers = self.req.parse_headers(buf.getvalue()[:idx])
        unreader.unread(buf.getvalue()[idx + 4:])

    def parse_chunked(self, unreader):
        """Generator yielding the payload bytes of each chunk in turn."""
        (size, rest) = self.parse_chunk_size(unreader)
        while size > 0:
            # Yield data until the advertised chunk size is consumed.
            while size > len(rest):
                size -= len(rest)
                yield rest
                rest = unreader.read()
                if not rest:
                    raise NoMoreData()
            yield rest[:size]
            # Remove \r\n after chunk
            rest = rest[size:]
            while len(rest) < 2:
                rest += unreader.read()
            if rest[:2] != b'\r\n':
                raise ChunkMissingTerminator(rest[:2])
            (size, rest) = self.parse_chunk_size(unreader, data=rest[2:])

    def parse_chunk_size(self, unreader, data=None):
        """Read one chunk-size line; return (size, leftover bytes).

        A size of 0 marks the last chunk, in which case trailers are
        consumed as well and (0, None) is returned.
        """
        buf = io.BytesIO()
        if data is not None:
            buf.write(data)

        # NOTE(review): no upper bound on the size-line search; a peer
        # could feed bytes without a CRLF indefinitely — confirm this is
        # bounded elsewhere.
        idx = buf.getvalue().find(b"\r\n")
        while idx < 0:
            self.get_data(unreader, buf)
            idx = buf.getvalue().find(b"\r\n")

        data = buf.getvalue()
        line, rest_chunk = data[:idx], data[idx + 2:]

        # Chunk extensions (";ext=val") are discarded.
        chunk_size = line.split(b";", 1)[0].strip()
        try:
            chunk_size = int(chunk_size, 16)
        except ValueError:
            raise InvalidChunkSize(chunk_size)

        if chunk_size == 0:
            try:
                self.parse_trailers(unreader, rest_chunk)
            except NoMoreData:
                pass
            return (0, None)
        return (chunk_size, rest_chunk)

    def get_data(self, unreader, buf):
        """Append one read from ``unreader`` to ``buf`` or raise NoMoreData."""
        data = unreader.read()
        if not data:
            raise NoMoreData()
        buf.write(data)
+
+
class LengthReader(object):
    """Body reader for requests carrying an explicit Content-Length."""

    def __init__(self, unreader, length):
        self.unreader = unreader
        # Number of body bytes still owed to the caller.
        self.length = length

    def read(self, size):
        """Return up to ``size`` bytes, capped by the remaining length."""
        if not isinstance(size, int):
            raise TypeError("size must be an integral type")

        # Never hand back more than the remaining declared length.
        size = min(self.length, size)
        if size < 0:
            raise ValueError("Size must be positive.")
        if size == 0:
            return b""

        chunks = []
        total = 0
        while True:
            piece = self.unreader.read()
            if not piece:
                break
            chunks.append(piece)
            total += len(piece)
            if total >= size:
                break

        collected = b"".join(chunks)
        ret = collected[:size]
        # Anything beyond the requested size goes back to the unreader.
        self.unreader.unread(collected[size:])
        self.length -= size
        return ret
+
+
class EOFReader(object):
    """Body reader that consumes until the underlying stream hits EOF."""

    def __init__(self, unreader):
        self.unreader = unreader
        self.buf = io.BytesIO()
        # Set once the source has returned b"" (EOF observed).
        self.finished = False

    def read(self, size):
        """Return up to ``size`` bytes, reading ahead until EOF if needed."""
        if not isinstance(size, int):
            raise TypeError("size must be an integral type")
        if size < 0:
            raise ValueError("Size must be positive.")
        if size == 0:
            return b""

        if self.finished:
            # Source exhausted: serve the remainder from the buffer.
            data = self.buf.getvalue()
            ret, rest = data[:size], data[size:]
            self.buf = io.BytesIO()
            self.buf.write(rest)
            return ret

        data = self.unreader.read()
        while data:
            self.buf.write(data)
            # NOTE(review): '>' (not '>=') means one extra read happens
            # when the buffer holds exactly `size` bytes — confirm intended.
            if self.buf.tell() > size:
                break
            data = self.unreader.read()

        if not data:
            self.finished = True

        data = self.buf.getvalue()
        ret, rest = data[:size], data[size:]
        # Keep excess bytes for the next call.
        self.buf = io.BytesIO()
        self.buf.write(rest)
        return ret
+
+
class Body(object):
    """File-like wrapper around a body reader, exposed as ``wsgi.input``.

    Provides read()/readline()/readlines() and line iteration on top of
    any reader object exposing ``read(size)``.
    """

    def __init__(self, reader):
        self.reader = reader
        self.buf = io.BytesIO()

    def __iter__(self):
        return self

    def __next__(self):
        ret = self.readline()
        if not ret:
            raise StopIteration()
        return ret

    # Python 2 spelling kept for backward compatibility.
    next = __next__

    def getsize(self, size):
        """Normalize a size hint: None or negative means 'no limit'."""
        if size is None:
            return sys.maxsize
        elif not isinstance(size, int):
            raise TypeError("size must be an integral type")
        elif size < 0:
            return sys.maxsize
        return size

    def read(self, size=None):
        """Read up to ``size`` bytes (all remaining data when unbounded)."""
        size = self.getsize(size)
        if size == 0:
            return b""

        if size < self.buf.tell():
            # Enough data already buffered; no reader round-trip needed.
            data = self.buf.getvalue()
            ret, rest = data[:size], data[size:]
            self.buf = io.BytesIO()
            self.buf.write(rest)
            return ret

        while size > self.buf.tell():
            data = self.reader.read(1024)
            if not data:
                break
            self.buf.write(data)

        data = self.buf.getvalue()
        ret, rest = data[:size], data[size:]
        self.buf = io.BytesIO()
        self.buf.write(rest)
        return ret

    def readline(self, size=None):
        """Read one LF-terminated line, limited to ``size`` bytes."""
        size = self.getsize(size)
        if size == 0:
            return b""

        data = self.buf.getvalue()
        self.buf = io.BytesIO()

        ret = []
        while 1:
            idx = data.find(b"\n", 0, size)
            # idx becomes: position just past "\n" when found, `size` when
            # the limit was reached without a newline, else 0 (keep reading).
            idx = idx + 1 if idx >= 0 else size if len(data) >= size else 0
            if idx:
                ret.append(data[:idx])
                # Bytes past the line go back into the buffer.
                self.buf.write(data[idx:])
                break

            ret.append(data)
            size -= len(data)
            data = self.reader.read(min(1024, size))
            if not data:
                break

        return b"".join(ret)

    def readlines(self, size=None):
        """Return all remaining data split into lines.

        The ``size`` hint is accepted for interface compatibility but
        ignored, which PEP 3333 permits.
        """
        ret = []
        data = self.read()
        while data:
            pos = data.find(b"\n")
            if pos < 0:
                ret.append(data)
                data = b""
            else:
                line, data = data[:pos + 1], data[pos + 1:]
                ret.append(line)
        return ret
diff --git a/.venv/lib/python3.12/site-packages/gunicorn/http/errors.py b/.venv/lib/python3.12/site-packages/gunicorn/http/errors.py
new file mode 100644
index 00000000..7839ef05
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/gunicorn/http/errors.py
@@ -0,0 +1,120 @@
+# -*- coding: utf-8 -
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+
+# We don't need to call super() in __init__ methods of our
+# BaseException and Exception classes because we also define
+# our own __str__ methods so there is no need to pass 'message'
+# to the base class to get a meaningful output from 'str(exc)'.
+# pylint: disable=super-init-not-called
+
+
class ParseException(Exception):
    """Base class for errors raised while parsing an HTTP message."""
    pass
+
+
class NoMoreData(IOError):
    """Raised when the peer stops sending before a full message arrived."""

    def __init__(self, buf=None):
        # Bytes received so far, if any, for diagnostics.
        self.buf = buf

    def __str__(self):
        return f"No more data after: {self.buf!r}"
+
+
class InvalidRequestLine(ParseException):
    """Raised for a malformed HTTP request line; reported as 400."""

    def __init__(self, req):
        self.req = req
        # HTTP status code used when reporting this error.
        self.code = 400

    def __str__(self):
        return "Invalid HTTP request line: %r" % self.req
+
+
class InvalidRequestMethod(ParseException):
    """Raised when the request method fails validation."""

    def __init__(self, method):
        self.method = method

    def __str__(self):
        return "Invalid HTTP method: %r" % self.method
+
+
class InvalidHTTPVersion(ParseException):
    """Raised when the HTTP-version token cannot be parsed."""

    def __init__(self, version):
        self.version = version

    def __str__(self):
        return "Invalid HTTP Version: %r" % self.version
+
+
class InvalidHeader(ParseException):
    """Raised for a header line or value that fails validation."""

    def __init__(self, hdr, req=None):
        self.hdr = hdr
        # Optionally carries the request being parsed for context.
        self.req = req

    def __str__(self):
        return "Invalid HTTP Header: %r" % self.hdr
+
+
class InvalidHeaderName(ParseException):
    """Raised when a header name contains forbidden characters."""

    def __init__(self, hdr):
        self.hdr = hdr

    def __str__(self):
        return "Invalid HTTP header name: %r" % self.hdr
+
+
class InvalidChunkSize(IOError):
    """Raised when a chunk-size line is not valid hexadecimal."""

    def __init__(self, data):
        # The offending size token, kept for diagnostics.
        self.data = data

    def __str__(self):
        return f"Invalid chunk size: {self.data!r}"
+
+
class ChunkMissingTerminator(IOError):
    """Raised when chunk data is not followed by the CRLF terminator."""

    def __init__(self, term):
        # The two bytes found where CRLF was expected.
        self.term = term

    def __str__(self):
        return f"Invalid chunk terminator is not '\\r\\n': {self.term!r}"
+
+
class LimitRequestLine(ParseException):
    """Raised when the request line exceeds the configured limit."""

    def __init__(self, size, max_size):
        self.size = size
        self.max_size = max_size

    def __str__(self):
        return "Request Line is too large (%s > %s)" % (self.size, self.max_size)
+
+
class LimitRequestHeaders(ParseException):
    """Raised when header count or size exceeds the configured limits."""

    def __init__(self, msg):
        self.msg = msg

    def __str__(self):
        return self.msg
+
+
class InvalidProxyLine(ParseException):
    """Raised for a malformed PROXY protocol line; reported as 400."""

    def __init__(self, line):
        self.line = line
        # HTTP status code used when reporting this error.
        self.code = 400

    def __str__(self):
        return "Invalid PROXY line: %r" % self.line
+
+
class ForbiddenProxyRequest(ParseException):
    """Raised when a PROXY line arrives from a non-allowed peer; 403."""

    def __init__(self, host):
        self.host = host
        # HTTP status code used when reporting this error.
        self.code = 403

    def __str__(self):
        return "Proxy request from %r not allowed" % self.host
+
+
class InvalidSchemeHeaders(ParseException):
    """Raised when forwarded scheme headers disagree with each other."""

    def __str__(self):
        return "Contradictory scheme headers"
diff --git a/.venv/lib/python3.12/site-packages/gunicorn/http/message.py b/.venv/lib/python3.12/site-packages/gunicorn/http/message.py
new file mode 100644
index 00000000..1f93c714
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/gunicorn/http/message.py
@@ -0,0 +1,360 @@
+# -*- coding: utf-8 -
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+
+import io
+import re
+import socket
+
+from gunicorn.http.body import ChunkedReader, LengthReader, EOFReader, Body
+from gunicorn.http.errors import (
+ InvalidHeader, InvalidHeaderName, NoMoreData,
+ InvalidRequestLine, InvalidRequestMethod, InvalidHTTPVersion,
+ LimitRequestLine, LimitRequestHeaders,
+)
+from gunicorn.http.errors import InvalidProxyLine, ForbiddenProxyRequest
+from gunicorn.http.errors import InvalidSchemeHeaders
+from gunicorn.util import bytes_to_str, split_request_uri
+
# Hard ceilings; configured limits are clamped to these values.
MAX_REQUEST_LINE = 8190
MAX_HEADERS = 32768
DEFAULT_MAX_HEADERFIELD_SIZE = 8190

# Characters forbidden in a header name (controls and separators).
HEADER_RE = re.compile(r"[\x00-\x1F\x7F()<>@,;:\[\]={} \t\\\"]")
# NOTE(review): '$-_' inside the class is a character *range*
# (0x24-0x5F), which matches more than the literal set it appears to
# be — confirm this matches upstream intent before changing.
METH_RE = re.compile(r"[A-Z0-9$-_.]{3,20}")
VERSION_RE = re.compile(r"HTTP/(\d+)\.(\d+)")
+
+
class Message(object):
    """Base class for a parsed HTTP message.

    Drives header parsing (via the subclass ``parse`` hook) and selects
    the appropriate body reader based on the framing headers.
    """

    def __init__(self, cfg, unreader, peer_addr):
        self.cfg = cfg
        self.unreader = unreader
        self.peer_addr = peer_addr
        self.remote_addr = peer_addr
        self.version = None
        self.headers = []
        self.trailers = []
        self.body = None
        self.scheme = "https" if cfg.is_ssl else "http"

        # set headers limits (clamp out-of-range configured values)
        self.limit_request_fields = cfg.limit_request_fields
        if (self.limit_request_fields <= 0
                or self.limit_request_fields > MAX_HEADERS):
            self.limit_request_fields = MAX_HEADERS
        self.limit_request_field_size = cfg.limit_request_field_size
        if self.limit_request_field_size < 0:
            self.limit_request_field_size = DEFAULT_MAX_HEADERFIELD_SIZE

        # set max header buffer size
        max_header_field_size = self.limit_request_field_size or DEFAULT_MAX_HEADERFIELD_SIZE
        self.max_buffer_headers = self.limit_request_fields * \
            (max_header_field_size + 2) + 4

        # Parse immediately; leftover bytes are pushed back for the next
        # message on a keep-alive connection.
        unused = self.parse(self.unreader)
        self.unreader.unread(unused)
        self.set_body_reader()

    def parse(self, unreader):
        # Subclasses implement the actual wire-format parsing.
        raise NotImplementedError()

    def parse_headers(self, data):
        """Parse raw header bytes into a list of (NAME, value) tuples.

        Enforces the configured count/size limits, validates names, and
        may switch ``self.scheme`` based on trusted scheme headers.
        """
        cfg = self.cfg
        headers = []

        # Split lines on \r\n keeping the \r\n on each line
        lines = [bytes_to_str(line) + "\r\n" for line in data.split(b"\r\n")]

        # handle scheme headers: only honored when the peer is trusted
        # (allow-list match) or the connection is not ip-based (unix socket).
        scheme_header = False
        secure_scheme_headers = {}
        if ('*' in cfg.forwarded_allow_ips or
                not isinstance(self.peer_addr, tuple)
                or self.peer_addr[0] in cfg.forwarded_allow_ips):
            secure_scheme_headers = cfg.secure_scheme_headers

        # Parse headers into key/value pairs paying attention
        # to continuation lines.
        while lines:
            if len(headers) >= self.limit_request_fields:
                raise LimitRequestHeaders("limit request headers fields")

            # Parse initial header name : value pair.
            curr = lines.pop(0)
            header_length = len(curr)
            if curr.find(":") < 0:
                raise InvalidHeader(curr.strip())
            name, value = curr.split(":", 1)
            if self.cfg.strip_header_spaces:
                name = name.rstrip(" \t").upper()
            else:
                name = name.upper()
            if HEADER_RE.search(name):
                raise InvalidHeaderName(name)

            name, value = name.strip(), [value.lstrip()]

            # Consume value continuation lines
            while lines and lines[0].startswith((" ", "\t")):
                curr = lines.pop(0)
                header_length += len(curr)
                if header_length > self.limit_request_field_size > 0:
                    raise LimitRequestHeaders("limit request headers "
                                              "fields size")
                value.append(curr)
            value = ''.join(value).rstrip()

            if header_length > self.limit_request_field_size > 0:
                raise LimitRequestHeaders("limit request headers fields size")

            if name in secure_scheme_headers:
                secure = value == secure_scheme_headers[name]
                scheme = "https" if secure else "http"
                if scheme_header:
                    # Multiple scheme headers must agree.
                    if scheme != self.scheme:
                        raise InvalidSchemeHeaders()
                else:
                    scheme_header = True
                    self.scheme = scheme

            headers.append((name, value))

        return headers

    def set_body_reader(self):
        """Choose the body reader from Transfer-Encoding/Content-Length."""
        chunked = False
        content_length = None

        for (name, value) in self.headers:
            if name == "CONTENT-LENGTH":
                # Duplicate Content-Length headers are rejected outright.
                if content_length is not None:
                    raise InvalidHeader("CONTENT-LENGTH", req=self)
                content_length = value
            elif name == "TRANSFER-ENCODING":
                if value.lower() == "chunked":
                    chunked = True

        if chunked:
            # Chunked framing takes precedence over Content-Length.
            self.body = Body(ChunkedReader(self, self.unreader))
        elif content_length is not None:
            try:
                # isnumeric() rejects signs/whitespace that int() accepts.
                if str(content_length).isnumeric():
                    content_length = int(content_length)
                else:
                    raise InvalidHeader("CONTENT-LENGTH", req=self)
            except ValueError:
                raise InvalidHeader("CONTENT-LENGTH", req=self)

            if content_length < 0:
                raise InvalidHeader("CONTENT-LENGTH", req=self)

            self.body = Body(LengthReader(self.unreader, content_length))
        else:
            self.body = Body(EOFReader(self.unreader))

    def should_close(self):
        """Return True when the connection must close after this message."""
        for (h, v) in self.headers:
            if h == "CONNECTION":
                v = v.lower().strip()
                if v == "close":
                    return True
                elif v == "keep-alive":
                    return False
                break
        # Without an explicit Connection header, HTTP/1.0 and below close.
        return self.version <= (1, 0)
+
+
class Request(Message):
    """A parsed inbound HTTP request, with optional PROXY protocol support."""

    def __init__(self, cfg, unreader, peer_addr, req_number=1):
        self.method = None
        self.uri = None
        self.path = None
        self.query = None
        self.fragment = None

        # get max request line size (clamp out-of-range configured values)
        self.limit_request_line = cfg.limit_request_line
        if (self.limit_request_line < 0
                or self.limit_request_line >= MAX_REQUEST_LINE):
            self.limit_request_line = MAX_REQUEST_LINE

        # 1-based position of this request on a keep-alive connection.
        self.req_number = req_number
        self.proxy_protocol_info = None
        super().__init__(cfg, unreader, peer_addr)

    def get_data(self, unreader, buf, stop=False):
        """Append one read to ``buf``.

        On EOF raises StopIteration when *stop* is set (clean end of a
        keep-alive connection), otherwise NoMoreData.
        """
        data = unreader.read()
        if not data:
            if stop:
                raise StopIteration()
            raise NoMoreData(buf.getvalue())
        buf.write(data)

    def parse(self, unreader):
        """Parse request line and headers; return unconsumed body bytes."""
        buf = io.BytesIO()
        self.get_data(unreader, buf, stop=True)

        # get request line
        line, rbuf = self.read_line(unreader, buf, self.limit_request_line)

        # proxy protocol: if the first line was a PROXY line, the real
        # request line follows it.
        if self.proxy_protocol(bytes_to_str(line)):
            # get next request line
            buf = io.BytesIO()
            buf.write(rbuf)
            line, rbuf = self.read_line(unreader, buf, self.limit_request_line)

        self.parse_request_line(line)
        buf = io.BytesIO()
        buf.write(rbuf)

        # Headers
        data = buf.getvalue()
        idx = data.find(b"\r\n\r\n")

        done = data[:2] == b"\r\n"
        while True:
            idx = data.find(b"\r\n\r\n")
            done = data[:2] == b"\r\n"

            if idx < 0 and not done:
                self.get_data(unreader, buf)
                data = buf.getvalue()
                if len(data) > self.max_buffer_headers:
                    raise LimitRequestHeaders("max buffer headers")
            else:
                break

        if done:
            # Empty header block: the message starts with a bare CRLF.
            self.unreader.unread(data[2:])
            return b""

        self.headers = self.parse_headers(data[:idx])

        ret = data[idx + 4:]
        buf = None
        return ret

    def read_line(self, unreader, buf, limit=0):
        """Read one CRLF-terminated line; return (line, residual bytes)."""
        data = buf.getvalue()

        while True:
            idx = data.find(b"\r\n")
            if idx >= 0:
                # check if the request line is too large
                if idx > limit > 0:
                    raise LimitRequestLine(idx, limit)
                break
            if len(data) - 2 > limit > 0:
                raise LimitRequestLine(len(data), limit)
            self.get_data(unreader, buf)
            data = buf.getvalue()

        return (data[:idx],  # request line,
                data[idx + 2:])  # residue in the buffer, skip \r\n

    def proxy_protocol(self, line):
        """\
        Detect, check and parse proxy protocol.

        :raises: ForbiddenProxyRequest, InvalidProxyLine.
        :return: True for proxy protocol line else False
        """
        if not self.cfg.proxy_protocol:
            return False

        # Only valid on the first request of a connection.
        if self.req_number != 1:
            return False

        if not line.startswith("PROXY"):
            return False

        self.proxy_protocol_access_check()
        self.parse_proxy_protocol(line)

        return True

    def proxy_protocol_access_check(self):
        """Reject PROXY lines from peers outside ``cfg.proxy_allow_ips``."""
        # check in allow list
        if ("*" not in self.cfg.proxy_allow_ips and
                isinstance(self.peer_addr, tuple) and
                self.peer_addr[0] not in self.cfg.proxy_allow_ips):
            raise ForbiddenProxyRequest(self.peer_addr[0])

    def parse_proxy_protocol(self, line):
        """Parse a v1 PROXY line into ``self.proxy_protocol_info``."""
        bits = line.split()

        if len(bits) != 6:
            raise InvalidProxyLine(line)

        # Extract data
        proto = bits[1]
        s_addr = bits[2]
        d_addr = bits[3]

        # Validation: addresses must parse for the declared family.
        if proto not in ["TCP4", "TCP6"]:
            raise InvalidProxyLine("protocol '%s' not supported" % proto)
        if proto == "TCP4":
            try:
                socket.inet_pton(socket.AF_INET, s_addr)
                socket.inet_pton(socket.AF_INET, d_addr)
            except socket.error:
                raise InvalidProxyLine(line)
        elif proto == "TCP6":
            try:
                socket.inet_pton(socket.AF_INET6, s_addr)
                socket.inet_pton(socket.AF_INET6, d_addr)
            except socket.error:
                raise InvalidProxyLine(line)

        try:
            s_port = int(bits[4])
            d_port = int(bits[5])
        except ValueError:
            raise InvalidProxyLine("invalid port %s" % line)

        if not ((0 <= s_port <= 65535) and (0 <= d_port <= 65535)):
            raise InvalidProxyLine("invalid port %s" % line)

        # Set data
        self.proxy_protocol_info = {
            "proxy_protocol": proto,
            "client_addr": s_addr,
            "client_port": s_port,
            "proxy_addr": d_addr,
            "proxy_port": d_port
        }

    def parse_request_line(self, line_bytes):
        """Split and validate 'METHOD URI HTTP/x.y'; populate fields."""
        bits = [bytes_to_str(bit) for bit in line_bytes.split(None, 2)]
        if len(bits) != 3:
            raise InvalidRequestLine(bytes_to_str(line_bytes))

        # Method
        if not METH_RE.match(bits[0]):
            raise InvalidRequestMethod(bits[0])
        self.method = bits[0].upper()

        # URI
        self.uri = bits[1]

        try:
            parts = split_request_uri(self.uri)
        except ValueError:
            raise InvalidRequestLine(bytes_to_str(line_bytes))
        self.path = parts.path or ""
        self.query = parts.query or ""
        self.fragment = parts.fragment or ""

        # Version
        match = VERSION_RE.match(bits[2])
        if match is None:
            raise InvalidHTTPVersion(bits[2])
        self.version = (int(match.group(1)), int(match.group(2)))

    def set_body_reader(self):
        """Requests without framed bodies get a zero-length reader, not EOF."""
        super().set_body_reader()
        if isinstance(self.body.reader, EOFReader):
            self.body = Body(LengthReader(self.unreader, 0))
diff --git a/.venv/lib/python3.12/site-packages/gunicorn/http/parser.py b/.venv/lib/python3.12/site-packages/gunicorn/http/parser.py
new file mode 100644
index 00000000..5d689f06
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/gunicorn/http/parser.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+
+from gunicorn.http.message import Request
+from gunicorn.http.unreader import SocketUnreader, IterUnreader
+
+
class Parser(object):
    """Iterator yielding one parsed message per request on a connection."""

    # Subclasses set this to the concrete message type to construct.
    mesg_class = None

    def __init__(self, cfg, source, source_addr):
        self.cfg = cfg
        # Sockets are wrapped in SocketUnreader; any other iterable of
        # byte chunks in IterUnreader.
        if hasattr(source, "recv"):
            self.unreader = SocketUnreader(source)
        else:
            self.unreader = IterUnreader(source)
        self.mesg = None
        self.source_addr = source_addr

        # request counter (for keepalive connections)
        self.req_count = 0

    def __iter__(self):
        return self

    def __next__(self):
        # Stop if HTTP dictates a stop.
        if self.mesg and self.mesg.should_close():
            raise StopIteration()

        # Discard any unread body of the previous message
        if self.mesg:
            data = self.mesg.body.read(8192)
            while data:
                data = self.mesg.body.read(8192)

        # Parse the next request
        self.req_count += 1
        self.mesg = self.mesg_class(self.cfg, self.unreader, self.source_addr, self.req_count)
        if not self.mesg:
            raise StopIteration()
        return self.mesg

    # Python 2 spelling kept for backward compatibility.
    next = __next__
+
+
class RequestParser(Parser):
    """Parser specialized for inbound HTTP requests."""

    mesg_class = Request
diff --git a/.venv/lib/python3.12/site-packages/gunicorn/http/unreader.py b/.venv/lib/python3.12/site-packages/gunicorn/http/unreader.py
new file mode 100644
index 00000000..273bfc31
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/gunicorn/http/unreader.py
@@ -0,0 +1,79 @@
+# -*- coding: utf-8 -
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+
+import io
+import os
+
+# Classes that can undo reading data from
+# a given type of data source.
+
+
class Unreader(object):
    """Base class for byte sources that support pushing data back.

    ``unread`` bytes are buffered and served before new ``chunk`` reads.
    """

    def __init__(self):
        self.buf = io.BytesIO()

    def chunk(self):
        # Subclasses supply the actual byte source.
        raise NotImplementedError()

    def read(self, size=None):
        """Read up to ``size`` bytes; unbounded when size is None/negative."""
        if size is not None and not isinstance(size, int):
            raise TypeError("size parameter must be an int or long.")

        if size is not None:
            if size == 0:
                return b""
            if size < 0:
                # Negative means "no limit", same as None.
                size = None

        self.buf.seek(0, os.SEEK_END)

        if size is None:
            # Unbounded: drain the pushback buffer first, else one chunk.
            if self.buf.tell():
                pending = self.buf.getvalue()
                self.buf = io.BytesIO()
                return pending
            return self.chunk()

        # Bounded: accumulate chunks until enough bytes are buffered or
        # the source is exhausted.
        while self.buf.tell() < size:
            piece = self.chunk()
            if not piece:
                pending = self.buf.getvalue()
                self.buf = io.BytesIO()
                return pending
            self.buf.write(piece)

        data = self.buf.getvalue()
        self.buf = io.BytesIO()
        self.buf.write(data[size:])
        return data[:size]

    def unread(self, data):
        """Push ``data`` back so it is returned by a future read()."""
        self.buf.seek(0, os.SEEK_END)
        self.buf.write(data)
+
+
class SocketUnreader(Unreader):
    """Unreader sourcing bytes from a socket via recv()."""

    def __init__(self, sock, max_chunk=8192):
        super().__init__()
        self.sock = sock
        # Maximum bytes requested per recv() call.
        self.mxchunk = max_chunk

    def chunk(self):
        return self.sock.recv(self.mxchunk)
+
+
class IterUnreader(Unreader):
    """Unreader sourcing bytes from an iterable of byte chunks."""

    def __init__(self, iterable):
        super().__init__()
        self.iter = iter(iterable)

    def chunk(self):
        if not self.iter:
            return b""
        try:
            return next(self.iter)
        except StopIteration:
            # Exhausted: drop the iterator and report EOF from now on.
            self.iter = None
            return b""
diff --git a/.venv/lib/python3.12/site-packages/gunicorn/http/wsgi.py b/.venv/lib/python3.12/site-packages/gunicorn/http/wsgi.py
new file mode 100644
index 00000000..25715eab
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/gunicorn/http/wsgi.py
@@ -0,0 +1,393 @@
+# -*- coding: utf-8 -
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+
+import io
+import logging
+import os
+import re
+import sys
+
+from gunicorn.http.message import HEADER_RE
+from gunicorn.http.errors import InvalidHeader, InvalidHeaderName
+from gunicorn import SERVER_SOFTWARE, SERVER
+from gunicorn import util
+
# Send files in at most 1GB blocks as some operating systems can have problems
# with sending files in blocks over 2GB.
BLKSIZE = 0x3FFFFFFF

# Control characters are not allowed in header values (guards against
# header-injection / response-splitting).
HEADER_VALUE_RE = re.compile(r'[\x00-\x1F\x7F]')

log = logging.getLogger(__name__)
+
+
class FileWrapper(object):
    """``wsgi.file_wrapper`` implementation using the legacy sequence
    protocol: indexing returns successive blocks until IndexError."""

    def __init__(self, filelike, blksize=8192):
        self.filelike = filelike
        self.blksize = blksize
        # Expose the underlying close() so the server can clean up.
        if hasattr(filelike, 'close'):
            self.close = filelike.close

    def __getitem__(self, key):
        # The index is ignored; each access returns the next block.
        block = self.filelike.read(self.blksize)
        if not block:
            raise IndexError
        return block
+
+
class WSGIErrorsWrapper(io.RawIOBase):
    """``wsgi.errors`` stream fanning writes out to the error-log streams."""

    def __init__(self, cfg):
        # There is no public __init__ method for RawIOBase so
        # we don't need to call super() in the __init__ method.
        # pylint: disable=super-init-not-called
        errorlog = logging.getLogger("gunicorn.error")
        handlers = errorlog.handlers
        self.streams = []

        if cfg.errorlog == "-":
            self.streams.append(sys.stderr)
            # Skip the first handler to avoid duplicate stderr output.
            # NOTE(review): assumes the first handler is the stderr one —
            # confirm against gunicorn's logging setup.
            handlers = handlers[1:]

        for h in handlers:
            if hasattr(h, "stream"):
                self.streams.append(h.stream)

    def write(self, data):
        for stream in self.streams:
            try:
                stream.write(data)
            except UnicodeError:
                # Fall back to UTF-8 bytes for streams that reject str.
                stream.write(data.encode("UTF-8"))
            stream.flush()
+
+
def base_environ(cfg):
    """Return the connection-independent part of the WSGI environ."""
    return {
        "wsgi.errors": WSGIErrorsWrapper(cfg),
        "wsgi.version": (1, 0),
        "wsgi.multithread": False,
        "wsgi.multiprocess": (cfg.workers > 1),
        "wsgi.run_once": False,
        "wsgi.file_wrapper": FileWrapper,
        "wsgi.input_terminated": True,
        "SERVER_SOFTWARE": SERVER_SOFTWARE,
    }
+
+
def default_environ(req, sock, cfg):
    """Build the per-request WSGI environ seeded from base_environ()."""
    env = base_environ(cfg)
    env.update({
        "wsgi.input": req.body,
        "gunicorn.socket": sock,
        "REQUEST_METHOD": req.method,
        "QUERY_STRING": req.query,
        "RAW_URI": req.uri,
        "SERVER_PROTOCOL": "HTTP/%s" % ".".join([str(v) for v in req.version])
    })
    return env
+
+
def proxy_environ(req):
    """Map ``req.proxy_protocol_info`` into CGI-style environ entries.

    Returns an empty dict when the connection did not use the PROXY
    protocol (i.e. the info attribute is unset).
    """
    info = req.proxy_protocol_info
    if not info:
        return {}

    environ = {}
    environ["PROXY_PROTOCOL"] = info["proxy_protocol"]
    environ["REMOTE_ADDR"] = info["client_addr"]
    environ["REMOTE_PORT"] = str(info["client_port"])
    environ["PROXY_ADDR"] = info["proxy_addr"]
    environ["PROXY_PORT"] = str(info["proxy_port"])
    return environ
+
+
def create(req, sock, client, server, cfg):
    """Build the (Response, environ) pair for a parsed request.

    *client* is the remote address (tuple, or string for unix sockets)
    and *server* the local listening address.
    """
    resp = Response(req, sock, cfg)

    # set initial environ
    environ = default_environ(req, sock, cfg)

    # default variables
    host = None
    script_name = os.environ.get("SCRIPT_NAME", "")

    # add the headers to the environ
    for hdr_name, hdr_value in req.headers:
        if hdr_name == "EXPECT":
            # handle expect
            if hdr_value.lower() == "100-continue":
                sock.send(b"HTTP/1.1 100 Continue\r\n\r\n")
        elif hdr_name == 'HOST':
            host = hdr_value
        elif hdr_name == "SCRIPT_NAME":
            script_name = hdr_value
        elif hdr_name == "CONTENT-TYPE":
            environ['CONTENT_TYPE'] = hdr_value
            continue
        elif hdr_name == "CONTENT-LENGTH":
            environ['CONTENT_LENGTH'] = hdr_value
            continue

        # Everything else becomes HTTP_*; repeated headers are
        # comma-joined per CGI convention.
        key = 'HTTP_' + hdr_name.replace('-', '_')
        if key in environ:
            hdr_value = "%s,%s" % (environ[key], hdr_value)
        environ[key] = hdr_value

    # set the url scheme
    environ['wsgi.url_scheme'] = req.scheme

    # set the REMOTE_* keys in environ
    # authors should be aware that REMOTE_HOST and REMOTE_ADDR
    # may not qualify the remote addr:
    # http://www.ietf.org/rfc/rfc3875
    if isinstance(client, str):
        environ['REMOTE_ADDR'] = client
    elif isinstance(client, bytes):
        environ['REMOTE_ADDR'] = client.decode()
    else:
        environ['REMOTE_ADDR'] = client[0]
        environ['REMOTE_PORT'] = str(client[1])

    # handle the SERVER_*
    # Normally only the application should use the Host header but since the
    # WSGI spec doesn't support unix sockets, we are using it to create
    # viable SERVER_* if possible.
    if isinstance(server, str):
        server = server.split(":")
        if len(server) == 1:
            # unix socket
            if host:
                server = host.split(':')
                if len(server) == 1:
                    if req.scheme == "http":
                        server.append(80)
                    elif req.scheme == "https":
                        server.append(443)
                    else:
                        server.append('')
            else:
                # no host header given which means that we are not behind a
                # proxy, so append an empty port.
                server.append('')
    environ['SERVER_NAME'] = server[0]
    environ['SERVER_PORT'] = str(server[1])

    # set the path and script name
    path_info = req.path
    if script_name:
        # NOTE(review): split() assumes the path begins with script_name;
        # an IndexError follows if it does not — confirm upstream guarantee.
        path_info = path_info.split(script_name, 1)[1]
    environ['PATH_INFO'] = util.unquote_to_wsgi_str(path_info)
    environ['SCRIPT_NAME'] = script_name

    # override the environ with the correct remote and server address if
    # we are behind a proxy using the proxy protocol.
    environ.update(proxy_environ(req))
    return resp, environ
+
+
class Response(object):
    """Owns the server side of one HTTP response: status, headers, body."""

    def __init__(self, req, sock, cfg):
        self.req = req
        self.sock = sock
        self.version = SERVER
        self.status = None
        self.chunked = False
        self.must_close = False
        self.headers = []
        self.headers_sent = False
        # Value of the application's Content-Length header, if any.
        self.response_length = None
        self.sent = 0
        self.upgrade = False
        self.cfg = cfg

    def force_close(self):
        """Mark the connection to be closed regardless of keep-alive."""
        self.must_close = True

    def should_close(self):
        """Decide whether the connection must close after this response."""
        if self.must_close or self.req.should_close():
            return True
        if self.response_length is not None or self.chunked:
            return False
        if self.req.method == 'HEAD':
            return False
        if self.status_code < 200 or self.status_code in (204, 304):
            return False
        return True

    def start_response(self, status, headers, exc_info=None):
        """WSGI ``start_response`` callable; returns the write() function."""
        if exc_info:
            try:
                if self.status and self.headers_sent:
                    # Headers already on the wire: re-raise per PEP 3333.
                    util.reraise(exc_info[0], exc_info[1], exc_info[2])
            finally:
                exc_info = None
        elif self.status is not None:
            raise AssertionError("Response headers already set!")

        self.status = status

        # get the status code from the response here so we can use it to check
        # the need for the connection header later without parsing the string
        # each time.
        try:
            self.status_code = int(self.status.split()[0])
        except ValueError:
            self.status_code = None

        self.process_headers(headers)
        self.chunked = self.is_chunked()
        return self.write

    def process_headers(self, headers):
        """Validate and store application headers; filter hop-by-hop ones."""
        for name, value in headers:
            if not isinstance(name, str):
                raise TypeError('%r is not a string' % name)

            if HEADER_RE.search(name):
                raise InvalidHeaderName('%r' % name)

            if not isinstance(value, str):
                raise TypeError('%r is not a string' % value)

            # Reject control characters to prevent header injection.
            if HEADER_VALUE_RE.search(value):
                raise InvalidHeader('%r' % value)

            value = value.strip()
            lname = name.lower().strip()
            if lname == "content-length":
                self.response_length = int(value)
            elif util.is_hoppish(name):
                if lname == "connection":
                    # handle websocket
                    if value.lower().strip() == "upgrade":
                        self.upgrade = True
                elif lname == "upgrade":
                    if value.lower().strip() == "websocket":
                        self.headers.append((name.strip(), value))

                # ignore hopbyhop headers
                continue
            self.headers.append((name.strip(), value))

    def is_chunked(self):
        # Only use chunked responses when the client is
        # speaking HTTP/1.1 or newer and there was
        # no Content-Length header set.
        if self.response_length is not None:
            return False
        elif self.req.version <= (1, 0):
            return False
        elif self.req.method == 'HEAD':
            # Responses to a HEAD request MUST NOT contain a response body.
            return False
        elif self.status_code in (204, 304):
            # Do not use chunked responses when the response is guaranteed to
            # not have a response body.
            return False
        return True

    def default_headers(self):
        """Build the status line plus Server/Date/Connection headers."""
        # set the connection header
        if self.upgrade:
            connection = "upgrade"
        elif self.should_close():
            connection = "close"
        else:
            connection = "keep-alive"

        headers = [
            "HTTP/%s.%s %s\r\n" % (self.req.version[0],
                                   self.req.version[1], self.status),
            "Server: %s\r\n" % self.version,
            "Date: %s\r\n" % util.http_date(),
            "Connection: %s\r\n" % connection
        ]
        if self.chunked:
            headers.append("Transfer-Encoding: chunked\r\n")
        return headers

    def send_headers(self):
        """Write the header block once; subsequent calls are no-ops."""
        if self.headers_sent:
            return
        tosend = self.default_headers()
        tosend.extend(["%s: %s\r\n" % (k, v) for k, v in self.headers])

        header_str = "%s\r\n" % "".join(tosend)
        util.write(self.sock, util.to_bytestring(header_str, "latin-1"))
        self.headers_sent = True

    def write(self, arg):
        """Send one piece of the body, honoring Content-Length if set."""
        self.send_headers()
        if not isinstance(arg, bytes):
            raise TypeError('%r is not a byte' % arg)
        arglen = len(arg)
        tosend = arglen
        if self.response_length is not None:
            if self.sent >= self.response_length:
                # Never write more than self.response_length bytes
                return

            tosend = min(self.response_length - self.sent, tosend)
            if tosend < arglen:
                arg = arg[:tosend]

        # Sending an empty chunk signals the end of the
        # response and prematurely closes the response
        if self.chunked and tosend == 0:
            return

        self.sent += tosend
        util.write(self.sock, arg, self.chunked)

    def can_sendfile(self):
        """Whether configuration permits the sendfile(2) fast path."""
        return self.cfg.sendfile is not False

    def sendfile(self, respiter):
        """Try to send a file-backed body via ``socket.sendfile()``.

        Returns True on success, False when the fast path cannot be
        used (SSL connection, disabled, or no usable file descriptor).
        """
        if self.cfg.is_ssl or not self.can_sendfile():
            return False

        if not util.has_fileno(respiter.filelike):
            return False

        fileno = respiter.filelike.fileno()
        try:
            offset = os.lseek(fileno, 0, os.SEEK_CUR)
            if self.response_length is None:
                # No Content-Length: send from current offset to EOF.
                filesize = os.fstat(fileno).st_size
                nbytes = filesize - offset
            else:
                nbytes = self.response_length
        except (OSError, io.UnsupportedOperation):
            return False

        self.send_headers()

        if self.is_chunked():
            # Whole file goes out as a single chunk.
            chunk_size = "%X\r\n" % nbytes
            self.sock.sendall(chunk_size.encode('utf-8'))
        if nbytes > 0:
            self.sock.sendfile(respiter.filelike, offset=offset, count=nbytes)

        if self.is_chunked():
            self.sock.sendall(b"\r\n")

        # Restore the file position that sendfile() may have advanced.
        os.lseek(fileno, offset, os.SEEK_SET)

        return True

    def write_file(self, respiter):
        """Send the response body, preferring sendfile() over write()."""
        if not self.sendfile(respiter):
            for item in respiter:
                self.write(item)

    def close(self):
        """Finish the response: flush headers and the chunked terminator."""
        if not self.headers_sent:
            self.send_headers()
        if self.chunked:
            util.write_chunk(self.sock, b"")