fix: avoid port conflicts - note_bridge 8098, intent_service 8099
Changed to avoid port conflicts with Jellyfin (8096) and OrbStack (8097).
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
30
.venv/lib/python3.9/site-packages/jh2/hpack/__init__.py
Normal file
@@ -0,0 +1,30 @@
"""
hpack
~~~~~

HTTP/2 header encoding for Python.
"""

from __future__ import annotations

from .exceptions import (
    HPACKDecodingError,
    HPACKError,
    InvalidTableIndex,
    InvalidTableSizeError,
    OversizedHeaderListError,
)
from .hpack import Decoder, Encoder
from .struct import HeaderTuple, NeverIndexedHeaderTuple

__all__ = [
    "Encoder",
    "Decoder",
    "HeaderTuple",
    "NeverIndexedHeaderTuple",
    "HPACKError",
    "HPACKDecodingError",
    "InvalidTableIndex",
    "OversizedHeaderListError",
    "InvalidTableSizeError",
]
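The package re-exports the encoder, decoder, header tuple types, and exception hierarchy as its public surface. A minimal round-trip sketch using only these re-exported names (illustrative, not part of the committed file; it assumes the vendored jh2.hpack package is importable):

from jh2.hpack import Decoder, Encoder, NeverIndexedHeaderTuple

encoder = Encoder()
decoder = Decoder()

# Encode a small request header set; sensitive values can be marked
# never-indexed so they stay out of the dynamic compression table.
block = encoder.encode([
    (":method", "GET"),
    (":path", "/"),
    NeverIndexedHeaderTuple("authorization", "secret-token"),
])

# Decoding yields (name, value) tuples in the order they were encoded.
assert decoder.decode(block) == [
    (":method", "GET"),
    (":path", "/"),
    ("authorization", "secret-token"),
]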
55
.venv/lib/python3.9/site-packages/jh2/hpack/exceptions.py
Normal file
@@ -0,0 +1,55 @@
"""
hyper/http20/exceptions
~~~~~~~~~~~~~~~~~~~~~~~

This defines exceptions used in the HTTP/2 portion of hyper.
"""

from __future__ import annotations


class HPACKError(Exception):
    """
    The base class for all ``hpack`` exceptions.
    """

    pass


class HPACKDecodingError(HPACKError):
    """
    An error has been encountered while performing HPACK decoding.
    """

    pass


class InvalidTableIndex(HPACKDecodingError):
    """
    An invalid table index was received.
    """

    pass


class OversizedHeaderListError(HPACKDecodingError):
    """
    A header list that was larger than we allow has been received. This may be
    a DoS attack.

    .. versionadded:: 2.3.0
    """

    pass


class InvalidTableSizeError(HPACKDecodingError):
    """
    An attempt was made to change the decoder table size to a value larger than
    allowed, or the list was shrunk and the remote peer didn't shrink their
    table size.

    .. versionadded:: 3.0.0
    """

    pass
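All decoding failures derive from HPACKDecodingError (itself an HPACKError), so callers can catch the base class and tear the connection down, as the docstrings above suggest. A hedged sketch of that pattern (illustrative only, assuming the vendored jh2.hpack package is importable):

from jh2.hpack import Decoder, HPACKDecodingError

decoder = Decoder(max_header_list_size=4096)  # a deliberately small limit
try:
    headers = decoder.decode(b"\x82\x86")  # some received header block
except HPACKDecodingError:
    # Covers InvalidTableIndex, OversizedHeaderListError, InvalidTableSizeError
    # and truncated blocks; after this the HPACK state is no longer trustworthy.
    headers = None  # in practice: close the HTTP/2 connection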
610
.venv/lib/python3.9/site-packages/jh2/hpack/hpack.py
Normal file
@@ -0,0 +1,610 @@
"""
hpack/hpack
~~~~~~~~~~~

Implements the HPACK header compression algorithm as detailed by the IETF.
"""

from __future__ import annotations

import logging

from .exceptions import (
    HPACKDecodingError,
    InvalidTableSizeError,
    OversizedHeaderListError,
)
from .huffman import HuffmanEncoder
from .huffman_constants import REQUEST_CODES, REQUEST_CODES_LENGTH
from .huffman_table import decode_huffman
from .struct import HeaderTuple, NeverIndexedHeaderTuple
from .table import HeaderTable, table_entry_size

log = logging.getLogger(__name__)

INDEX_NONE = b"\x00"
INDEX_NEVER = b"\x10"
INDEX_INCREMENTAL = b"\x40"

# Precompute 2^i for 1-8 for use in prefix calcs.
# Zero index is not used but there to save a subtraction
# as prefix numbers are not zero indexed.
_PREFIX_BIT_MAX_NUMBERS = [(2**i) - 1 for i in range(9)]

basestring = (str, bytes)


# We default the maximum header list we're willing to accept to 64kB. That's a
# lot of headers, but if applications want to raise it they can do.
DEFAULT_MAX_HEADER_LIST_SIZE = 2**16


def _unicode_if_needed(header, raw):
    """
    Provides a header as a unicode string if raw is False, otherwise returns
    it as a bytestring.
    """
    name = bytes(header[0])
    value = bytes(header[1])
    if not raw:
        name = name.decode("utf-8")
        value = value.decode("utf-8")
    return header.__class__(name, value)


def encode_integer(integer, prefix_bits):
    """
    This encodes an integer according to the wacky integer encoding rules
    defined in the HPACK spec.
    """
    log.debug("Encoding %d with %d bits", integer, prefix_bits)

    if integer < 0:
        raise ValueError("Can only encode positive integers, got %s" % integer)

    if prefix_bits < 1 or prefix_bits > 8:
        raise ValueError("Prefix bits must be between 1 and 8, got %s" % prefix_bits)

    max_number = _PREFIX_BIT_MAX_NUMBERS[prefix_bits]

    if integer < max_number:
        return bytearray([integer])  # Seriously?
    else:
        elements = [max_number]
        integer -= max_number

        while integer >= 128:
            elements.append((integer & 127) + 128)
            integer >>= 7

        elements.append(integer)

        return bytearray(elements)


def decode_integer(data, prefix_bits):
    """
    This decodes an integer according to the wacky integer encoding rules
    defined in the HPACK spec. Returns a tuple of the decoded integer and the
    number of bytes that were consumed from ``data`` in order to get that
    integer.
    """
    if prefix_bits < 1 or prefix_bits > 8:
        raise ValueError("Prefix bits must be between 1 and 8, got %s" % prefix_bits)

    max_number = _PREFIX_BIT_MAX_NUMBERS[prefix_bits]
    index = 1
    shift = 0
    mask = 0xFF >> (8 - prefix_bits)

    try:
        number = data[0] & mask
        if number == max_number:
            while True:
                next_byte = data[index]
                index += 1

                if next_byte >= 128:
                    number += (next_byte - 128) << shift
                else:
                    number += next_byte << shift
                    break
                shift += 7

    except IndexError:
        raise HPACKDecodingError(
            "Unable to decode HPACK integer representation from %r" % data
        )

    log.debug("Decoded %d, consumed %d bytes", number, index)

    return number, index


def _dict_to_iterable(header_dict):
    """
    This converts a dictionary to an iterable of two-tuples. This is a
    HPACK-specific function because it pulls "special-headers" out first and
    then emits them.
    """
    assert isinstance(header_dict, dict)
    keys = sorted(header_dict.keys(), key=lambda k: not _to_bytes(k).startswith(b":"))
    for key in keys:
        yield key, header_dict[key]


def _to_bytes(string):
    """
    Convert string to bytes.
    """
    if not isinstance(string, basestring):  # pragma: no cover
        string = str(string)

    return string if isinstance(string, bytes) else string.encode("utf-8")


class Encoder:
    """
    An HPACK encoder object. This object takes HTTP headers and emits encoded
    HTTP/2 header blocks.
    """

    def __init__(self):
        self.header_table = HeaderTable()
        self.huffman_coder = HuffmanEncoder(REQUEST_CODES, REQUEST_CODES_LENGTH)
        self.table_size_changes = []

    @property
    def header_table_size(self):
        """
        Controls the size of the HPACK header table.
        """
        return self.header_table.maxsize

    @header_table_size.setter
    def header_table_size(self, value):
        self.header_table.maxsize = value
        if self.header_table.resized:
            self.table_size_changes.append(value)

    def encode(self, headers, huffman=True):
        """
        Takes a set of headers and encodes them into a HPACK-encoded header
        block.

        :param headers: The headers to encode. Must be either an iterable of
                        tuples, an iterable of :class:`HeaderTuple
                        <hpack.HeaderTuple>`, or a ``dict``.

                        If an iterable of tuples, the tuples may be either
                        two-tuples or three-tuples. If they are two-tuples, the
                        tuples must be of the format ``(name, value)``. If they
                        are three-tuples, they must be of the format
                        ``(name, value, sensitive)``, where ``sensitive`` is a
                        boolean value indicating whether the header should be
                        added to header tables anywhere. If not present,
                        ``sensitive`` defaults to ``False``.

                        If an iterable of :class:`HeaderTuple
                        <hpack.HeaderTuple>`, the tuples must always be
                        two-tuples. Instead of using ``sensitive`` as a third
                        tuple entry, use :class:`NeverIndexedHeaderTuple
                        <hpack.NeverIndexedHeaderTuple>` to request that
                        the field never be indexed.

                        .. warning:: HTTP/2 requires that all special headers
                            (headers whose names begin with ``:`` characters)
                            appear at the *start* of the header block. While
                            this method will ensure that happens for ``dict``
                            subclasses, callers using any other iterable of
                            tuples **must** ensure they place their special
                            headers at the start of the iterable.

                            For efficiency reasons users should prefer to use
                            iterables of two-tuples: fixing the ordering of
                            dictionary headers is an expensive operation that
                            should be avoided if possible.

        :param huffman: (optional) Whether to Huffman-encode any header sent as
                        a literal value. Except for use when debugging, it is
                        recommended that this be left enabled.

        :returns: A bytestring containing the HPACK-encoded header block.
        """
        # Transforming the headers into a header block is a procedure that can
        # be modeled as a chain or pipe. First, the headers are encoded. This
        # encoding can be done a number of ways. If the header name-value pair
        # are already in the header table we can represent them using the
        # indexed representation: the same is true if they are in the static
        # table. Otherwise, a literal representation will be used.
        header_block = []

        # Turn the headers into a list of tuples if possible. This is the
        # natural way to interact with them in HPACK. Because dictionaries are
        # un-ordered, we need to make sure we grab the "special" headers first.
        if isinstance(headers, dict):
            headers = _dict_to_iterable(headers)

        # Before we begin, if the header table size has been changed we need
        # to signal all changes since last emission appropriately.
        if self.header_table.resized:
            header_block.append(self._encode_table_size_change())
            self.header_table.resized = False

        # Add each header to the header block
        for header in headers:
            sensitive = False
            if isinstance(header, HeaderTuple):
                sensitive = not header.indexable
            elif len(header) > 2:
                sensitive = header[2]

            header = (_to_bytes(header[0]), _to_bytes(header[1]))
            header_block.append(self.add(header, sensitive, huffman))

        header_block = b"".join(header_block)

        log.debug("Encoded header block to %s", header_block)

        return header_block

    def add(self, to_add, sensitive, huffman=False):
        """
        This function takes a header key-value tuple and serializes it.
        """
        log.debug(
            "Adding %s to the header table, sensitive:%s, huffman:%s",
            to_add,
            sensitive,
            huffman,
        )

        name, value = to_add

        # Set our indexing mode
        indexbit = INDEX_INCREMENTAL if not sensitive else INDEX_NEVER

        # Search for a matching header in the header table.
        match = self.header_table.search(name, value)

        if match is None:
            # Not in the header table. Encode using the literal syntax,
            # and add it to the header table.
            encoded = self._encode_literal(name, value, indexbit, huffman)
            if not sensitive:
                self.header_table.add(name, value)
            return encoded

        # The header is in the table, break out the values. If we matched
        # perfectly, we can use the indexed representation: otherwise we
        # can use the indexed literal.
        index, name, perfect = match

        if perfect:
            # Indexed representation.
            encoded = self._encode_indexed(index)
        else:
            # Indexed literal. We are going to add header to the
            # header table unconditionally. It is a future todo to
            # filter out headers which are known to be ineffective for
            # indexing since they just take space in the table and
            # pushed out other valuable headers.
            encoded = self._encode_indexed_literal(index, value, indexbit, huffman)
            if not sensitive:
                self.header_table.add(name, value)

        return encoded

    def _encode_indexed(self, index):
        """
        Encodes a header using the indexed representation.
        """
        field = encode_integer(index, 7)
        field[0] |= 0x80  # we set the top bit
        return bytes(field)

    def _encode_literal(self, name, value, indexbit, huffman=False):
        """
        Encodes a header with a literal name and literal value. If ``indexing``
        is True, the header will be added to the header table: otherwise it
        will not.
        """
        if huffman:
            name = self.huffman_coder.encode(name)
            value = self.huffman_coder.encode(value)

        name_len = encode_integer(len(name), 7)
        value_len = encode_integer(len(value), 7)

        if huffman:
            name_len[0] |= 0x80
            value_len[0] |= 0x80

        return b"".join([indexbit, bytes(name_len), name, bytes(value_len), value])

    def _encode_indexed_literal(self, index, value, indexbit, huffman=False):
        """
        Encodes a header with an indexed name and a literal value and performs
        incremental indexing.
        """
        if indexbit != INDEX_INCREMENTAL:
            prefix = encode_integer(index, 4)
        else:
            prefix = encode_integer(index, 6)

        prefix[0] |= ord(indexbit)

        if huffman:
            value = self.huffman_coder.encode(value)

        value_len = encode_integer(len(value), 7)

        if huffman:
            value_len[0] |= 0x80

        return b"".join([bytes(prefix), bytes(value_len), value])

    def _encode_table_size_change(self):
        """
        Produces the encoded form of all header table size change context
        updates.
        """
        block = b""
        for size_bytes in self.table_size_changes:
            size_bytes = encode_integer(size_bytes, 5)
            size_bytes[0] |= 0x20
            block += bytes(size_bytes)
        self.table_size_changes = []
        return block


class Decoder:
    """
    An HPACK decoder object.

    .. versionchanged:: 2.3.0
       Added ``max_header_list_size`` argument.

    :param max_header_list_size: The maximum decompressed size we will allow
        for any single header block. This is a protection against DoS attacks
        that attempt to force the application to expand a relatively small
        amount of data into a really large header list, allowing enormous
        amounts of memory to be allocated.

        If this amount of data is exceeded, a `OversizedHeaderListError
        <hpack.OversizedHeaderListError>` exception will be raised. At this
        point the connection should be shut down, as the HPACK state will no
        longer be usable.

        Defaults to 64kB.
    :type max_header_list_size: ``int``
    """

    def __init__(self, max_header_list_size=DEFAULT_MAX_HEADER_LIST_SIZE):
        self.header_table = HeaderTable()

        #: The maximum decompressed size we will allow for any single header
        #: block. This is a protection against DoS attacks that attempt to
        #: force the application to expand a relatively small amount of data
        #: into a really large header list, allowing enormous amounts of memory
        #: to be allocated.
        #:
        #: If this amount of data is exceeded, a `OversizedHeaderListError
        #: <hpack.OversizedHeaderListError>` exception will be raised. At this
        #: point the connection should be shut down, as the HPACK state will no
        #: longer be usable.
        #:
        #: Defaults to 64kB.
        #:
        #: .. versionadded:: 2.3.0
        self.max_header_list_size = max_header_list_size

        #: Maximum allowed header table size.
        #:
        #: A HTTP/2 implementation should set this to the most recent value of
        #: SETTINGS_HEADER_TABLE_SIZE that it sent *and has received an ACK
        #: for*. Once this setting is set, the actual header table size will be
        #: checked at the end of each decoding run and whenever it is changed,
        #: to confirm that it fits in this size.
        self.max_allowed_table_size = self.header_table.maxsize

    @property
    def header_table_size(self):
        """
        Controls the size of the HPACK header table.
        """
        return self.header_table.maxsize

    @header_table_size.setter
    def header_table_size(self, value):
        self.header_table.maxsize = value

    def decode(self, data, raw=False):
        """
        Takes an HPACK-encoded header block and decodes it into a header set.

        :param data: A bytestring representing a complete HPACK-encoded header
                     block.
        :param raw: (optional) Whether to return the headers as tuples of raw
                    byte strings or to decode them as UTF-8 before returning
                    them. The default value is False, which returns tuples of
                    Unicode strings
        :returns: A list of two-tuples of ``(name, value)`` representing the
                  HPACK-encoded headers, in the order they were decoded.
        :raises HPACKDecodingError: If an error is encountered while decoding
                                    the header block.
        """
        log.debug("Decoding %s", data)

        data_mem = memoryview(data)
        headers = []
        data_len = len(data)
        inflated_size = 0
        current_index = 0

        while current_index < data_len:
            # Work out what kind of header we're decoding.
            # If the high bit is 1, it's an indexed field.
            current = data[current_index]
            indexed = True if current & 0x80 else False

            # Otherwise, if the second-highest bit is 1 it's a field that does
            # alter the header table.
            literal_index = True if current & 0x40 else False

            # Otherwise, if the third-highest bit is 1 it's an encoding context
            # update.
            encoding_update = True if current & 0x20 else False

            if indexed:
                header, consumed = self._decode_indexed(data_mem[current_index:])
            elif literal_index:
                # It's a literal header that does affect the header table.
                header, consumed = self._decode_literal_index(data_mem[current_index:])
            elif encoding_update:
                # It's an update to the encoding context. These are forbidden
                # in a header block after any actual header.
                if headers:
                    raise HPACKDecodingError(
                        "Table size update not at the start of the block"
                    )
                consumed = self._update_encoding_context(data_mem[current_index:])
                header = None
            else:
                # It's a literal header that does not affect the header table.
                header, consumed = self._decode_literal_no_index(
                    data_mem[current_index:]
                )

            if header:
                headers.append(header)
                inflated_size += table_entry_size(*header)

                if inflated_size > self.max_header_list_size:
                    raise OversizedHeaderListError(
                        "A header list larger than %d has been received"
                        % self.max_header_list_size
                    )

            current_index += consumed

        # Confirm that the table size is lower than the maximum. We do this
        # here to ensure that we catch when the max has been *shrunk* and the
        # remote peer hasn't actually done that.
        self._assert_valid_table_size()

        try:
            return [_unicode_if_needed(h, raw) for h in headers]
        except UnicodeDecodeError:
            raise HPACKDecodingError("Unable to decode headers as UTF-8.")

    def _assert_valid_table_size(self):
        """
        Check that the table size set by the encoder is lower than the maximum
        we expect to have.
        """
        if self.header_table_size > self.max_allowed_table_size:
            raise InvalidTableSizeError(
                "Encoder did not shrink table size to within the max"
            )

    def _update_encoding_context(self, data):
        """
        Handles a byte that updates the encoding context.
        """
        # We've been asked to resize the header table.
        new_size, consumed = decode_integer(data, 5)
        if new_size > self.max_allowed_table_size:
            raise InvalidTableSizeError("Encoder exceeded max allowable table size")
        self.header_table_size = new_size
        return consumed

    def _decode_indexed(self, data):
        """
        Decodes a header represented using the indexed representation.
        """
        index, consumed = decode_integer(data, 7)
        header = HeaderTuple(*self.header_table.get_by_index(index))
        log.debug("Decoded %s, consumed %d", header, consumed)
        return header, consumed

    def _decode_literal_no_index(self, data):
        return self._decode_literal(data, False)

    def _decode_literal_index(self, data):
        return self._decode_literal(data, True)

    def _decode_literal(self, data, should_index):
        """
        Decodes a header represented with a literal.
        """
        total_consumed = 0

        # When should_index is true, if the low six bits of the first byte are
        # nonzero, the header name is indexed.
        # When should_index is false, if the low four bits of the first byte
        # are nonzero the header name is indexed.
        if should_index:
            indexed_name = data[0] & 0x3F
            name_len = 6
            not_indexable = False
        else:
            high_byte = data[0]
            indexed_name = high_byte & 0x0F
            name_len = 4
            not_indexable = high_byte & 0x10

        if indexed_name:
            # Indexed header name.
            index, consumed = decode_integer(data, name_len)
            name = self.header_table.get_by_index(index)[0]

            total_consumed = consumed
            length = 0
        else:
            # Literal header name. The first byte was consumed, so we need to
            # move forward.
            data = data[1:]

            length, consumed = decode_integer(data, 7)
            name = data[consumed : consumed + length]
            if len(name) != length:
                raise HPACKDecodingError("Truncated header block")

            if data[0] & 0x80:
                name = decode_huffman(name)
            total_consumed = consumed + length + 1  # Since we moved forward 1.

        data = data[consumed + length :]

        # The header value is definitely length-based.
        length, consumed = decode_integer(data, 7)
        value = data[consumed : consumed + length]
        if len(value) != length:
            raise HPACKDecodingError("Truncated header block")

        if data[0] & 0x80:
            value = decode_huffman(value)

        # Updated the total consumed length.
        total_consumed += length + consumed

        # If we have been told never to index the header field, encode that in
        # the tuple we use.
        if not_indexable:
            header = NeverIndexedHeaderTuple(name, value)
        else:
            header = HeaderTuple(name, value)

        # If we've been asked to index this, add it to the header table.
        if should_index:
            self.header_table.add(name, value)

        log.debug(
            "Decoded %s, total consumed %d bytes, indexed %s",
            header,
            total_consumed,
            should_index,
        )

        return header, total_consumed
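encode_integer and decode_integer implement the prefix integer coding of RFC 7541 Section 5.1; the worked example from Appendix C.1.3 (1337 with a 5-bit prefix) exercises both directions. A small check, assuming the module is importable as jh2.hpack.hpack (illustrative, not part of the committed file):

from jh2.hpack.hpack import decode_integer, encode_integer

# 1337 with a 5-bit prefix: 31 fills the prefix, then 1306 is emitted in
# 7-bit groups (least significant first, with the continuation bit set),
# giving the bytes 31, 154, 10.
assert encode_integer(1337, 5) == bytearray([31, 154, 10])
assert decode_integer(bytes([31, 154, 10]), 5) == (1337, 3)

# Values that fit entirely in the prefix are a single byte.
assert encode_integer(10, 5) == bytearray([10])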
66
.venv/lib/python3.9/site-packages/jh2/hpack/huffman.py
Normal file
@@ -0,0 +1,66 @@
"""
hpack/huffman_decoder
~~~~~~~~~~~~~~~~~~~~~

An implementation of a bitwise prefix tree specially built for decoding
Huffman-coded content where we already know the Huffman table.
"""

from __future__ import annotations


class HuffmanEncoder:
    """
    Encodes a string according to the Huffman encoding table defined in the
    HPACK specification.
    """

    def __init__(self, huffman_code_list, huffman_code_list_lengths):
        self.huffman_code_list = huffman_code_list
        self.huffman_code_list_lengths = huffman_code_list_lengths

    def encode(self, bytes_to_encode):
        """
        Given a string of bytes, encodes them according to the HPACK Huffman
        specification.
        """
        # If handed the empty string, just immediately return.
        if not bytes_to_encode:
            return b""

        final_num = 0
        final_int_len = 0

        # Turn each byte into its huffman code. These codes aren't necessarily
        # octet aligned, so keep track of how far through an octet we are. To
        # handle this cleanly, just use a single giant integer.
        for byte in bytes_to_encode:
            bin_int_len = self.huffman_code_list_lengths[byte]
            bin_int = self.huffman_code_list[byte] & (2 ** (bin_int_len + 1) - 1)
            final_num <<= bin_int_len
            final_num |= bin_int
            final_int_len += bin_int_len

        # Pad out to an octet with ones.
        bits_to_be_padded = (8 - (final_int_len % 8)) % 8
        final_num <<= bits_to_be_padded
        final_num |= (1 << bits_to_be_padded) - 1

        # Convert the number to hex and strip off the leading '0x' and the
        # trailing 'L', if present.
        final_num = hex(final_num)[2:].rstrip("L")

        # If this is odd, prepend a zero.
        final_num = "0" + final_num if len(final_num) % 2 != 0 else final_num

        # This number should have twice as many digits as bytes. If not, we're
        # missing some leading zeroes. Work out how many bytes we want and how
        # many digits we have, then add the missing zero digits to the front.
        total_bytes = (final_int_len + bits_to_be_padded) // 8
        expected_digits = total_bytes * 2

        if len(final_num) != expected_digits:
            missing_digits = expected_digits - len(final_num)
            final_num = ("0" * missing_digits) + final_num

        return bytes.fromhex(final_num)
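HuffmanEncoder packs each byte's code into one large integer and pads the tail with 1-bits, as the comments above describe. A sketch pairing it with the constants module (illustrative; the expected bytes are the RFC 7541 Appendix C.4.1 example for "www.example.com"):

from jh2.hpack.huffman import HuffmanEncoder
from jh2.hpack.huffman_constants import REQUEST_CODES, REQUEST_CODES_LENGTH

coder = HuffmanEncoder(REQUEST_CODES, REQUEST_CODES_LENGTH)
encoded = coder.encode(b"www.example.com")

# 15 input bytes compress to 12; expected value taken from RFC 7541 C.4.1.
assert encoded == bytes.fromhex("f1e3c2e5f23a6ba0ab90f4ff")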
530
.venv/lib/python3.9/site-packages/jh2/hpack/huffman_constants.py
Normal file
@@ -0,0 +1,530 @@
"""
hpack/huffman_constants
~~~~~~~~~~~~~~~~~~~~~~~

Defines the constant Huffman table. This takes up an upsetting amount of space,
but c'est la vie.
"""

from __future__ import annotations

# flake8: noqa
REQUEST_CODES = [
    0x1FF8, 0x7FFFD8, 0xFFFFFE2, 0xFFFFFE3, 0xFFFFFE4, 0xFFFFFE5, 0xFFFFFE6, 0xFFFFFE7,
    0xFFFFFE8, 0xFFFFEA, 0x3FFFFFFC, 0xFFFFFE9, 0xFFFFFEA, 0x3FFFFFFD, 0xFFFFFEB, 0xFFFFFEC,
    0xFFFFFED, 0xFFFFFEE, 0xFFFFFEF, 0xFFFFFF0, 0xFFFFFF1, 0xFFFFFF2, 0x3FFFFFFE, 0xFFFFFF3,
    0xFFFFFF4, 0xFFFFFF5, 0xFFFFFF6, 0xFFFFFF7, 0xFFFFFF8, 0xFFFFFF9, 0xFFFFFFA, 0xFFFFFFB,
    0x14, 0x3F8, 0x3F9, 0xFFA, 0x1FF9, 0x15, 0xF8, 0x7FA,
    0x3FA, 0x3FB, 0xF9, 0x7FB, 0xFA, 0x16, 0x17, 0x18,
    0x0, 0x1, 0x2, 0x19, 0x1A, 0x1B, 0x1C, 0x1D,
    0x1E, 0x1F, 0x5C, 0xFB, 0x7FFC, 0x20, 0xFFB, 0x3FC,
    0x1FFA, 0x21, 0x5D, 0x5E, 0x5F, 0x60, 0x61, 0x62,
    0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69, 0x6A,
    0x6B, 0x6C, 0x6D, 0x6E, 0x6F, 0x70, 0x71, 0x72,
    0xFC, 0x73, 0xFD, 0x1FFB, 0x7FFF0, 0x1FFC, 0x3FFC, 0x22,
    0x7FFD, 0x3, 0x23, 0x4, 0x24, 0x5, 0x25, 0x26,
    0x27, 0x6, 0x74, 0x75, 0x28, 0x29, 0x2A, 0x7,
    0x2B, 0x76, 0x2C, 0x8, 0x9, 0x2D, 0x77, 0x78,
    0x79, 0x7A, 0x7B, 0x7FFE, 0x7FC, 0x3FFD, 0x1FFD, 0xFFFFFFC,
    0xFFFE6, 0x3FFFD2, 0xFFFE7, 0xFFFE8, 0x3FFFD3, 0x3FFFD4, 0x3FFFD5, 0x7FFFD9,
    0x3FFFD6, 0x7FFFDA, 0x7FFFDB, 0x7FFFDC, 0x7FFFDD, 0x7FFFDE, 0xFFFFEB, 0x7FFFDF,
    0xFFFFEC, 0xFFFFED, 0x3FFFD7, 0x7FFFE0, 0xFFFFEE, 0x7FFFE1, 0x7FFFE2, 0x7FFFE3,
    0x7FFFE4, 0x1FFFDC, 0x3FFFD8, 0x7FFFE5, 0x3FFFD9, 0x7FFFE6, 0x7FFFE7, 0xFFFFEF,
    0x3FFFDA, 0x1FFFDD, 0xFFFE9, 0x3FFFDB, 0x3FFFDC, 0x7FFFE8, 0x7FFFE9, 0x1FFFDE,
    0x7FFFEA, 0x3FFFDD, 0x3FFFDE, 0xFFFFF0, 0x1FFFDF, 0x3FFFDF, 0x7FFFEB, 0x7FFFEC,
    0x1FFFE0, 0x1FFFE1, 0x3FFFE0, 0x1FFFE2, 0x7FFFED, 0x3FFFE1, 0x7FFFEE, 0x7FFFEF,
    0xFFFEA, 0x3FFFE2, 0x3FFFE3, 0x3FFFE4, 0x7FFFF0, 0x3FFFE5, 0x3FFFE6, 0x7FFFF1,
    0x3FFFFE0, 0x3FFFFE1, 0xFFFEB, 0x7FFF1, 0x3FFFE7, 0x7FFFF2, 0x3FFFE8, 0x1FFFFEC,
    0x3FFFFE2, 0x3FFFFE3, 0x3FFFFE4, 0x7FFFFDE, 0x7FFFFDF, 0x3FFFFE5, 0xFFFFF1, 0x1FFFFED,
    0x7FFF2, 0x1FFFE3, 0x3FFFFE6, 0x7FFFFE0, 0x7FFFFE1, 0x3FFFFE7, 0x7FFFFE2, 0xFFFFF2,
    0x1FFFE4, 0x1FFFE5, 0x3FFFFE8, 0x3FFFFE9, 0xFFFFFFD, 0x7FFFFE3, 0x7FFFFE4, 0x7FFFFE5,
    0xFFFEC, 0xFFFFF3, 0xFFFED, 0x1FFFE6, 0x3FFFE9, 0x1FFFE7, 0x1FFFE8, 0x7FFFF3,
    0x3FFFEA, 0x3FFFEB, 0x1FFFFEE, 0x1FFFFEF, 0xFFFFF4, 0xFFFFF5, 0x3FFFFEA, 0x7FFFF4,
    0x3FFFFEB, 0x7FFFFE6, 0x3FFFFEC, 0x3FFFFED, 0x7FFFFE7, 0x7FFFFE8, 0x7FFFFE9, 0x7FFFFEA,
    0x7FFFFEB, 0xFFFFFFE, 0x7FFFFEC, 0x7FFFFED, 0x7FFFFEE, 0x7FFFFEF, 0x7FFFFF0, 0x3FFFFEE,
    0x3FFFFFFF,
]

REQUEST_CODES_LENGTH = [
    13, 23, 28, 28, 28, 28, 28, 28, 28, 24, 30, 28, 28, 30, 28, 28,
    28, 28, 28, 28, 28, 28, 30, 28, 28, 28, 28, 28, 28, 28, 28, 28,
    6, 10, 10, 12, 13, 6, 8, 11, 10, 10, 8, 11, 8, 6, 6, 6,
    5, 5, 5, 6, 6, 6, 6, 6, 6, 6, 7, 8, 15, 6, 12, 10,
    13, 6, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
    7, 7, 7, 7, 7, 7, 7, 7, 8, 7, 8, 13, 19, 13, 14, 6,
    15, 5, 6, 5, 6, 5, 6, 6, 6, 5, 7, 7, 6, 6, 6, 5,
    6, 7, 6, 5, 5, 6, 7, 7, 7, 7, 7, 15, 11, 14, 13, 28,
    20, 22, 20, 20, 22, 22, 22, 23, 22, 23, 23, 23, 23, 23, 24, 23,
    24, 24, 22, 23, 24, 23, 23, 23, 23, 21, 22, 23, 22, 23, 23, 24,
    22, 21, 20, 22, 22, 23, 23, 21, 23, 22, 22, 24, 21, 22, 23, 23,
    21, 21, 22, 21, 23, 22, 23, 23, 20, 22, 22, 22, 23, 22, 22, 23,
    26, 26, 20, 19, 22, 23, 22, 25, 26, 26, 26, 27, 27, 26, 24, 25,
    19, 21, 26, 27, 27, 26, 27, 24, 21, 21, 26, 26, 28, 27, 27, 27,
    20, 24, 20, 21, 22, 21, 21, 23, 22, 22, 25, 25, 24, 24, 26, 23,
    26, 27, 26, 26, 27, 27, 27, 27, 27, 28, 27, 27, 27, 27, 27, 26,
    30,
]
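Both lists cover the 256 byte values plus the EOS symbol and are consumed positionally by HuffmanEncoder, so their lengths must match. A quick sanity-check sketch (illustrative, not part of the committed file):

from jh2.hpack.huffman_constants import REQUEST_CODES, REQUEST_CODES_LENGTH

# 256 octet symbols + 1 EOS symbol, one code and one bit length per symbol.
assert len(REQUEST_CODES) == len(REQUEST_CODES_LENGTH) == 257
assert REQUEST_CODES_LENGTH[ord("a")] == 5  # common ASCII letters get short codes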
4486
.venv/lib/python3.9/site-packages/jh2/hpack/huffman_table.py
Normal file
File diff suppressed because it is too large
42
.venv/lib/python3.9/site-packages/jh2/hpack/struct.py
Normal file
@@ -0,0 +1,42 @@
"""
hpack/struct
~~~~~~~~~~~~

Contains structures for representing header fields with associated metadata.
"""

from __future__ import annotations


class HeaderTuple(tuple):
    """
    A data structure that stores a single header field.

    HTTP headers can be thought of as tuples of ``(field name, field value)``.
    A single header block is a sequence of such tuples.

    In HTTP/2, however, certain bits of additional information are required for
    compressing these headers: in particular, whether the header field can be
    safely added to the HPACK compression context.

    This class stores a header that can be added to the compression context. In
    all other ways it behaves exactly like a tuple.
    """

    __slots__ = ()

    indexable = True

    def __new__(cls, *args):
        return tuple.__new__(cls, args)


class NeverIndexedHeaderTuple(HeaderTuple):
    """
    A data structure that stores a single header field that cannot be added to
    a HTTP/2 header compression context.
    """

    __slots__ = ()

    indexable = False
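The two tuple subclasses differ only in the class-level indexable flag, which Encoder.encode reads to choose between incremental indexing and the never-indexed literal form. A small illustration (not part of the committed file):

from jh2.hpack.struct import HeaderTuple, NeverIndexedHeaderTuple

plain = HeaderTuple(b"content-type", b"text/html")
secret = NeverIndexedHeaderTuple(b"authorization", b"Bearer abc")

assert plain.indexable and not secret.indexable
# Both still behave exactly like ordinary tuples.
assert plain == (b"content-type", b"text/html")
assert secret[0] == b"authorization"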
237
.venv/lib/python3.9/site-packages/jh2/hpack/table.py
Normal file
@@ -0,0 +1,237 @@
# flake8: noqa
from __future__ import annotations

import logging
from collections import deque

from .exceptions import InvalidTableIndex

log = logging.getLogger(__name__)


def table_entry_size(name, value):
    """
    Calculates the size of a single entry

    This size is mostly irrelevant to us and defined
    specifically to accommodate memory management for
    lower level implementations. The 32 extra bytes are
    considered the "maximum" overhead that would be
    required to represent each entry in the table.

    See RFC7541 Section 4.1
    """
    return 32 + len(name) + len(value)


class HeaderTable:
    """
    Implements the combined static and dynamic header table

    The name and value arguments for all the functions
    should ONLY be byte strings (b'') however this is not
    strictly enforced in the interface.

    See RFC7541 Section 2.3
    """

    #: Default maximum size of the dynamic table. See
    #: RFC7540 Section 6.5.2.
    DEFAULT_SIZE = 4096

    #: Constant list of static headers. See RFC7541 Section
    #: 2.3.1 and Appendix A
    STATIC_TABLE = (
        (b":authority", b""),  # noqa
        (b":method", b"GET"),  # noqa
        (b":method", b"POST"),  # noqa
        (b":path", b"/"),  # noqa
        (b":path", b"/index.html"),  # noqa
        (b":scheme", b"http"),  # noqa
        (b":scheme", b"https"),  # noqa
        (b":status", b"200"),  # noqa
        (b":status", b"204"),  # noqa
        (b":status", b"206"),  # noqa
        (b":status", b"304"),  # noqa
        (b":status", b"400"),  # noqa
        (b":status", b"404"),  # noqa
        (b":status", b"500"),  # noqa
        (b"accept-charset", b""),  # noqa
        (b"accept-encoding", b"gzip, deflate"),  # noqa
        (b"accept-language", b""),  # noqa
        (b"accept-ranges", b""),  # noqa
        (b"accept", b""),  # noqa
        (b"access-control-allow-origin", b""),  # noqa
        (b"age", b""),  # noqa
        (b"allow", b""),  # noqa
        (b"authorization", b""),  # noqa
        (b"cache-control", b""),  # noqa
        (b"content-disposition", b""),  # noqa
        (b"content-encoding", b""),  # noqa
        (b"content-language", b""),  # noqa
        (b"content-length", b""),  # noqa
        (b"content-location", b""),  # noqa
        (b"content-range", b""),  # noqa
        (b"content-type", b""),  # noqa
        (b"cookie", b""),  # noqa
        (b"date", b""),  # noqa
        (b"etag", b""),  # noqa
        (b"expect", b""),  # noqa
        (b"expires", b""),  # noqa
        (b"from", b""),  # noqa
        (b"host", b""),  # noqa
        (b"if-match", b""),  # noqa
        (b"if-modified-since", b""),  # noqa
        (b"if-none-match", b""),  # noqa
        (b"if-range", b""),  # noqa
        (b"if-unmodified-since", b""),  # noqa
        (b"last-modified", b""),  # noqa
        (b"link", b""),  # noqa
        (b"location", b""),  # noqa
        (b"max-forwards", b""),  # noqa
        (b"proxy-authenticate", b""),  # noqa
        (b"proxy-authorization", b""),  # noqa
        (b"range", b""),  # noqa
        (b"referer", b""),  # noqa
        (b"refresh", b""),  # noqa
        (b"retry-after", b""),  # noqa
        (b"server", b""),  # noqa
        (b"set-cookie", b""),  # noqa
        (b"strict-transport-security", b""),  # noqa
        (b"transfer-encoding", b""),  # noqa
        (b"user-agent", b""),  # noqa
        (b"vary", b""),  # noqa
        (b"via", b""),  # noqa
        (b"www-authenticate", b""),  # noqa
    )  # noqa

    STATIC_TABLE_LENGTH = len(STATIC_TABLE)

    def __init__(self):
        self._maxsize = HeaderTable.DEFAULT_SIZE
        self._current_size = 0
        self.resized = False
        self.dynamic_entries = deque()

    def get_by_index(self, index):
        """
        Returns the entry specified by index

        Note that the table is 1-based ie an index of 0 is
        invalid. This is due to the fact that a zero value
        index signals that a completely unindexed header
        follows.

        The entry will either be from the static table or
        the dynamic table depending on the value of index.
        """
        original_index = index
        index -= 1
        if 0 <= index:
            if index < HeaderTable.STATIC_TABLE_LENGTH:
                return HeaderTable.STATIC_TABLE[index]

            index -= HeaderTable.STATIC_TABLE_LENGTH
            if index < len(self.dynamic_entries):
                return self.dynamic_entries[index]

        raise InvalidTableIndex("Invalid table index %d" % original_index)

    def __repr__(self):
        return "HeaderTable(%d, %s, %r)" % (
            self._maxsize,
            self.resized,
            self.dynamic_entries,
        )

    def add(self, name, value):
        """
        Adds a new entry to the table

        We reduce the table size if the entry will make the
        table size greater than maxsize.
        """
        # We just clear the table if the entry is too big
        size = table_entry_size(name, value)
        if size > self._maxsize:
            self.dynamic_entries.clear()
            self._current_size = 0
        else:
            # Add new entry
            self.dynamic_entries.appendleft((name, value))
            self._current_size += size
            self._shrink()

    def search(self, name, value):
        """
        Searches the table for the entry specified by name
        and value

        Returns one of the following:
            - ``None``, no match at all
            - ``(index, name, None)`` for partial matches on name only.
            - ``(index, name, value)`` for perfect matches.
        """
        partial = None

        header_name_search_result = HeaderTable.STATIC_TABLE_MAPPING.get(name)
        if header_name_search_result:
            index = header_name_search_result[1].get(value)
            if index is not None:
                return index, name, value
            else:
                partial = (header_name_search_result[0], name, None)

        offset = HeaderTable.STATIC_TABLE_LENGTH + 1
        for i, (n, v) in enumerate(self.dynamic_entries):
            if n == name:
                if v == value:
                    return i + offset, n, v
                elif partial is None:
                    partial = (i + offset, n, None)
        return partial

    @property
    def maxsize(self):
        return self._maxsize

    @maxsize.setter
    def maxsize(self, newmax):
        newmax = int(newmax)
        log.debug("Resizing header table to %d from %d", newmax, self._maxsize)
        oldmax = self._maxsize
        self._maxsize = newmax
        self.resized = newmax != oldmax
        if newmax <= 0:
            self.dynamic_entries.clear()
            self._current_size = 0
        elif oldmax > newmax:
            self._shrink()

    def _shrink(self):
        """
        Shrinks the dynamic table to be at or below maxsize
        """
        cursize = self._current_size
        while cursize > self._maxsize:
            name, value = self.dynamic_entries.pop()
            cursize -= table_entry_size(name, value)
            log.debug("Evicting %s: %s from the header table", name, value)
        self._current_size = cursize


def _build_static_table_mapping():
    """
    Build static table mapping from header name to tuple with next structure:
    (<minimal index of header>, <mapping from header value to it index>).

    static_table_mapping used for hash searching.
    """
    static_table_mapping = {}
    for index, (name, value) in enumerate(HeaderTable.STATIC_TABLE, 1):
        header_name_search_result = static_table_mapping.setdefault(name, (index, {}))
        header_name_search_result[1][value] = index
    return static_table_mapping


HeaderTable.STATIC_TABLE_MAPPING = _build_static_table_mapping()
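get_by_index is 1-based, with the 61 static entries first and dynamic entries following, and search returns either a perfect or name-only match as its docstring describes. A short sketch (illustrative, not part of the committed file):

from jh2.hpack.table import HeaderTable

table = HeaderTable()
assert table.get_by_index(2) == (b":method", b"GET")   # static entry
assert table.search(b":method", b"GET") == (2, b":method", b"GET")

table.add(b"x-request-id", b"42")
# Dynamic entries start right after the 61 static ones.
assert table.get_by_index(62) == (b"x-request-id", b"42")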