This commit is contained in:
Untriex Programming
2021-03-17 08:57:57 +01:00
parent 339be0ccd8
commit ed6afdb5c9
3074 changed files with 423348 additions and 0 deletions

View File

@@ -0,0 +1,30 @@
# -*- coding: utf-8 -*-
"""
hpack
~~~~~
HTTP/2 header encoding for Python.
"""
from .hpack import Encoder, Decoder
from .struct import HeaderTuple, NeverIndexedHeaderTuple
from .exceptions import (
HPACKError,
HPACKDecodingError,
InvalidTableIndex,
OversizedHeaderListError,
InvalidTableSizeError
)
# The public names re-exported at the top of the ``hpack`` package.
__all__ = [
    'Encoder',
    'Decoder',
    'HeaderTuple',
    'NeverIndexedHeaderTuple',
    'HPACKError',
    'HPACKDecodingError',
    'InvalidTableIndex',
    'OversizedHeaderListError',
    'InvalidTableSizeError',
]

# Package release version string.
__version__ = '4.0.0'

View File

@@ -0,0 +1,49 @@
# -*- coding: utf-8 -*-
"""
hyper/http20/exceptions
~~~~~~~~~~~~~~~~~~~~~~~
This defines exceptions used in the HTTP/2 portion of hyper.
"""
class HPACKError(Exception):
    """
    Root of the ``hpack`` exception hierarchy; every error raised by this
    package derives from it.
    """
class HPACKDecodingError(HPACKError):
    """
    Raised when something goes wrong while performing HPACK decoding.
    """
class InvalidTableIndex(HPACKDecodingError):
    """
    Raised when a header-table index outside the valid range is received.
    """
class OversizedHeaderListError(HPACKDecodingError):
    """
    Raised when a received header list exceeds the size we are willing to
    accept. This may indicate a denial-of-service attack.

    .. versionadded:: 2.3.0
    """
class InvalidTableSizeError(HPACKDecodingError):
    """
    Raised when the decoder table size is asked to grow beyond the allowed
    maximum, or when the allowed maximum was shrunk and the remote peer
    failed to shrink its own table size accordingly.

    .. versionadded:: 3.0.0
    """

View File

@@ -0,0 +1,633 @@
# -*- coding: utf-8 -*-
"""
hpack/hpack
~~~~~~~~~~~
Implements the HPACK header compression algorithm as detailed by the IETF.
"""
import logging
from .table import HeaderTable, table_entry_size
from .exceptions import (
HPACKDecodingError, OversizedHeaderListError, InvalidTableSizeError
)
from .huffman import HuffmanEncoder
from .huffman_constants import (
REQUEST_CODES, REQUEST_CODES_LENGTH
)
from .huffman_table import decode_huffman
from .struct import HeaderTuple, NeverIndexedHeaderTuple
log = logging.getLogger(__name__)

# First-octet patterns selecting the literal header field representation:
# without indexing, never indexed, and with incremental indexing
# respectively (used by Encoder._encode_literal / _encode_indexed_literal).
INDEX_NONE = b'\x00'
INDEX_NEVER = b'\x10'
INDEX_INCREMENTAL = b'\x40'

# Precompute 2^i for 1-8 for use in prefix calcs.
# Zero index is not used but there to save a subtraction
# as prefix numbers are not zero indexed.
_PREFIX_BIT_MAX_NUMBERS = [(2 ** i) - 1 for i in range(9)]

# Python 2/3 compatibility shim: on Python 3 ``basestring`` does not exist,
# so fall back to the (str, bytes) pair consulted by _to_bytes().
try:  # pragma: no cover
    basestring = basestring
except NameError:  # pragma: no cover
    basestring = (str, bytes)

# We default the maximum header list we're willing to accept to 64kB. That's a
# lot of headers, but if applications want to raise it they can do.
DEFAULT_MAX_HEADER_LIST_SIZE = 2 ** 16
def _unicode_if_needed(header, raw):
"""
Provides a header as a unicode string if raw is False, otherwise returns
it as a bytestring.
"""
name = bytes(header[0])
value = bytes(header[1])
if not raw:
name = name.decode('utf-8')
value = value.decode('utf-8')
return header.__class__(name, value)
def encode_integer(integer, prefix_bits):
    """
    Serialize *integer* using the HPACK prefixed-integer representation
    with a prefix of *prefix_bits* bits.

    :returns: a ``bytearray`` holding the encoded octets.
    :raises ValueError: if *integer* is negative or *prefix_bits* is not
        in the range 1-8.
    """
    log.debug("Encoding %d with %d bits", integer, prefix_bits)

    if integer < 0:
        raise ValueError(
            "Can only encode positive integers, got %s" % integer
        )
    if not 1 <= prefix_bits <= 8:
        raise ValueError(
            "Prefix bits must be between 1 and 8, got %s" % prefix_bits
        )

    max_number = _PREFIX_BIT_MAX_NUMBERS[prefix_bits]

    # Small values fit entirely inside the prefix octet.
    if integer < max_number:
        return bytearray([integer])

    # Otherwise the prefix saturates, and the remainder follows as 7-bit
    # groups (least significant first) with the high bit as continuation.
    octets = bytearray([max_number])
    remainder = integer - max_number
    while remainder >= 128:
        octets.append(0x80 | (remainder & 0x7F))
        remainder >>= 7
    octets.append(remainder)
    return octets
def decode_integer(data, prefix_bits):
    """
    Parse an HPACK prefixed integer from the start of *data*.

    :returns: a ``(value, consumed)`` tuple, where *consumed* is the
        number of octets read from ``data``.
    :raises ValueError: if *prefix_bits* is not in the range 1-8.
    :raises HPACKDecodingError: if *data* ends before the integer does.
    """
    if not 1 <= prefix_bits <= 8:
        raise ValueError(
            "Prefix bits must be between 1 and 8, got %s" % prefix_bits
        )

    max_number = _PREFIX_BIT_MAX_NUMBERS[prefix_bits]
    mask = 0xFF >> (8 - prefix_bits)
    index = 1
    shift = 0

    try:
        number = data[0] & mask
        if number == max_number:
            # The prefix saturated: accumulate 7-bit continuation groups
            # until one arrives with its high (continuation) bit clear.
            while True:
                next_byte = data[index]
                index += 1
                if next_byte & 0x80:
                    number += (next_byte & 0x7F) << shift
                else:
                    number += next_byte << shift
                    break
                shift += 7
    except IndexError:
        raise HPACKDecodingError(
            "Unable to decode HPACK integer representation from %r" % data
        )

    log.debug("Decoded %d, consumed %d bytes", number, index)

    return number, index
def _dict_to_iterable(header_dict):
    """
    Yield ``(name, value)`` two-tuples from *header_dict*, emitting the
    HTTP/2 "special" headers (names beginning with ``:``) before all the
    ordinary ones, as HPACK requires.
    """
    assert isinstance(header_dict, dict)
    # sorted() is stable and False < True, so ':'-prefixed names sort first.
    ordered = sorted(
        header_dict,
        key=lambda name: not _to_bytes(name).startswith(b':')
    )
    for name in ordered:
        yield name, header_dict[name]
def _to_bytes(string):
"""
Convert string to bytes.
"""
if not isinstance(string, basestring): # pragma: no cover
string = str(string)
return string if isinstance(string, bytes) else string.encode('utf-8')
class Encoder:
    """
    An HPACK encoder object. This object takes HTTP headers and emits encoded
    HTTP/2 header blocks.
    """

    def __init__(self):
        #: The combined static/dynamic header table, shared across calls to
        #: :meth:`encode` so indexing state persists for a connection.
        self.header_table = HeaderTable()
        #: Huffman coder initialised with the request code tables.
        self.huffman_coder = HuffmanEncoder(
            REQUEST_CODES, REQUEST_CODES_LENGTH
        )
        #: Table size changes made since the last block was emitted; each
        #: must be signalled at the start of the next header block.
        self.table_size_changes = []

    @property
    def header_table_size(self):
        """
        Controls the size of the HPACK header table.
        """
        return self.header_table.maxsize

    @header_table_size.setter
    def header_table_size(self, value):
        self.header_table.maxsize = value
        # Record the change so encode() can emit a dynamic table size
        # update instruction at the start of the next header block.
        if self.header_table.resized:
            self.table_size_changes.append(value)

    def encode(self, headers, huffman=True):
        """
        Takes a set of headers and encodes them into a HPACK-encoded header
        block.

        :param headers: The headers to encode. Must be either an iterable of
                        tuples, an iterable of :class:`HeaderTuple
                        <hpack.HeaderTuple>`, or a ``dict``.

                        If an iterable of tuples, the tuples may be either
                        two-tuples or three-tuples. If they are two-tuples, the
                        tuples must be of the format ``(name, value)``. If they
                        are three-tuples, they must be of the format
                        ``(name, value, sensitive)``, where ``sensitive`` is a
                        boolean value indicating whether the header should be
                        added to header tables anywhere. If not present,
                        ``sensitive`` defaults to ``False``.

                        If an iterable of :class:`HeaderTuple
                        <hpack.HeaderTuple>`, the tuples must always be
                        two-tuples. Instead of using ``sensitive`` as a third
                        tuple entry, use :class:`NeverIndexedHeaderTuple
                        <hpack.NeverIndexedHeaderTuple>` to request that
                        the field never be indexed.

                        .. warning:: HTTP/2 requires that all special headers
                            (headers whose names begin with ``:`` characters)
                            appear at the *start* of the header block. While
                            this method will ensure that happens for ``dict``
                            subclasses, callers using any other iterable of
                            tuples **must** ensure they place their special
                            headers at the start of the iterable.

                            For efficiency reasons users should prefer to use
                            iterables of two-tuples: fixing the ordering of
                            dictionary headers is an expensive operation that
                            should be avoided if possible.

        :param huffman: (optional) Whether to Huffman-encode any header sent as
                        a literal value. Except for use when debugging, it is
                        recommended that this be left enabled.

        :returns: A bytestring containing the HPACK-encoded header block.
        """
        # Transforming the headers into a header block is a procedure that can
        # be modeled as a chain or pipe. First, the headers are encoded. This
        # encoding can be done a number of ways. If the header name-value pair
        # are already in the header table we can represent them using the
        # indexed representation: the same is true if they are in the static
        # table. Otherwise, a literal representation will be used.
        header_block = []

        # Turn the headers into a list of tuples if possible. This is the
        # natural way to interact with them in HPACK. Because dictionaries are
        # un-ordered, we need to make sure we grab the "special" headers first.
        if isinstance(headers, dict):
            headers = _dict_to_iterable(headers)

        # Before we begin, if the header table size has been changed we need
        # to signal all changes since last emission appropriately.
        if self.header_table.resized:
            header_block.append(self._encode_table_size_change())
            self.header_table.resized = False

        # Add each header to the header block
        for header in headers:
            sensitive = False
            if isinstance(header, HeaderTuple):
                sensitive = not header.indexable
            elif len(header) > 2:
                sensitive = header[2]

            header = (_to_bytes(header[0]), _to_bytes(header[1]))
            header_block.append(self.add(header, sensitive, huffman))

        header_block = b''.join(header_block)

        log.debug("Encoded header block to %s", header_block)

        return header_block

    def add(self, to_add, sensitive, huffman=False):
        """
        This function takes a header key-value tuple and serializes it.

        :param to_add: a ``(name, value)`` two-tuple of bytestrings.
        :param sensitive: when True, the header is never added to the header
            table and the "never indexed" representation is emitted.
        :param huffman: whether literal strings are Huffman-encoded.
        :returns: the encoded header field as a bytestring.
        """
        log.debug(
            "Adding %s to the header table, sensitive:%s, huffman:%s",
            to_add,
            sensitive,
            huffman
        )

        name, value = to_add

        # Set our indexing mode
        indexbit = INDEX_INCREMENTAL if not sensitive else INDEX_NEVER

        # Search for a matching header in the header table.
        match = self.header_table.search(name, value)

        if match is None:
            # Not in the header table. Encode using the literal syntax,
            # and add it to the header table.
            encoded = self._encode_literal(name, value, indexbit, huffman)
            if not sensitive:
                self.header_table.add(name, value)
            return encoded

        # The header is in the table, break out the values. If we matched
        # perfectly, we can use the indexed representation: otherwise we
        # can use the indexed literal.
        index, name, perfect = match

        if perfect:
            # Indexed representation.
            encoded = self._encode_indexed(index)
        else:
            # Indexed literal. We are going to add header to the
            # header table unconditionally. It is a future todo to
            # filter out headers which are known to be ineffective for
            # indexing since they just take space in the table and
            # pushed out other valuable headers.
            encoded = self._encode_indexed_literal(
                index, value, indexbit, huffman
            )
            if not sensitive:
                self.header_table.add(name, value)

        return encoded

    def _encode_indexed(self, index):
        """
        Encodes a header using the indexed representation.
        """
        field = encode_integer(index, 7)
        field[0] |= 0x80  # we set the top bit
        return bytes(field)

    def _encode_literal(self, name, value, indexbit, huffman=False):
        """
        Encodes a header with a literal name and literal value. If ``indexing``
        is True, the header will be added to the header table: otherwise it
        will not.
        """
        if huffman:
            name = self.huffman_coder.encode(name)
            value = self.huffman_coder.encode(value)

        name_len = encode_integer(len(name), 7)
        value_len = encode_integer(len(value), 7)

        # The top bit of each length octet flags that the string which
        # follows is Huffman-coded.
        if huffman:
            name_len[0] |= 0x80
            value_len[0] |= 0x80

        return b''.join(
            [indexbit, bytes(name_len), name, bytes(value_len), value]
        )

    def _encode_indexed_literal(self, index, value, indexbit, huffman=False):
        """
        Encodes a header with an indexed name and a literal value and performs
        incremental indexing.
        """
        # The name-index prefix is 6 bits wide for the incremental-indexing
        # representation and 4 bits wide for the other literal forms.
        if indexbit != INDEX_INCREMENTAL:
            prefix = encode_integer(index, 4)
        else:
            prefix = encode_integer(index, 6)

        prefix[0] |= ord(indexbit)

        if huffman:
            value = self.huffman_coder.encode(value)

        value_len = encode_integer(len(value), 7)

        if huffman:
            value_len[0] |= 0x80

        return b''.join([bytes(prefix), bytes(value_len), value])

    def _encode_table_size_change(self):
        """
        Produces the encoded form of all header table size change context
        updates.
        """
        block = b''
        for size_bytes in self.table_size_changes:
            size_bytes = encode_integer(size_bytes, 5)
            size_bytes[0] |= 0x20  # dynamic table size update opcode
            block += bytes(size_bytes)
        # All pending changes have been emitted; reset the queue.
        self.table_size_changes = []
        return block
class Decoder:
    """
    An HPACK decoder object.

    .. versionchanged:: 2.3.0
       Added ``max_header_list_size`` argument.

    :param max_header_list_size: The maximum decompressed size we will allow
        for any single header block. This is a protection against DoS attacks
        that attempt to force the application to expand a relatively small
        amount of data into a really large header list, allowing enormous
        amounts of memory to be allocated.

        If this amount of data is exceeded, a `OversizedHeaderListError
        <hpack.OversizedHeaderListError>` exception will be raised. At this
        point the connection should be shut down, as the HPACK state will no
        longer be usable.

        Defaults to 64kB.
    :type max_header_list_size: ``int``
    """
    def __init__(self, max_header_list_size=DEFAULT_MAX_HEADER_LIST_SIZE):
        #: Combined static/dynamic header table used to resolve indices.
        self.header_table = HeaderTable()

        #: The maximum decompressed size we will allow for any single header
        #: block. This is a protection against DoS attacks that attempt to
        #: force the application to expand a relatively small amount of data
        #: into a really large header list, allowing enormous amounts of memory
        #: to be allocated.
        #:
        #: If this amount of data is exceeded, a `OversizedHeaderListError
        #: <hpack.OversizedHeaderListError>` exception will be raised. At this
        #: point the connection should be shut down, as the HPACK state will no
        #: longer be usable.
        #:
        #: Defaults to 64kB.
        #:
        #: .. versionadded:: 2.3.0
        self.max_header_list_size = max_header_list_size

        #: Maximum allowed header table size.
        #:
        #: A HTTP/2 implementation should set this to the most recent value of
        #: SETTINGS_HEADER_TABLE_SIZE that it sent *and has received an ACK
        #: for*. Once this setting is set, the actual header table size will be
        #: checked at the end of each decoding run and whenever it is changed,
        #: to confirm that it fits in this size.
        self.max_allowed_table_size = self.header_table.maxsize

    @property
    def header_table_size(self):
        """
        Controls the size of the HPACK header table.
        """
        return self.header_table.maxsize

    @header_table_size.setter
    def header_table_size(self, value):
        self.header_table.maxsize = value

    def decode(self, data, raw=False):
        """
        Takes an HPACK-encoded header block and decodes it into a header set.

        :param data: A bytestring representing a complete HPACK-encoded header
                     block.
        :param raw: (optional) Whether to return the headers as tuples of raw
                    byte strings or to decode them as UTF-8 before returning
                    them. The default value is False, which returns tuples of
                    Unicode strings
        :returns: A list of two-tuples of ``(name, value)`` representing the
                  HPACK-encoded headers, in the order they were decoded.
        :raises HPACKDecodingError: If an error is encountered while decoding
                                    the header block.
        """
        log.debug("Decoding %s", data)

        # A memoryview lets the helper methods slice the buffer without
        # copying it on every field.
        data_mem = memoryview(data)
        headers = []
        data_len = len(data)
        inflated_size = 0
        current_index = 0

        while current_index < data_len:
            # Work out what kind of header we're decoding.
            # If the high bit is 1, it's an indexed field.
            current = data[current_index]
            indexed = True if current & 0x80 else False

            # Otherwise, if the second-highest bit is 1 it's a field that does
            # alter the header table.
            literal_index = True if current & 0x40 else False

            # Otherwise, if the third-highest bit is 1 it's an encoding context
            # update.
            encoding_update = True if current & 0x20 else False

            if indexed:
                header, consumed = self._decode_indexed(
                    data_mem[current_index:]
                )
            elif literal_index:
                # It's a literal header that does affect the header table.
                header, consumed = self._decode_literal_index(
                    data_mem[current_index:]
                )
            elif encoding_update:
                # It's an update to the encoding context. These are forbidden
                # in a header block after any actual header.
                if headers:
                    raise HPACKDecodingError(
                        "Table size update not at the start of the block"
                    )
                consumed = self._update_encoding_context(
                    data_mem[current_index:]
                )
                header = None
            else:
                # It's a literal header that does not affect the header table.
                header, consumed = self._decode_literal_no_index(
                    data_mem[current_index:]
                )

            if header:
                headers.append(header)
                # Track the decompressed size so a small input cannot blow
                # up into an unbounded header list (DoS protection).
                inflated_size += table_entry_size(*header)

                if inflated_size > self.max_header_list_size:
                    raise OversizedHeaderListError(
                        "A header list larger than %d has been received" %
                        self.max_header_list_size
                    )

            current_index += consumed

        # Confirm that the table size is lower than the maximum. We do this
        # here to ensure that we catch when the max has been *shrunk* and the
        # remote peer hasn't actually done that.
        self._assert_valid_table_size()

        try:
            return [_unicode_if_needed(h, raw) for h in headers]
        except UnicodeDecodeError:
            raise HPACKDecodingError("Unable to decode headers as UTF-8.")

    def _assert_valid_table_size(self):
        """
        Check that the table size set by the encoder is lower than the maximum
        we expect to have.
        """
        if self.header_table_size > self.max_allowed_table_size:
            raise InvalidTableSizeError(
                "Encoder did not shrink table size to within the max"
            )

    def _update_encoding_context(self, data):
        """
        Handles a byte that updates the encoding context.

        :returns: the number of octets consumed from ``data``.
        :raises InvalidTableSizeError: if the requested size exceeds
            :attr:`max_allowed_table_size`.
        """
        # We've been asked to resize the header table.
        new_size, consumed = decode_integer(data, 5)
        if new_size > self.max_allowed_table_size:
            raise InvalidTableSizeError(
                "Encoder exceeded max allowable table size"
            )
        self.header_table_size = new_size
        return consumed

    def _decode_indexed(self, data):
        """
        Decodes a header represented using the indexed representation.
        """
        index, consumed = decode_integer(data, 7)
        header = HeaderTuple(*self.header_table.get_by_index(index))
        log.debug("Decoded %s, consumed %d", header, consumed)
        return header, consumed

    def _decode_literal_no_index(self, data):
        # Literal header field without incremental indexing (or never
        # indexed): the header table is left untouched.
        return self._decode_literal(data, False)

    def _decode_literal_index(self, data):
        # Literal header field with incremental indexing: the decoded header
        # is also added to the dynamic table.
        return self._decode_literal(data, True)

    def _decode_literal(self, data, should_index):
        """
        Decodes a header represented with a literal.

        :param should_index: True for the incremental-indexing representation
            (6-bit name-index prefix, entry added to the table), False for the
            non-indexing/never-indexed forms (4-bit prefix).
        :returns: a ``(header, total_consumed)`` tuple.
        """
        total_consumed = 0

        # When should_index is true, if the low six bits of the first byte are
        # nonzero, the header name is indexed.
        # When should_index is false, if the low four bits of the first byte
        # are nonzero the header name is indexed.
        if should_index:
            indexed_name = data[0] & 0x3F
            name_len = 6
            not_indexable = False
        else:
            high_byte = data[0]
            indexed_name = high_byte & 0x0F
            name_len = 4
            # Bit 0x10 distinguishes "never indexed" from "without indexing".
            not_indexable = high_byte & 0x10

        if indexed_name:
            # Indexed header name.
            index, consumed = decode_integer(data, name_len)
            name = self.header_table.get_by_index(index)[0]

            total_consumed = consumed
            # No literal name bytes were read; keeps the slice below a no-op
            # offset relative to `consumed`.
            length = 0
        else:
            # Literal header name. The first byte was consumed, so we need to
            # move forward.
            data = data[1:]

            length, consumed = decode_integer(data, 7)
            name = data[consumed:consumed + length]
            if len(name) != length:
                raise HPACKDecodingError("Truncated header block")

            if data[0] & 0x80:
                name = decode_huffman(name)
            total_consumed = consumed + length + 1  # Since we moved forward 1.

        data = data[consumed + length:]

        # The header value is definitely length-based.
        length, consumed = decode_integer(data, 7)
        value = data[consumed:consumed + length]
        if len(value) != length:
            raise HPACKDecodingError("Truncated header block")

        if data[0] & 0x80:
            value = decode_huffman(value)

        # Updated the total consumed length.
        total_consumed += length + consumed

        # If we have been told never to index the header field, encode that in
        # the tuple we use.
        if not_indexable:
            header = NeverIndexedHeaderTuple(name, value)
        else:
            header = HeaderTuple(name, value)

        # If we've been asked to index this, add it to the header table.
        if should_index:
            self.header_table.add(name, value)

        log.debug(
            "Decoded %s, total consumed %d bytes, indexed %s",
            header,
            total_consumed,
            should_index
        )

        return header, total_consumed

View File

@@ -0,0 +1,66 @@
# -*- coding: utf-8 -*-
"""
hpack/huffman_decoder
~~~~~~~~~~~~~~~~~~~~~
An implementation of a bitwise prefix tree specially built for decoding
Huffman-coded content where we already know the Huffman table.
"""
class HuffmanEncoder:
    """
    Encodes a string according to the Huffman encoding table defined in the
    HPACK specification.

    :param huffman_code_list: sequence mapping each byte value (0-255) to its
        Huffman code, expressed as an integer.
    :param huffman_code_list_lengths: sequence mapping each byte value to the
        bit length of its Huffman code.
    """
    def __init__(self, huffman_code_list, huffman_code_list_lengths):
        self.huffman_code_list = huffman_code_list
        self.huffman_code_list_lengths = huffman_code_list_lengths

    def encode(self, bytes_to_encode):
        """
        Given a string of bytes, encodes them according to the HPACK Huffman
        specification.

        :param bytes_to_encode: the bytestring to encode.
        :returns: the Huffman-encoded bytestring, padded to an octet boundary
            with one-bits.
        """
        # If handed the empty string, just immediately return.
        if not bytes_to_encode:
            return b''

        final_num = 0
        final_int_len = 0

        # Turn each byte into its huffman code. These codes aren't necessarily
        # octet aligned, so keep track of how far through an octet we are. To
        # handle this cleanly, just use a single giant integer.
        for byte in bytes_to_encode:
            bin_int_len = self.huffman_code_list_lengths[byte]
            bin_int = self.huffman_code_list[byte] & (
                2 ** (bin_int_len + 1) - 1
            )
            final_num <<= bin_int_len
            final_num |= bin_int
            final_int_len += bin_int_len

        # Pad out to an octet with ones.
        bits_to_be_padded = (8 - (final_int_len % 8)) % 8
        final_num <<= bits_to_be_padded
        final_num |= (1 << bits_to_be_padded) - 1

        # Serialize the accumulated bit pattern directly. int.to_bytes emits
        # exactly `total_bytes` big-endian octets including any leading
        # zeroes, replacing the former hex-string round-trip (a Python 2
        # leftover that had to strip a trailing 'L' and hand-pad odd-length
        # hex strings).
        total_bytes = (final_int_len + bits_to_be_padded) // 8
        return final_num.to_bytes(total_bytes, 'big')

View File

@@ -0,0 +1,289 @@
# -*- coding: utf-8 -*-
"""
hpack/huffman_constants
~~~~~~~~~~~~~~~~~~~~~~~
Defines the constant Huffman table. This takes up an upsetting amount of space,
but c'est la vie.
"""
# flake8: noqa
REQUEST_CODES = [
0x1ff8,
0x7fffd8,
0xfffffe2,
0xfffffe3,
0xfffffe4,
0xfffffe5,
0xfffffe6,
0xfffffe7,
0xfffffe8,
0xffffea,
0x3ffffffc,
0xfffffe9,
0xfffffea,
0x3ffffffd,
0xfffffeb,
0xfffffec,
0xfffffed,
0xfffffee,
0xfffffef,
0xffffff0,
0xffffff1,
0xffffff2,
0x3ffffffe,
0xffffff3,
0xffffff4,
0xffffff5,
0xffffff6,
0xffffff7,
0xffffff8,
0xffffff9,
0xffffffa,
0xffffffb,
0x14,
0x3f8,
0x3f9,
0xffa,
0x1ff9,
0x15,
0xf8,
0x7fa,
0x3fa,
0x3fb,
0xf9,
0x7fb,
0xfa,
0x16,
0x17,
0x18,
0x0,
0x1,
0x2,
0x19,
0x1a,
0x1b,
0x1c,
0x1d,
0x1e,
0x1f,
0x5c,
0xfb,
0x7ffc,
0x20,
0xffb,
0x3fc,
0x1ffa,
0x21,
0x5d,
0x5e,
0x5f,
0x60,
0x61,
0x62,
0x63,
0x64,
0x65,
0x66,
0x67,
0x68,
0x69,
0x6a,
0x6b,
0x6c,
0x6d,
0x6e,
0x6f,
0x70,
0x71,
0x72,
0xfc,
0x73,
0xfd,
0x1ffb,
0x7fff0,
0x1ffc,
0x3ffc,
0x22,
0x7ffd,
0x3,
0x23,
0x4,
0x24,
0x5,
0x25,
0x26,
0x27,
0x6,
0x74,
0x75,
0x28,
0x29,
0x2a,
0x7,
0x2b,
0x76,
0x2c,
0x8,
0x9,
0x2d,
0x77,
0x78,
0x79,
0x7a,
0x7b,
0x7ffe,
0x7fc,
0x3ffd,
0x1ffd,
0xffffffc,
0xfffe6,
0x3fffd2,
0xfffe7,
0xfffe8,
0x3fffd3,
0x3fffd4,
0x3fffd5,
0x7fffd9,
0x3fffd6,
0x7fffda,
0x7fffdb,
0x7fffdc,
0x7fffdd,
0x7fffde,
0xffffeb,
0x7fffdf,
0xffffec,
0xffffed,
0x3fffd7,
0x7fffe0,
0xffffee,
0x7fffe1,
0x7fffe2,
0x7fffe3,
0x7fffe4,
0x1fffdc,
0x3fffd8,
0x7fffe5,
0x3fffd9,
0x7fffe6,
0x7fffe7,
0xffffef,
0x3fffda,
0x1fffdd,
0xfffe9,
0x3fffdb,
0x3fffdc,
0x7fffe8,
0x7fffe9,
0x1fffde,
0x7fffea,
0x3fffdd,
0x3fffde,
0xfffff0,
0x1fffdf,
0x3fffdf,
0x7fffeb,
0x7fffec,
0x1fffe0,
0x1fffe1,
0x3fffe0,
0x1fffe2,
0x7fffed,
0x3fffe1,
0x7fffee,
0x7fffef,
0xfffea,
0x3fffe2,
0x3fffe3,
0x3fffe4,
0x7ffff0,
0x3fffe5,
0x3fffe6,
0x7ffff1,
0x3ffffe0,
0x3ffffe1,
0xfffeb,
0x7fff1,
0x3fffe7,
0x7ffff2,
0x3fffe8,
0x1ffffec,
0x3ffffe2,
0x3ffffe3,
0x3ffffe4,
0x7ffffde,
0x7ffffdf,
0x3ffffe5,
0xfffff1,
0x1ffffed,
0x7fff2,
0x1fffe3,
0x3ffffe6,
0x7ffffe0,
0x7ffffe1,
0x3ffffe7,
0x7ffffe2,
0xfffff2,
0x1fffe4,
0x1fffe5,
0x3ffffe8,
0x3ffffe9,
0xffffffd,
0x7ffffe3,
0x7ffffe4,
0x7ffffe5,
0xfffec,
0xfffff3,
0xfffed,
0x1fffe6,
0x3fffe9,
0x1fffe7,
0x1fffe8,
0x7ffff3,
0x3fffea,
0x3fffeb,
0x1ffffee,
0x1ffffef,
0xfffff4,
0xfffff5,
0x3ffffea,
0x7ffff4,
0x3ffffeb,
0x7ffffe6,
0x3ffffec,
0x3ffffed,
0x7ffffe7,
0x7ffffe8,
0x7ffffe9,
0x7ffffea,
0x7ffffeb,
0xffffffe,
0x7ffffec,
0x7ffffed,
0x7ffffee,
0x7ffffef,
0x7fffff0,
0x3ffffee,
0x3fffffff,
]
REQUEST_CODES_LENGTH = [
13, 23, 28, 28, 28, 28, 28, 28, 28, 24, 30, 28, 28, 30, 28, 28,
28, 28, 28, 28, 28, 28, 30, 28, 28, 28, 28, 28, 28, 28, 28, 28,
6, 10, 10, 12, 13, 6, 8, 11, 10, 10, 8, 11, 8, 6, 6, 6,
5, 5, 5, 6, 6, 6, 6, 6, 6, 6, 7, 8, 15, 6, 12, 10,
13, 6, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
7, 7, 7, 7, 7, 7, 7, 7, 8, 7, 8, 13, 19, 13, 14, 6,
15, 5, 6, 5, 6, 5, 6, 6, 6, 5, 7, 7, 6, 6, 6, 5,
6, 7, 6, 5, 5, 6, 7, 7, 7, 7, 7, 15, 11, 14, 13, 28,
20, 22, 20, 20, 22, 22, 22, 23, 22, 23, 23, 23, 23, 23, 24, 23,
24, 24, 22, 23, 24, 23, 23, 23, 23, 21, 22, 23, 22, 23, 23, 24,
22, 21, 20, 22, 22, 23, 23, 21, 23, 22, 22, 24, 21, 22, 23, 23,
21, 21, 22, 21, 23, 22, 23, 23, 20, 22, 22, 22, 23, 22, 22, 23,
26, 26, 20, 19, 22, 23, 22, 25, 26, 26, 26, 27, 27, 26, 24, 25,
19, 21, 26, 27, 27, 26, 27, 24, 21, 21, 26, 26, 28, 27, 27, 27,
20, 24, 20, 21, 22, 21, 21, 23, 22, 22, 25, 25, 24, 24, 26, 23,
26, 27, 26, 26, 27, 27, 27, 27, 27, 28, 27, 27, 27, 27, 27, 26,
30,
]

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,39 @@
# -*- coding: utf-8 -*-
"""
hpack/struct
~~~~~~~~~~~~
Contains structures for representing header fields with associated metadata.
"""
class HeaderTuple(tuple):
    """
    A single HTTP header field stored as a ``(field name, field value)``
    tuple.

    Behaves exactly like a plain tuple, but additionally records — via the
    ``indexable`` class attribute — whether the field may safely be added
    to an HPACK compression context. Instances of this class *may* be
    indexed; see :class:`NeverIndexedHeaderTuple
    <hpack.NeverIndexedHeaderTuple>` for fields that must not be.
    """
    __slots__ = ()

    # This field may be added to an HPACK compression context.
    indexable = True

    def __new__(cls, *args):
        return super().__new__(cls, args)
class NeverIndexedHeaderTuple(HeaderTuple):
    """
    A single HTTP header field that must never be added to an HTTP/2 header
    compression context.
    """
    __slots__ = ()

    # Never admit this field to an HPACK compression context.
    indexable = False

View File

@@ -0,0 +1,235 @@
# -*- coding: utf-8 -*-
# flake8: noqa
from collections import deque
import logging
from .exceptions import InvalidTableIndex
log = logging.getLogger(__name__)
def table_entry_size(name, value):
    """
    Return the size a single header table entry occupies: the octet lengths
    of its name and value plus a fixed 32-octet overhead that the
    specification reserves to cover per-entry bookkeeping in lower level
    implementations.

    See RFC7541 Section 4.1
    """
    return len(name) + len(value) + 32
class HeaderTable:
    """
    Implements the combined static and dynamic header table

    The name and value arguments for all the functions
    should ONLY be byte strings (b'') however this is not
    strictly enforced in the interface.

    See RFC7541 Section 2.3
    """
    #: Default maximum size of the dynamic table. See
    #: RFC7540 Section 6.5.2.
    DEFAULT_SIZE = 4096

    #: Constant list of static headers. See RFC7541 Section
    #: 2.3.1 and Appendix A
    STATIC_TABLE = (
        (b':authority'                  , b''             ),  # noqa
        (b':method'                     , b'GET'          ),  # noqa
        (b':method'                     , b'POST'         ),  # noqa
        (b':path'                       , b'/'            ),  # noqa
        (b':path'                       , b'/index.html'  ),  # noqa
        (b':scheme'                     , b'http'         ),  # noqa
        (b':scheme'                     , b'https'        ),  # noqa
        (b':status'                     , b'200'          ),  # noqa
        (b':status'                     , b'204'          ),  # noqa
        (b':status'                     , b'206'          ),  # noqa
        (b':status'                     , b'304'          ),  # noqa
        (b':status'                     , b'400'          ),  # noqa
        (b':status'                     , b'404'          ),  # noqa
        (b':status'                     , b'500'          ),  # noqa
        (b'accept-charset'              , b''             ),  # noqa
        (b'accept-encoding'             , b'gzip, deflate'),  # noqa
        (b'accept-language'             , b''             ),  # noqa
        (b'accept-ranges'               , b''             ),  # noqa
        (b'accept'                      , b''             ),  # noqa
        (b'access-control-allow-origin' , b''             ),  # noqa
        (b'age'                         , b''             ),  # noqa
        (b'allow'                       , b''             ),  # noqa
        (b'authorization'               , b''             ),  # noqa
        (b'cache-control'               , b''             ),  # noqa
        (b'content-disposition'         , b''             ),  # noqa
        (b'content-encoding'            , b''             ),  # noqa
        (b'content-language'            , b''             ),  # noqa
        (b'content-length'              , b''             ),  # noqa
        (b'content-location'            , b''             ),  # noqa
        (b'content-range'               , b''             ),  # noqa
        (b'content-type'                , b''             ),  # noqa
        (b'cookie'                      , b''             ),  # noqa
        (b'date'                        , b''             ),  # noqa
        (b'etag'                        , b''             ),  # noqa
        (b'expect'                      , b''             ),  # noqa
        (b'expires'                     , b''             ),  # noqa
        (b'from'                        , b''             ),  # noqa
        (b'host'                        , b''             ),  # noqa
        (b'if-match'                    , b''             ),  # noqa
        (b'if-modified-since'           , b''             ),  # noqa
        (b'if-none-match'               , b''             ),  # noqa
        (b'if-range'                    , b''             ),  # noqa
        (b'if-unmodified-since'         , b''             ),  # noqa
        (b'last-modified'               , b''             ),  # noqa
        (b'link'                        , b''             ),  # noqa
        (b'location'                    , b''             ),  # noqa
        (b'max-forwards'                , b''             ),  # noqa
        (b'proxy-authenticate'          , b''             ),  # noqa
        (b'proxy-authorization'         , b''             ),  # noqa
        (b'range'                       , b''             ),  # noqa
        (b'referer'                     , b''             ),  # noqa
        (b'refresh'                     , b''             ),  # noqa
        (b'retry-after'                 , b''             ),  # noqa
        (b'server'                      , b''             ),  # noqa
        (b'set-cookie'                  , b''             ),  # noqa
        (b'strict-transport-security'   , b''             ),  # noqa
        (b'transfer-encoding'           , b''             ),  # noqa
        (b'user-agent'                  , b''             ),  # noqa
        (b'vary'                        , b''             ),  # noqa
        (b'via'                         , b''             ),  # noqa
        (b'www-authenticate'            , b''             ),  # noqa
    )  # noqa

    STATIC_TABLE_LENGTH = len(STATIC_TABLE)

    def __init__(self):
        # Maximum permitted size of the dynamic table, in octets.
        self._maxsize = HeaderTable.DEFAULT_SIZE
        # Octets currently occupied by the dynamic entries.
        self._current_size = 0
        # Set whenever maxsize changes; the encoder reads this to know a
        # table size update must be signalled.
        self.resized = False
        # Dynamic entries, newest first (index 0 is the most recent).
        self.dynamic_entries = deque()

    def get_by_index(self, index):
        """
        Returns the entry specified by index

        Note that the table is 1-based ie an index of 0 is
        invalid. This is due to the fact that a zero value
        index signals that a completely unindexed header
        follows.

        The entry will either be from the static table or
        the dynamic table depending on the value of index.

        :raises InvalidTableIndex: if *index* is 0 or beyond the end of
            the dynamic table.
        """
        original_index = index
        index -= 1
        if 0 <= index:
            if index < HeaderTable.STATIC_TABLE_LENGTH:
                return HeaderTable.STATIC_TABLE[index]

            # Shift into the dynamic-table index range.
            index -= HeaderTable.STATIC_TABLE_LENGTH
            if index < len(self.dynamic_entries):
                return self.dynamic_entries[index]

        raise InvalidTableIndex("Invalid table index %d" % original_index)

    def __repr__(self):
        return "HeaderTable(%d, %s, %r)" % (
            self._maxsize,
            self.resized,
            self.dynamic_entries
        )

    def add(self, name, value):
        """
        Adds a new entry to the table

        We reduce the table size if the entry will make the
        table size greater than maxsize.
        """
        # We just clear the table if the entry is too big
        size = table_entry_size(name, value)
        if size > self._maxsize:
            self.dynamic_entries.clear()
            self._current_size = 0
        else:
            # Add new entry
            self.dynamic_entries.appendleft((name, value))
            self._current_size += size
            self._shrink()

    def search(self, name, value):
        """
        Searches the table for the entry specified by name
        and value

        Returns one of the following:
            - ``None``, no match at all
            - ``(index, name, None)`` for partial matches on name only.
            - ``(index, name, value)`` for perfect matches.
        """
        partial = None

        # Check the static table first via the precomputed name mapping.
        header_name_search_result = HeaderTable.STATIC_TABLE_MAPPING.get(name)
        if header_name_search_result:
            index = header_name_search_result[1].get(value)
            if index is not None:
                return index, name, value
            else:
                partial = (header_name_search_result[0], name, None)

        # Fall back to a linear scan of the dynamic entries, whose indices
        # begin immediately after the static table.
        offset = HeaderTable.STATIC_TABLE_LENGTH + 1
        for (i, (n, v)) in enumerate(self.dynamic_entries):
            if n == name:
                if v == value:
                    return i + offset, n, v
                elif partial is None:
                    partial = (i + offset, n, None)
        return partial

    @property
    def maxsize(self):
        # Maximum permitted size of the dynamic table, in octets.
        return self._maxsize

    @maxsize.setter
    def maxsize(self, newmax):
        newmax = int(newmax)
        log.debug("Resizing header table to %d from %d", newmax, self._maxsize)
        oldmax = self._maxsize
        self._maxsize = newmax
        # Flag the change so the encoder emits a table size update; note
        # that setting the same size clears any previously-set flag.
        self.resized = (newmax != oldmax)
        if newmax <= 0:
            self.dynamic_entries.clear()
            self._current_size = 0
        elif oldmax > newmax:
            self._shrink()

    def _shrink(self):
        """
        Shrinks the dynamic table to be at or below maxsize
        """
        cursize = self._current_size
        # Evict the oldest entries (from the right end) until we fit.
        while cursize > self._maxsize:
            name, value = self.dynamic_entries.pop()
            cursize -= table_entry_size(name, value)
            log.debug("Evicting %s: %s from the header table", name, value)
        self._current_size = cursize
def _build_static_table_mapping():
    """
    Precompute a hash-lookup structure over ``HeaderTable.STATIC_TABLE``:
    a dict mapping each header name to
    ``(<lowest index for that name>, {value: index})``.

    ``HeaderTable.search`` uses the result for O(1) static-table lookups.
    """
    mapping = {}
    for index, (name, value) in enumerate(HeaderTable.STATIC_TABLE, 1):
        # setdefault keeps the first (lowest) index seen for each name.
        by_value = mapping.setdefault(name, (index, {}))[1]
        by_value[value] = index
    return mapping


HeaderTable.STATIC_TABLE_MAPPING = _build_static_table_mapping()