Diffstat (limited to '.venv/lib/python3.12/site-packages/asyncpg/protocol/codecs')
-rw-r--r--   .venv/lib/python3.12/site-packages/asyncpg/protocol/codecs/__init__.py      0
-rw-r--r--   .venv/lib/python3.12/site-packages/asyncpg/protocol/codecs/array.pyx      875
-rw-r--r--   .venv/lib/python3.12/site-packages/asyncpg/protocol/codecs/base.pxd       187
-rw-r--r--   .venv/lib/python3.12/site-packages/asyncpg/protocol/codecs/base.pyx       895
-rw-r--r--   .venv/lib/python3.12/site-packages/asyncpg/protocol/codecs/pgproto.pyx    484
-rw-r--r--   .venv/lib/python3.12/site-packages/asyncpg/protocol/codecs/range.pyx      207
-rw-r--r--   .venv/lib/python3.12/site-packages/asyncpg/protocol/codecs/record.pyx      71
-rw-r--r--   .venv/lib/python3.12/site-packages/asyncpg/protocol/codecs/textutils.pyx   99
8 files changed, 2818 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/asyncpg/protocol/codecs/__init__.py b/.venv/lib/python3.12/site-packages/asyncpg/protocol/codecs/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/asyncpg/protocol/codecs/__init__.py
diff --git a/.venv/lib/python3.12/site-packages/asyncpg/protocol/codecs/array.pyx b/.venv/lib/python3.12/site-packages/asyncpg/protocol/codecs/array.pyx
new file mode 100644
index 00000000..f8f9b8dd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/asyncpg/protocol/codecs/array.pyx
@@ -0,0 +1,875 @@
+# Copyright (C) 2016-present the asyncpg authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of asyncpg and is released under
+# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0
+
+
+from collections.abc import (Iterable as IterableABC,
+ Mapping as MappingABC,
+ Sized as SizedABC)
+
+from asyncpg import exceptions
+
+
+DEF ARRAY_MAXDIM = 6 # defined in postgresql/src/includes/c.h
+
+# "NULL"
+cdef Py_UCS4 *APG_NULL = [0x004E, 0x0055, 0x004C, 0x004C, 0x0000]
+
+
+ctypedef object (*encode_func_ex)(ConnectionSettings settings,
+ WriteBuffer buf,
+ object obj,
+ const void *arg)
+
+
+ctypedef object (*decode_func_ex)(ConnectionSettings settings,
+ FRBuffer *buf,
+ const void *arg)
+
+
+cdef inline bint _is_trivial_container(object obj):
+ return cpython.PyUnicode_Check(obj) or cpython.PyBytes_Check(obj) or \
+ cpythonx.PyByteArray_Check(obj) or cpythonx.PyMemoryView_Check(obj)
+
+
+cdef inline _is_array_iterable(object obj):
+ return (
+ isinstance(obj, IterableABC) and
+ isinstance(obj, SizedABC) and
+ not _is_trivial_container(obj) and
+ not isinstance(obj, MappingABC)
+ )
+
+
+cdef inline _is_sub_array_iterable(object obj):
+ # Sub-arrays have a specialized check, because we treat
+ # nested tuples as records.
+ return _is_array_iterable(obj) and not cpython.PyTuple_Check(obj)
+
+
+cdef _get_array_shape(object obj, int32_t *dims, int32_t *ndims):
+ cdef:
+ ssize_t mylen = len(obj)
+ ssize_t elemlen = -2
+ object it
+
+ if mylen > _MAXINT32:
+ raise ValueError('too many elements in array value')
+
+ if ndims[0] > ARRAY_MAXDIM:
+ raise ValueError(
+ 'number of array dimensions ({}) exceed the maximum expected ({})'.
+ format(ndims[0], ARRAY_MAXDIM))
+
+ dims[ndims[0] - 1] = <int32_t>mylen
+
+ for elem in obj:
+ if _is_sub_array_iterable(elem):
+ if elemlen == -2:
+ elemlen = len(elem)
+ if elemlen > _MAXINT32:
+ raise ValueError('too many elements in array value')
+ ndims[0] += 1
+ _get_array_shape(elem, dims, ndims)
+ else:
+ if len(elem) != elemlen:
+ raise ValueError('non-homogeneous array')
+ else:
+ if elemlen >= 0:
+ raise ValueError('non-homogeneous array')
+ else:
+ elemlen = -1
+
+
+cdef _write_array_data(ConnectionSettings settings, object obj, int32_t ndims,
+ int32_t dim, WriteBuffer elem_data,
+ encode_func_ex encoder, const void *encoder_arg):
+ if dim < ndims - 1:
+ for item in obj:
+ _write_array_data(settings, item, ndims, dim + 1, elem_data,
+ encoder, encoder_arg)
+ else:
+ for item in obj:
+ if item is None:
+ elem_data.write_int32(-1)
+ else:
+ try:
+ encoder(settings, elem_data, item, encoder_arg)
+ except TypeError as e:
+ raise ValueError(
+ 'invalid array element: {}'.format(e.args[0])) from None
+
+
+cdef inline array_encode(ConnectionSettings settings, WriteBuffer buf,
+ object obj, uint32_t elem_oid,
+ encode_func_ex encoder, const void *encoder_arg):
+ cdef:
+ WriteBuffer elem_data
+ int32_t dims[ARRAY_MAXDIM]
+ int32_t ndims = 1
+ int32_t i
+
+ if not _is_array_iterable(obj):
+ raise TypeError(
+ 'a sized iterable container expected (got type {!r})'.format(
+ type(obj).__name__))
+
+ _get_array_shape(obj, dims, &ndims)
+
+ elem_data = WriteBuffer.new()
+
+ if ndims > 1:
+ _write_array_data(settings, obj, ndims, 0, elem_data,
+ encoder, encoder_arg)
+ else:
+ for i, item in enumerate(obj):
+ if item is None:
+ elem_data.write_int32(-1)
+ else:
+ try:
+ encoder(settings, elem_data, item, encoder_arg)
+ except TypeError as e:
+ raise ValueError(
+ 'invalid array element at index {}: {}'.format(
+ i, e.args[0])) from None
+
+ buf.write_int32(12 + 8 * ndims + elem_data.len())
+ # Number of dimensions
+ buf.write_int32(ndims)
+ # flags
+ buf.write_int32(0)
+ # element type
+ buf.write_int32(<int32_t>elem_oid)
+ # upper / lower bounds
+ for i in range(ndims):
+ buf.write_int32(dims[i])
+ buf.write_int32(1)
+ # element data
+ buf.write_buffer(elem_data)
+
+
+cdef _write_textarray_data(ConnectionSettings settings, object obj,
+ int32_t ndims, int32_t dim, WriteBuffer array_data,
+ encode_func_ex encoder, const void *encoder_arg,
+ Py_UCS4 typdelim):
+ cdef:
+ ssize_t i = 0
+ int8_t delim = <int8_t>typdelim
+ WriteBuffer elem_data
+ Py_buffer pybuf
+ const char *elem_str
+ char ch
+ ssize_t elem_len
+ ssize_t quoted_elem_len
+ bint need_quoting
+
+ array_data.write_byte(b'{')
+
+ if dim < ndims - 1:
+ for item in obj:
+ if i > 0:
+ array_data.write_byte(delim)
+ array_data.write_byte(b' ')
+ _write_textarray_data(settings, item, ndims, dim + 1, array_data,
+ encoder, encoder_arg, typdelim)
+ i += 1
+ else:
+ for item in obj:
+ elem_data = WriteBuffer.new()
+
+ if i > 0:
+ array_data.write_byte(delim)
+ array_data.write_byte(b' ')
+
+ if item is None:
+ array_data.write_bytes(b'NULL')
+ i += 1
+ continue
+ else:
+ try:
+ encoder(settings, elem_data, item, encoder_arg)
+ except TypeError as e:
+ raise ValueError(
+ 'invalid array element: {}'.format(
+ e.args[0])) from None
+
+ # element string length (the first four bytes are the encoded length)
+ elem_len = elem_data.len() - 4
+
+ if elem_len == 0:
+ # Empty string
+ array_data.write_bytes(b'""')
+ else:
+ cpython.PyObject_GetBuffer(
+ elem_data, &pybuf, cpython.PyBUF_SIMPLE)
+
+ elem_str = <const char*>(pybuf.buf) + 4
+
+ try:
+ if not apg_strcasecmp_char(elem_str, b'NULL'):
+ array_data.write_byte(b'"')
+ array_data.write_cstr(elem_str, 4)
+ array_data.write_byte(b'"')
+ else:
+ quoted_elem_len = elem_len
+ need_quoting = False
+
+ for i in range(elem_len):
+ ch = elem_str[i]
+ if ch == b'"' or ch == b'\\':
+ # Quotes and backslashes need escaping.
+ quoted_elem_len += 1
+ need_quoting = True
+ elif (ch == b'{' or ch == b'}' or ch == delim or
+ apg_ascii_isspace(<uint32_t>ch)):
+ need_quoting = True
+
+ if need_quoting:
+ array_data.write_byte(b'"')
+
+ if quoted_elem_len == elem_len:
+ array_data.write_cstr(elem_str, elem_len)
+ else:
+ # Escaping required.
+ for i in range(elem_len):
+ ch = elem_str[i]
+ if ch == b'"' or ch == b'\\':
+ array_data.write_byte(b'\\')
+ array_data.write_byte(ch)
+
+ array_data.write_byte(b'"')
+ else:
+ array_data.write_cstr(elem_str, elem_len)
+ finally:
+ cpython.PyBuffer_Release(&pybuf)
+
+ i += 1
+
+ array_data.write_byte(b'}')
+
+
+cdef inline textarray_encode(ConnectionSettings settings, WriteBuffer buf,
+ object obj, encode_func_ex encoder,
+ const void *encoder_arg, Py_UCS4 typdelim):
+ cdef:
+ WriteBuffer array_data
+ int32_t dims[ARRAY_MAXDIM]
+ int32_t ndims = 1
+ int32_t i
+
+ if not _is_array_iterable(obj):
+ raise TypeError(
+ 'a sized iterable container expected (got type {!r})'.format(
+ type(obj).__name__))
+
+ _get_array_shape(obj, dims, &ndims)
+
+ array_data = WriteBuffer.new()
+ _write_textarray_data(settings, obj, ndims, 0, array_data,
+ encoder, encoder_arg, typdelim)
+ buf.write_int32(array_data.len())
+ buf.write_buffer(array_data)
+
+
+cdef inline array_decode(ConnectionSettings settings, FRBuffer *buf,
+ decode_func_ex decoder, const void *decoder_arg):
+ cdef:
+ int32_t ndims = hton.unpack_int32(frb_read(buf, 4))
+ int32_t flags = hton.unpack_int32(frb_read(buf, 4))
+ uint32_t elem_oid = <uint32_t>hton.unpack_int32(frb_read(buf, 4))
+ list result
+ int i
+ int32_t elem_len
+ int32_t elem_count = 1
+ FRBuffer elem_buf
+ int32_t dims[ARRAY_MAXDIM]
+ Codec elem_codec
+
+ if ndims == 0:
+ return []
+
+ if ndims > ARRAY_MAXDIM:
+ raise exceptions.ProtocolError(
+ 'number of array dimensions ({}) exceed the maximum expected ({})'.
+ format(ndims, ARRAY_MAXDIM))
+ elif ndims < 0:
+ raise exceptions.ProtocolError(
+ 'unexpected array dimensions value: {}'.format(ndims))
+
+ for i in range(ndims):
+ dims[i] = hton.unpack_int32(frb_read(buf, 4))
+ if dims[i] < 0:
+ raise exceptions.ProtocolError(
+ 'unexpected array dimension size: {}'.format(dims[i]))
+ # Ignore the lower bound information
+ frb_read(buf, 4)
+
+ if ndims == 1:
+ # Fast path for flat arrays
+ elem_count = dims[0]
+ result = cpython.PyList_New(elem_count)
+
+ for i in range(elem_count):
+ elem_len = hton.unpack_int32(frb_read(buf, 4))
+ if elem_len == -1:
+ elem = None
+ else:
+ frb_slice_from(&elem_buf, buf, elem_len)
+ elem = decoder(settings, &elem_buf, decoder_arg)
+
+ cpython.Py_INCREF(elem)
+ cpython.PyList_SET_ITEM(result, i, elem)
+
+ else:
+ result = _nested_array_decode(settings, buf,
+ decoder, decoder_arg, ndims, dims,
+ &elem_buf)
+
+ return result
+
+
+cdef _nested_array_decode(ConnectionSettings settings,
+ FRBuffer *buf,
+ decode_func_ex decoder,
+ const void *decoder_arg,
+ int32_t ndims, int32_t *dims,
+ FRBuffer *elem_buf):
+
+ cdef:
+ int32_t elem_len
+ int64_t i, j
+ int64_t array_len = 1
+ object elem, stride
+ # An array of pointers to lists for each current array level.
+ void *strides[ARRAY_MAXDIM]
+ # An array of current positions at each array level.
+ int32_t indexes[ARRAY_MAXDIM]
+
+ for i in range(ndims):
+ array_len *= dims[i]
+ indexes[i] = 0
+ strides[i] = NULL
+
+ if array_len == 0:
+ # A multidimensional array with a zero-sized dimension?
+ return []
+
+ elif array_len < 0:
+ # Array length overflow
+ raise exceptions.ProtocolError('array length overflow')
+
+ for i in range(array_len):
+ # Decode the element.
+ elem_len = hton.unpack_int32(frb_read(buf, 4))
+ if elem_len == -1:
+ elem = None
+ else:
+ elem = decoder(settings,
+ frb_slice_from(elem_buf, buf, elem_len),
+ decoder_arg)
+
+ # Take an explicit reference, as PyList_SET_ITEM in the loop
+ # below expects one.
+ cpython.Py_INCREF(elem)
+
+ # Iterate over array dimensions and put the element in
+ # the correctly nested sublist.
+ for j in reversed(range(ndims)):
+ if indexes[j] == 0:
+ # Allocate the list for this array level.
+ stride = cpython.PyList_New(dims[j])
+
+ strides[j] = <void*><cpython.PyObject>stride
+ # Take an explicit reference, as PyList_SET_ITEM below
+ # expects one.
+ cpython.Py_INCREF(stride)
+
+ stride = <object><cpython.PyObject*>strides[j]
+ cpython.PyList_SET_ITEM(stride, indexes[j], elem)
+ indexes[j] += 1
+
+ if indexes[j] == dims[j] and j != 0:
+ # This array level is full, continue the
+ # ascent in the dimensions so that this level's
+ # sublist will be appended to the parent list.
+ elem = stride
+ # Reset the index, this will cause the
+ # new list to be allocated on the next
+ # iteration on this array axis.
+ indexes[j] = 0
+ else:
+ break
+
+ stride = <object><cpython.PyObject*>strides[0]
+ # Since each element in strides has a refcount of 1,
+ # returning strides[0] will increment it to 2, so
+ # balance that.
+ cpython.Py_DECREF(stride)
+ return stride
+
+
+cdef textarray_decode(ConnectionSettings settings, FRBuffer *buf,
+ decode_func_ex decoder, const void *decoder_arg,
+ Py_UCS4 typdelim):
+ cdef:
+ Py_UCS4 *array_text
+ str s
+
+ # Make a copy of array data since we will be mutating it for
+ # the purposes of element decoding.
+ s = pgproto.text_decode(settings, buf)
+ array_text = cpythonx.PyUnicode_AsUCS4Copy(s)
+
+ try:
+ return _textarray_decode(
+ settings, array_text, decoder, decoder_arg, typdelim)
+ except ValueError as e:
+ raise exceptions.ProtocolError(
+ 'malformed array literal {!r}: {}'.format(s, e.args[0]))
+ finally:
+ cpython.PyMem_Free(array_text)
+
+
+cdef _textarray_decode(ConnectionSettings settings,
+ Py_UCS4 *array_text,
+ decode_func_ex decoder,
+ const void *decoder_arg,
+ Py_UCS4 typdelim):
+
+ cdef:
+ bytearray array_bytes
+ list result
+ list new_stride
+ Py_UCS4 *ptr
+ int32_t ndims = 0
+ int32_t ubound = 0
+ int32_t lbound = 0
+ int32_t dims[ARRAY_MAXDIM]
+ int32_t inferred_dims[ARRAY_MAXDIM]
+ int32_t inferred_ndims = 0
+ void *strides[ARRAY_MAXDIM]
+ int32_t indexes[ARRAY_MAXDIM]
+ int32_t nest_level = 0
+ int32_t item_level = 0
+ bint end_of_array = False
+
+ bint end_of_item = False
+ bint has_quoting = False
+ bint strip_spaces = False
+ bint in_quotes = False
+ Py_UCS4 *item_start
+ Py_UCS4 *item_ptr
+ Py_UCS4 *item_end
+
+ int i
+ object item
+ str item_text
+ FRBuffer item_buf
+ char *pg_item_str
+ ssize_t pg_item_len
+
+ ptr = array_text
+
+ while True:
+ while apg_ascii_isspace(ptr[0]):
+ ptr += 1
+
+ if ptr[0] != '[':
+ # Finished parsing dimensions spec.
+ break
+
+ ptr += 1 # '['
+
+ if ndims > ARRAY_MAXDIM:
+ raise ValueError(
+ 'number of array dimensions ({}) exceed the '
+ 'maximum expected ({})'.format(ndims, ARRAY_MAXDIM))
+
+ ptr = apg_parse_int32(ptr, &ubound)
+ if ptr == NULL:
+ raise ValueError('missing array dimension value')
+
+ if ptr[0] == ':':
+ ptr += 1
+ lbound = ubound
+
+ # [lower:upper] spec. We disregard the lbound for decoding.
+ ptr = apg_parse_int32(ptr, &ubound)
+ if ptr == NULL:
+ raise ValueError('missing array dimension value')
+ else:
+ lbound = 1
+
+ if ptr[0] != ']':
+ raise ValueError('missing \']\' after array dimensions')
+
+ ptr += 1 # ']'
+
+ dims[ndims] = ubound - lbound + 1
+ ndims += 1
+
+ if ndims != 0:
+ # If dimensions were given, the '=' token is expected.
+ if ptr[0] != '=':
+ raise ValueError('missing \'=\' after array dimensions')
+
+ ptr += 1 # '='
+
+ # Skip any whitespace after the '=', whitespace
+ # before was consumed in the above loop.
+ while apg_ascii_isspace(ptr[0]):
+ ptr += 1
+
+ # Infer the dimensions from the brace structure in the
+ # array literal body, and check that it matches the explicit
+ # spec. This also validates that the array literal is sane.
+ _infer_array_dims(ptr, typdelim, inferred_dims, &inferred_ndims)
+
+ if inferred_ndims != ndims:
+ raise ValueError(
+ 'specified array dimensions do not match array content')
+
+ for i in range(ndims):
+ if inferred_dims[i] != dims[i]:
+ raise ValueError(
+ 'specified array dimensions do not match array content')
+ else:
+ # Infer the dimensions from the brace structure in the array literal
+ # body. This also validates that the array literal is sane.
+ _infer_array_dims(ptr, typdelim, dims, &ndims)
+
+ while not end_of_array:
+ # We iterate over the literal character by character
+ # and modify the string in place, removing the array-specific
+ # quoting and determining the boundaries of each element.
+ end_of_item = has_quoting = in_quotes = False
+ strip_spaces = True
+
+ # Pointers to array element start, end, and the current pointer
+ # tracking the position where characters are written when
+ # escaping is folded.
+ item_start = item_end = item_ptr = ptr
+ item_level = 0
+
+ while not end_of_item:
+ if ptr[0] == '"':
+ in_quotes = not in_quotes
+ if in_quotes:
+ strip_spaces = False
+ else:
+ item_end = item_ptr
+ has_quoting = True
+
+ elif ptr[0] == '\\':
+ # Quoted character, collapse the backslash.
+ ptr += 1
+ has_quoting = True
+ item_ptr[0] = ptr[0]
+ item_ptr += 1
+ strip_spaces = False
+ item_end = item_ptr
+
+ elif in_quotes:
+ # Consume the string until we see the closing quote.
+ item_ptr[0] = ptr[0]
+ item_ptr += 1
+
+ elif ptr[0] == '{':
+ # Nesting level increase.
+ nest_level += 1
+
+ indexes[nest_level - 1] = 0
+ new_stride = cpython.PyList_New(dims[nest_level - 1])
+ strides[nest_level - 1] = \
+ <void*>(<cpython.PyObject>new_stride)
+
+ if nest_level > 1:
+ cpython.Py_INCREF(new_stride)
+ cpython.PyList_SET_ITEM(
+ <object><cpython.PyObject*>strides[nest_level - 2],
+ indexes[nest_level - 2],
+ new_stride)
+ else:
+ result = new_stride
+
+ elif ptr[0] == '}':
+ if item_level == 0:
+ # Make sure we keep track of which nesting
+ # level the item belongs to, as the loop
+ # will continue to consume closing braces
+ # until the delimiter or the end of input.
+ item_level = nest_level
+
+ nest_level -= 1
+
+ if nest_level == 0:
+ end_of_array = end_of_item = True
+
+ elif ptr[0] == typdelim:
+ # Array element delimiter.
+ end_of_item = True
+ if item_level == 0:
+ item_level = nest_level
+
+ elif apg_ascii_isspace(ptr[0]):
+ if not strip_spaces:
+ item_ptr[0] = ptr[0]
+ item_ptr += 1
+ # Ignore the leading literal whitespace.
+
+ else:
+ item_ptr[0] = ptr[0]
+ item_ptr += 1
+ strip_spaces = False
+ item_end = item_ptr
+
+ ptr += 1
+
+ # end while not end_of_item
+
+ if item_end == item_start:
+ # Empty array
+ continue
+
+ item_end[0] = '\0'
+
+ if not has_quoting and apg_strcasecmp(item_start, APG_NULL) == 0:
+ # NULL element.
+ item = None
+ else:
+ # XXX: find a way to avoid the redundant encode/decode
+ # cycle here.
+ item_text = cpythonx.PyUnicode_FromKindAndData(
+ cpythonx.PyUnicode_4BYTE_KIND,
+ <void *>item_start,
+ item_end - item_start)
+
+ # Prepare the element buffer and call the text decoder
+ # for the element type.
+ pgproto.as_pg_string_and_size(
+ settings, item_text, &pg_item_str, &pg_item_len)
+ frb_init(&item_buf, pg_item_str, pg_item_len)
+ item = decoder(settings, &item_buf, decoder_arg)
+
+ # Place the decoded element in the array.
+ cpython.Py_INCREF(item)
+ cpython.PyList_SET_ITEM(
+ <object><cpython.PyObject*>strides[item_level - 1],
+ indexes[item_level - 1],
+ item)
+
+ if nest_level > 0:
+ indexes[nest_level - 1] += 1
+
+ return result
+
+
+cdef enum _ArrayParseState:
+ APS_START = 1
+ APS_STRIDE_STARTED = 2
+ APS_STRIDE_DONE = 3
+ APS_STRIDE_DELIMITED = 4
+ APS_ELEM_STARTED = 5
+ APS_ELEM_DELIMITED = 6
+
+
+cdef _UnexpectedCharacter(const Py_UCS4 *array_text, const Py_UCS4 *ptr):
+ return ValueError('unexpected character {!r} at position {}'.format(
+ cpython.PyUnicode_FromOrdinal(<int>ptr[0]), ptr - array_text + 1))
+
+
+cdef _infer_array_dims(const Py_UCS4 *array_text,
+ Py_UCS4 typdelim,
+ int32_t *dims,
+ int32_t *ndims):
+ cdef:
+ const Py_UCS4 *ptr = array_text
+ int i
+ int nest_level = 0
+ bint end_of_array = False
+ bint end_of_item = False
+ bint in_quotes = False
+ bint array_is_empty = True
+ int stride_len[ARRAY_MAXDIM]
+ int prev_stride_len[ARRAY_MAXDIM]
+ _ArrayParseState parse_state = APS_START
+
+ for i in range(ARRAY_MAXDIM):
+ dims[i] = prev_stride_len[i] = 0
+ stride_len[i] = 1
+
+ while not end_of_array:
+ end_of_item = False
+
+ while not end_of_item:
+ if ptr[0] == '\0':
+ raise ValueError('unexpected end of string')
+
+ elif ptr[0] == '"':
+ if (parse_state not in (APS_STRIDE_STARTED,
+ APS_ELEM_DELIMITED) and
+ not (parse_state == APS_ELEM_STARTED and in_quotes)):
+ raise _UnexpectedCharacter(array_text, ptr)
+
+ in_quotes = not in_quotes
+ if in_quotes:
+ parse_state = APS_ELEM_STARTED
+ array_is_empty = False
+
+ elif ptr[0] == '\\':
+ if parse_state not in (APS_STRIDE_STARTED,
+ APS_ELEM_STARTED,
+ APS_ELEM_DELIMITED):
+ raise _UnexpectedCharacter(array_text, ptr)
+
+ parse_state = APS_ELEM_STARTED
+ array_is_empty = False
+
+ if ptr[1] != '\0':
+ ptr += 1
+ else:
+ raise ValueError('unexpected end of string')
+
+ elif in_quotes:
+ # Ignore everything inside the quotes.
+ pass
+
+ elif ptr[0] == '{':
+ if parse_state not in (APS_START,
+ APS_STRIDE_STARTED,
+ APS_STRIDE_DELIMITED):
+ raise _UnexpectedCharacter(array_text, ptr)
+
+ parse_state = APS_STRIDE_STARTED
+ if nest_level >= ARRAY_MAXDIM:
+ raise ValueError(
+ 'number of array dimensions ({}) exceed the '
+ 'maximum expected ({})'.format(
+ nest_level, ARRAY_MAXDIM))
+
+ dims[nest_level] = 0
+ nest_level += 1
+ if ndims[0] < nest_level:
+ ndims[0] = nest_level
+
+ elif ptr[0] == '}':
+ if (parse_state not in (APS_ELEM_STARTED, APS_STRIDE_DONE) and
+ not (nest_level == 1 and
+ parse_state == APS_STRIDE_STARTED)):
+ raise _UnexpectedCharacter(array_text, ptr)
+
+ parse_state = APS_STRIDE_DONE
+
+ if nest_level == 0:
+ raise _UnexpectedCharacter(array_text, ptr)
+
+ nest_level -= 1
+
+ if (prev_stride_len[nest_level] != 0 and
+ stride_len[nest_level] != prev_stride_len[nest_level]):
+ raise ValueError(
+ 'inconsistent sub-array dimensions'
+ ' at position {}'.format(
+ ptr - array_text + 1))
+
+ prev_stride_len[nest_level] = stride_len[nest_level]
+ stride_len[nest_level] = 1
+ if nest_level == 0:
+ end_of_array = end_of_item = True
+ else:
+ dims[nest_level - 1] += 1
+
+ elif ptr[0] == typdelim:
+ if parse_state not in (APS_ELEM_STARTED, APS_STRIDE_DONE):
+ raise _UnexpectedCharacter(array_text, ptr)
+
+ if parse_state == APS_STRIDE_DONE:
+ parse_state = APS_STRIDE_DELIMITED
+ else:
+ parse_state = APS_ELEM_DELIMITED
+ end_of_item = True
+ stride_len[nest_level - 1] += 1
+
+ elif not apg_ascii_isspace(ptr[0]):
+ if parse_state not in (APS_STRIDE_STARTED,
+ APS_ELEM_STARTED,
+ APS_ELEM_DELIMITED):
+ raise _UnexpectedCharacter(array_text, ptr)
+
+ parse_state = APS_ELEM_STARTED
+ array_is_empty = False
+
+ if not end_of_item:
+ ptr += 1
+
+ if not array_is_empty:
+ dims[ndims[0] - 1] += 1
+
+ ptr += 1
+
+ # only whitespace is allowed after the closing brace
+ while ptr[0] != '\0':
+ if not apg_ascii_isspace(ptr[0]):
+ raise _UnexpectedCharacter(array_text, ptr)
+
+ ptr += 1
+
+ if array_is_empty:
+ ndims[0] = 0
+
+
+cdef uint4_encode_ex(ConnectionSettings settings, WriteBuffer buf, object obj,
+ const void *arg):
+ return pgproto.uint4_encode(settings, buf, obj)
+
+
+cdef uint4_decode_ex(ConnectionSettings settings, FRBuffer *buf,
+ const void *arg):
+ return pgproto.uint4_decode(settings, buf)
+
+
+cdef arrayoid_encode(ConnectionSettings settings, WriteBuffer buf, items):
+ array_encode(settings, buf, items, OIDOID,
+ <encode_func_ex>&uint4_encode_ex, NULL)
+
+
+cdef arrayoid_decode(ConnectionSettings settings, FRBuffer *buf):
+ return array_decode(settings, buf, <decode_func_ex>&uint4_decode_ex, NULL)
+
+
+cdef text_encode_ex(ConnectionSettings settings, WriteBuffer buf, object obj,
+ const void *arg):
+ return pgproto.text_encode(settings, buf, obj)
+
+
+cdef text_decode_ex(ConnectionSettings settings, FRBuffer *buf,
+ const void *arg):
+ return pgproto.text_decode(settings, buf)
+
+
+cdef arraytext_encode(ConnectionSettings settings, WriteBuffer buf, items):
+ array_encode(settings, buf, items, TEXTOID,
+ <encode_func_ex>&text_encode_ex, NULL)
+
+
+cdef arraytext_decode(ConnectionSettings settings, FRBuffer *buf):
+ return array_decode(settings, buf, <decode_func_ex>&text_decode_ex, NULL)
+
+
+cdef init_array_codecs():
+ # oid[] and text[] are registered as core codecs
+ # to make the type introspection query work.
+ #
+ register_core_codec(_OIDOID,
+ <encode_func>&arrayoid_encode,
+ <decode_func>&arrayoid_decode,
+ PG_FORMAT_BINARY)
+
+ register_core_codec(_TEXTOID,
+ <encode_func>&arraytext_encode,
+ <decode_func>&arraytext_decode,
+ PG_FORMAT_BINARY)
+
+init_array_codecs()
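
For orientation: the array codec above is what backs list parameters and array results in asyncpg's public API. A minimal usage sketch (plain Python, not part of the diff; assumes a reachable PostgreSQL server with connection settings taken from the environment):

    import asyncio
    import asyncpg

    async def main():
        conn = await asyncpg.connect()  # assumption: PG* environment variables are set
        # Nested, rectangular Python lists are encoded by array_encode() above;
        # None elements are sent as SQL NULL.
        val = await conn.fetchval('SELECT $1::int4[]', [[1, 2], [3, None]])
        print(val)  # [[1, 2], [3, None]]
        # Non-rectangular input is rejected with ValueError('non-homogeneous array').
        await conn.close()

    asyncio.run(main())
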
diff --git a/.venv/lib/python3.12/site-packages/asyncpg/protocol/codecs/base.pxd b/.venv/lib/python3.12/site-packages/asyncpg/protocol/codecs/base.pxd
new file mode 100644
index 00000000..1cfed833
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/asyncpg/protocol/codecs/base.pxd
@@ -0,0 +1,187 @@
+# Copyright (C) 2016-present the asyncpg authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of asyncpg and is released under
+# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0
+
+
+ctypedef object (*encode_func)(ConnectionSettings settings,
+ WriteBuffer buf,
+ object obj)
+
+ctypedef object (*decode_func)(ConnectionSettings settings,
+ FRBuffer *buf)
+
+ctypedef object (*codec_encode_func)(Codec codec,
+ ConnectionSettings settings,
+ WriteBuffer buf,
+ object obj)
+
+ctypedef object (*codec_decode_func)(Codec codec,
+ ConnectionSettings settings,
+ FRBuffer *buf)
+
+
+cdef enum CodecType:
+ CODEC_UNDEFINED = 0
+ CODEC_C = 1
+ CODEC_PY = 2
+ CODEC_ARRAY = 3
+ CODEC_COMPOSITE = 4
+ CODEC_RANGE = 5
+ CODEC_MULTIRANGE = 6
+
+
+cdef enum ServerDataFormat:
+ PG_FORMAT_ANY = -1
+ PG_FORMAT_TEXT = 0
+ PG_FORMAT_BINARY = 1
+
+
+cdef enum ClientExchangeFormat:
+ PG_XFORMAT_OBJECT = 1
+ PG_XFORMAT_TUPLE = 2
+
+
+cdef class Codec:
+ cdef:
+ uint32_t oid
+
+ str name
+ str schema
+ str kind
+
+ CodecType type
+ ServerDataFormat format
+ ClientExchangeFormat xformat
+
+ encode_func c_encoder
+ decode_func c_decoder
+ Codec base_codec
+
+ object py_encoder
+ object py_decoder
+
+ # arrays
+ Codec element_codec
+ Py_UCS4 element_delimiter
+
+ # composite types
+ tuple element_type_oids
+ object element_names
+ object record_desc
+ list element_codecs
+
+ # Pointers to actual encoder/decoder functions for this codec
+ codec_encode_func encoder
+ codec_decode_func decoder
+
+ cdef init(self, str name, str schema, str kind,
+ CodecType type, ServerDataFormat format,
+ ClientExchangeFormat xformat,
+ encode_func c_encoder, decode_func c_decoder,
+ Codec base_codec,
+ object py_encoder, object py_decoder,
+ Codec element_codec, tuple element_type_oids,
+ object element_names, list element_codecs,
+ Py_UCS4 element_delimiter)
+
+ cdef encode_scalar(self, ConnectionSettings settings, WriteBuffer buf,
+ object obj)
+
+ cdef encode_array(self, ConnectionSettings settings, WriteBuffer buf,
+ object obj)
+
+ cdef encode_array_text(self, ConnectionSettings settings, WriteBuffer buf,
+ object obj)
+
+ cdef encode_range(self, ConnectionSettings settings, WriteBuffer buf,
+ object obj)
+
+ cdef encode_multirange(self, ConnectionSettings settings, WriteBuffer buf,
+ object obj)
+
+ cdef encode_composite(self, ConnectionSettings settings, WriteBuffer buf,
+ object obj)
+
+ cdef encode_in_python(self, ConnectionSettings settings, WriteBuffer buf,
+ object obj)
+
+ cdef decode_scalar(self, ConnectionSettings settings, FRBuffer *buf)
+
+ cdef decode_array(self, ConnectionSettings settings, FRBuffer *buf)
+
+ cdef decode_array_text(self, ConnectionSettings settings, FRBuffer *buf)
+
+ cdef decode_range(self, ConnectionSettings settings, FRBuffer *buf)
+
+ cdef decode_multirange(self, ConnectionSettings settings, FRBuffer *buf)
+
+ cdef decode_composite(self, ConnectionSettings settings, FRBuffer *buf)
+
+ cdef decode_in_python(self, ConnectionSettings settings, FRBuffer *buf)
+
+ cdef inline encode(self,
+ ConnectionSettings settings,
+ WriteBuffer buf,
+ object obj)
+
+ cdef inline decode(self, ConnectionSettings settings, FRBuffer *buf)
+
+ cdef has_encoder(self)
+ cdef has_decoder(self)
+ cdef is_binary(self)
+
+ cdef inline Codec copy(self)
+
+ @staticmethod
+ cdef Codec new_array_codec(uint32_t oid,
+ str name,
+ str schema,
+ Codec element_codec,
+ Py_UCS4 element_delimiter)
+
+ @staticmethod
+ cdef Codec new_range_codec(uint32_t oid,
+ str name,
+ str schema,
+ Codec element_codec)
+
+ @staticmethod
+ cdef Codec new_multirange_codec(uint32_t oid,
+ str name,
+ str schema,
+ Codec element_codec)
+
+ @staticmethod
+ cdef Codec new_composite_codec(uint32_t oid,
+ str name,
+ str schema,
+ ServerDataFormat format,
+ list element_codecs,
+ tuple element_type_oids,
+ object element_names)
+
+ @staticmethod
+ cdef Codec new_python_codec(uint32_t oid,
+ str name,
+ str schema,
+ str kind,
+ object encoder,
+ object decoder,
+ encode_func c_encoder,
+ decode_func c_decoder,
+ Codec base_codec,
+ ServerDataFormat format,
+ ClientExchangeFormat xformat)
+
+
+cdef class DataCodecConfig:
+ cdef:
+ dict _derived_type_codecs
+ dict _custom_type_codecs
+
+ cdef inline Codec get_codec(self, uint32_t oid, ServerDataFormat format,
+ bint ignore_custom_codec=*)
+ cdef inline Codec get_custom_codec(self, uint32_t oid,
+ ServerDataFormat format)
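
Note: ClientExchangeFormat above distinguishes the normal Python-object exchange (PG_XFORMAT_OBJECT) from the raw 'tuple' exchange used by the *_encode_tuple/*_decode_tuple codecs registered in pgproto.pyx further down. At the public API level this corresponds to Connection.set_type_codec(..., format='tuple'). A rough sketch, not part of the diff (the pass-through lambdas are illustrative only; a real codec would convert the protocol tuple to and from application-level values):

    import asyncpg

    async def use_tuple_exchange(conn: asyncpg.Connection) -> None:
        # Exchange raw protocol tuples with the built-in binary codec instead
        # of datetime objects (PG_XFORMAT_TUPLE / decode_in_python in base.pyx).
        await conn.set_type_codec(
            'timestamptz',
            schema='pg_catalog',
            encoder=lambda t: t,   # illustrative pass-through
            decoder=lambda t: t,   # illustrative pass-through
            format='tuple',
        )
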
diff --git a/.venv/lib/python3.12/site-packages/asyncpg/protocol/codecs/base.pyx b/.venv/lib/python3.12/site-packages/asyncpg/protocol/codecs/base.pyx
new file mode 100644
index 00000000..c269e374
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/asyncpg/protocol/codecs/base.pyx
@@ -0,0 +1,895 @@
+# Copyright (C) 2016-present the asyncpg authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of asyncpg and is released under
+# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0
+
+
+from collections.abc import Mapping as MappingABC
+
+import asyncpg
+from asyncpg import exceptions
+
+
+cdef void* binary_codec_map[(MAXSUPPORTEDOID + 1) * 2]
+cdef void* text_codec_map[(MAXSUPPORTEDOID + 1) * 2]
+cdef dict EXTRA_CODECS = {}
+
+
+@cython.final
+cdef class Codec:
+
+ def __cinit__(self, uint32_t oid):
+ self.oid = oid
+ self.type = CODEC_UNDEFINED
+
+ cdef init(
+ self,
+ str name,
+ str schema,
+ str kind,
+ CodecType type,
+ ServerDataFormat format,
+ ClientExchangeFormat xformat,
+ encode_func c_encoder,
+ decode_func c_decoder,
+ Codec base_codec,
+ object py_encoder,
+ object py_decoder,
+ Codec element_codec,
+ tuple element_type_oids,
+ object element_names,
+ list element_codecs,
+ Py_UCS4 element_delimiter,
+ ):
+
+ self.name = name
+ self.schema = schema
+ self.kind = kind
+ self.type = type
+ self.format = format
+ self.xformat = xformat
+ self.c_encoder = c_encoder
+ self.c_decoder = c_decoder
+ self.base_codec = base_codec
+ self.py_encoder = py_encoder
+ self.py_decoder = py_decoder
+ self.element_codec = element_codec
+ self.element_type_oids = element_type_oids
+ self.element_codecs = element_codecs
+ self.element_delimiter = element_delimiter
+ self.element_names = element_names
+
+ if base_codec is not None:
+ if c_encoder != NULL or c_decoder != NULL:
+ raise exceptions.InternalClientError(
+ 'base_codec is mutually exclusive with c_encoder/c_decoder'
+ )
+
+ if element_names is not None:
+ self.record_desc = record.ApgRecordDesc_New(
+ element_names, tuple(element_names))
+ else:
+ self.record_desc = None
+
+ if type == CODEC_C:
+ self.encoder = <codec_encode_func>&self.encode_scalar
+ self.decoder = <codec_decode_func>&self.decode_scalar
+ elif type == CODEC_ARRAY:
+ if format == PG_FORMAT_BINARY:
+ self.encoder = <codec_encode_func>&self.encode_array
+ self.decoder = <codec_decode_func>&self.decode_array
+ else:
+ self.encoder = <codec_encode_func>&self.encode_array_text
+ self.decoder = <codec_decode_func>&self.decode_array_text
+ elif type == CODEC_RANGE:
+ if format != PG_FORMAT_BINARY:
+ raise exceptions.UnsupportedClientFeatureError(
+ 'cannot decode type "{}"."{}": text encoding of '
+ 'range types is not supported'.format(schema, name))
+ self.encoder = <codec_encode_func>&self.encode_range
+ self.decoder = <codec_decode_func>&self.decode_range
+ elif type == CODEC_MULTIRANGE:
+ if format != PG_FORMAT_BINARY:
+ raise exceptions.UnsupportedClientFeatureError(
+ 'cannot decode type "{}"."{}": text encoding of '
+ 'range types is not supported'.format(schema, name))
+ self.encoder = <codec_encode_func>&self.encode_multirange
+ self.decoder = <codec_decode_func>&self.decode_multirange
+ elif type == CODEC_COMPOSITE:
+ if format != PG_FORMAT_BINARY:
+ raise exceptions.UnsupportedClientFeatureError(
+ 'cannot decode type "{}"."{}": text encoding of '
+ 'composite types is not supported'.format(schema, name))
+ self.encoder = <codec_encode_func>&self.encode_composite
+ self.decoder = <codec_decode_func>&self.decode_composite
+ elif type == CODEC_PY:
+ self.encoder = <codec_encode_func>&self.encode_in_python
+ self.decoder = <codec_decode_func>&self.decode_in_python
+ else:
+ raise exceptions.InternalClientError(
+ 'unexpected codec type: {}'.format(type))
+
+ cdef Codec copy(self):
+ cdef Codec codec
+
+ codec = Codec(self.oid)
+ codec.init(self.name, self.schema, self.kind,
+ self.type, self.format, self.xformat,
+ self.c_encoder, self.c_decoder, self.base_codec,
+ self.py_encoder, self.py_decoder,
+ self.element_codec,
+ self.element_type_oids, self.element_names,
+ self.element_codecs, self.element_delimiter)
+
+ return codec
+
+ cdef encode_scalar(self, ConnectionSettings settings, WriteBuffer buf,
+ object obj):
+ self.c_encoder(settings, buf, obj)
+
+ cdef encode_array(self, ConnectionSettings settings, WriteBuffer buf,
+ object obj):
+ array_encode(settings, buf, obj, self.element_codec.oid,
+ codec_encode_func_ex,
+ <void*>(<cpython.PyObject>self.element_codec))
+
+ cdef encode_array_text(self, ConnectionSettings settings, WriteBuffer buf,
+ object obj):
+ return textarray_encode(settings, buf, obj,
+ codec_encode_func_ex,
+ <void*>(<cpython.PyObject>self.element_codec),
+ self.element_delimiter)
+
+ cdef encode_range(self, ConnectionSettings settings, WriteBuffer buf,
+ object obj):
+ range_encode(settings, buf, obj, self.element_codec.oid,
+ codec_encode_func_ex,
+ <void*>(<cpython.PyObject>self.element_codec))
+
+ cdef encode_multirange(self, ConnectionSettings settings, WriteBuffer buf,
+ object obj):
+ multirange_encode(settings, buf, obj, self.element_codec.oid,
+ codec_encode_func_ex,
+ <void*>(<cpython.PyObject>self.element_codec))
+
+ cdef encode_composite(self, ConnectionSettings settings, WriteBuffer buf,
+ object obj):
+ cdef:
+ WriteBuffer elem_data
+ int i
+ list elem_codecs = self.element_codecs
+ ssize_t count
+ ssize_t composite_size
+ tuple rec
+
+ if isinstance(obj, MappingABC):
+ # Input is dict-like, form a tuple
+ composite_size = len(self.element_type_oids)
+ rec = cpython.PyTuple_New(composite_size)
+
+ for i in range(composite_size):
+ cpython.Py_INCREF(None)
+ cpython.PyTuple_SET_ITEM(rec, i, None)
+
+ for field in obj:
+ try:
+ i = self.element_names[field]
+ except KeyError:
+ raise ValueError(
+ '{!r} is not a valid element of composite '
+ 'type {}'.format(field, self.name)) from None
+
+ item = obj[field]
+ cpython.Py_INCREF(item)
+ cpython.PyTuple_SET_ITEM(rec, i, item)
+
+ obj = rec
+
+ count = len(obj)
+ if count > _MAXINT32:
+ raise ValueError('too many elements in composite type record')
+
+ elem_data = WriteBuffer.new()
+ i = 0
+ for item in obj:
+ elem_data.write_int32(<int32_t>self.element_type_oids[i])
+ if item is None:
+ elem_data.write_int32(-1)
+ else:
+ (<Codec>elem_codecs[i]).encode(settings, elem_data, item)
+ i += 1
+
+ record_encode_frame(settings, buf, elem_data, <int32_t>count)
+
+ cdef encode_in_python(self, ConnectionSettings settings, WriteBuffer buf,
+ object obj):
+ data = self.py_encoder(obj)
+ if self.xformat == PG_XFORMAT_OBJECT:
+ if self.format == PG_FORMAT_BINARY:
+ pgproto.bytea_encode(settings, buf, data)
+ elif self.format == PG_FORMAT_TEXT:
+ pgproto.text_encode(settings, buf, data)
+ else:
+ raise exceptions.InternalClientError(
+ 'unexpected data format: {}'.format(self.format))
+ elif self.xformat == PG_XFORMAT_TUPLE:
+ if self.base_codec is not None:
+ self.base_codec.encode(settings, buf, data)
+ else:
+ self.c_encoder(settings, buf, data)
+ else:
+ raise exceptions.InternalClientError(
+ 'unexpected exchange format: {}'.format(self.xformat))
+
+ cdef encode(self, ConnectionSettings settings, WriteBuffer buf,
+ object obj):
+ return self.encoder(self, settings, buf, obj)
+
+ cdef decode_scalar(self, ConnectionSettings settings, FRBuffer *buf):
+ return self.c_decoder(settings, buf)
+
+ cdef decode_array(self, ConnectionSettings settings, FRBuffer *buf):
+ return array_decode(settings, buf, codec_decode_func_ex,
+ <void*>(<cpython.PyObject>self.element_codec))
+
+ cdef decode_array_text(self, ConnectionSettings settings,
+ FRBuffer *buf):
+ return textarray_decode(settings, buf, codec_decode_func_ex,
+ <void*>(<cpython.PyObject>self.element_codec),
+ self.element_delimiter)
+
+ cdef decode_range(self, ConnectionSettings settings, FRBuffer *buf):
+ return range_decode(settings, buf, codec_decode_func_ex,
+ <void*>(<cpython.PyObject>self.element_codec))
+
+ cdef decode_multirange(self, ConnectionSettings settings, FRBuffer *buf):
+ return multirange_decode(settings, buf, codec_decode_func_ex,
+ <void*>(<cpython.PyObject>self.element_codec))
+
+ cdef decode_composite(self, ConnectionSettings settings,
+ FRBuffer *buf):
+ cdef:
+ object result
+ ssize_t elem_count
+ ssize_t i
+ int32_t elem_len
+ uint32_t elem_typ
+ uint32_t received_elem_typ
+ Codec elem_codec
+ FRBuffer elem_buf
+
+ elem_count = <ssize_t><uint32_t>hton.unpack_int32(frb_read(buf, 4))
+ if elem_count != len(self.element_type_oids):
+ raise exceptions.OutdatedSchemaCacheError(
+ 'unexpected number of attributes of composite type: '
+ '{}, expected {}'
+ .format(
+ elem_count,
+ len(self.element_type_oids),
+ ),
+ schema=self.schema,
+ data_type=self.name,
+ )
+ result = record.ApgRecord_New(asyncpg.Record, self.record_desc, elem_count)
+ for i in range(elem_count):
+ elem_typ = self.element_type_oids[i]
+ received_elem_typ = <uint32_t>hton.unpack_int32(frb_read(buf, 4))
+
+ if received_elem_typ != elem_typ:
+ raise exceptions.OutdatedSchemaCacheError(
+ 'unexpected data type of composite type attribute {}: '
+ '{!r}, expected {!r}'
+ .format(
+ i,
+ BUILTIN_TYPE_OID_MAP.get(
+ received_elem_typ, received_elem_typ),
+ BUILTIN_TYPE_OID_MAP.get(
+ elem_typ, elem_typ)
+ ),
+ schema=self.schema,
+ data_type=self.name,
+ position=i,
+ )
+
+ elem_len = hton.unpack_int32(frb_read(buf, 4))
+ if elem_len == -1:
+ elem = None
+ else:
+ elem_codec = self.element_codecs[i]
+ elem = elem_codec.decode(
+ settings, frb_slice_from(&elem_buf, buf, elem_len))
+
+ cpython.Py_INCREF(elem)
+ record.ApgRecord_SET_ITEM(result, i, elem)
+
+ return result
+
+ cdef decode_in_python(self, ConnectionSettings settings,
+ FRBuffer *buf):
+ if self.xformat == PG_XFORMAT_OBJECT:
+ if self.format == PG_FORMAT_BINARY:
+ data = pgproto.bytea_decode(settings, buf)
+ elif self.format == PG_FORMAT_TEXT:
+ data = pgproto.text_decode(settings, buf)
+ else:
+ raise exceptions.InternalClientError(
+ 'unexpected data format: {}'.format(self.format))
+ elif self.xformat == PG_XFORMAT_TUPLE:
+ if self.base_codec is not None:
+ data = self.base_codec.decode(settings, buf)
+ else:
+ data = self.c_decoder(settings, buf)
+ else:
+ raise exceptions.InternalClientError(
+ 'unexpected exchange format: {}'.format(self.xformat))
+
+ return self.py_decoder(data)
+
+ cdef inline decode(self, ConnectionSettings settings, FRBuffer *buf):
+ return self.decoder(self, settings, buf)
+
+ cdef inline has_encoder(self):
+ cdef Codec elem_codec
+
+ if self.c_encoder is not NULL or self.py_encoder is not None:
+ return True
+
+ elif (
+ self.type == CODEC_ARRAY
+ or self.type == CODEC_RANGE
+ or self.type == CODEC_MULTIRANGE
+ ):
+ return self.element_codec.has_encoder()
+
+ elif self.type == CODEC_COMPOSITE:
+ for elem_codec in self.element_codecs:
+ if not elem_codec.has_encoder():
+ return False
+ return True
+
+ else:
+ return False
+
+ cdef has_decoder(self):
+ cdef Codec elem_codec
+
+ if self.c_decoder is not NULL or self.py_decoder is not None:
+ return True
+
+ elif (
+ self.type == CODEC_ARRAY
+ or self.type == CODEC_RANGE
+ or self.type == CODEC_MULTIRANGE
+ ):
+ return self.element_codec.has_decoder()
+
+ elif self.type == CODEC_COMPOSITE:
+ for elem_codec in self.element_codecs:
+ if not elem_codec.has_decoder():
+ return False
+ return True
+
+ else:
+ return False
+
+ cdef is_binary(self):
+ return self.format == PG_FORMAT_BINARY
+
+ def __repr__(self):
+ return '<Codec oid={} elem_oid={} core={}>'.format(
+ self.oid,
+ 'NA' if self.element_codec is None else self.element_codec.oid,
+ has_core_codec(self.oid))
+
+ @staticmethod
+ cdef Codec new_array_codec(uint32_t oid,
+ str name,
+ str schema,
+ Codec element_codec,
+ Py_UCS4 element_delimiter):
+ cdef Codec codec
+ codec = Codec(oid)
+ codec.init(name, schema, 'array', CODEC_ARRAY, element_codec.format,
+ PG_XFORMAT_OBJECT, NULL, NULL, None, None, None,
+ element_codec, None, None, None, element_delimiter)
+ return codec
+
+ @staticmethod
+ cdef Codec new_range_codec(uint32_t oid,
+ str name,
+ str schema,
+ Codec element_codec):
+ cdef Codec codec
+ codec = Codec(oid)
+ codec.init(name, schema, 'range', CODEC_RANGE, element_codec.format,
+ PG_XFORMAT_OBJECT, NULL, NULL, None, None, None,
+ element_codec, None, None, None, 0)
+ return codec
+
+ @staticmethod
+ cdef Codec new_multirange_codec(uint32_t oid,
+ str name,
+ str schema,
+ Codec element_codec):
+ cdef Codec codec
+ codec = Codec(oid)
+ codec.init(name, schema, 'multirange', CODEC_MULTIRANGE,
+ element_codec.format, PG_XFORMAT_OBJECT, NULL, NULL, None,
+ None, None, element_codec, None, None, None, 0)
+ return codec
+
+ @staticmethod
+ cdef Codec new_composite_codec(uint32_t oid,
+ str name,
+ str schema,
+ ServerDataFormat format,
+ list element_codecs,
+ tuple element_type_oids,
+ object element_names):
+ cdef Codec codec
+ codec = Codec(oid)
+ codec.init(name, schema, 'composite', CODEC_COMPOSITE,
+ format, PG_XFORMAT_OBJECT, NULL, NULL, None, None, None,
+ None, element_type_oids, element_names, element_codecs, 0)
+ return codec
+
+ @staticmethod
+ cdef Codec new_python_codec(uint32_t oid,
+ str name,
+ str schema,
+ str kind,
+ object encoder,
+ object decoder,
+ encode_func c_encoder,
+ decode_func c_decoder,
+ Codec base_codec,
+ ServerDataFormat format,
+ ClientExchangeFormat xformat):
+ cdef Codec codec
+ codec = Codec(oid)
+ codec.init(name, schema, kind, CODEC_PY, format, xformat,
+ c_encoder, c_decoder, base_codec, encoder, decoder,
+ None, None, None, None, 0)
+ return codec
+
+
+# Encode callback for arrays
+cdef codec_encode_func_ex(ConnectionSettings settings, WriteBuffer buf,
+ object obj, const void *arg):
+ return (<Codec>arg).encode(settings, buf, obj)
+
+
+# Decode callback for arrays
+cdef codec_decode_func_ex(ConnectionSettings settings, FRBuffer *buf,
+ const void *arg):
+ return (<Codec>arg).decode(settings, buf)
+
+
+cdef uint32_t pylong_as_oid(val) except? 0xFFFFFFFFl:
+ cdef:
+ int64_t oid = 0
+ bint overflow = False
+
+ try:
+ oid = cpython.PyLong_AsLongLong(val)
+ except OverflowError:
+ overflow = True
+
+ if overflow or (oid < 0 or oid > UINT32_MAX):
+ raise OverflowError('OID value too large: {!r}'.format(val))
+
+ return <uint32_t>val
+
+
+cdef class DataCodecConfig:
+ def __init__(self, cache_key):
+ # Codec instance cache for derived types:
+ # composites, arrays, ranges, domains and their combinations.
+ self._derived_type_codecs = {}
+ # Codec instances set up by the user for the connection.
+ self._custom_type_codecs = {}
+
+ def add_types(self, types):
+ cdef:
+ Codec elem_codec
+ list comp_elem_codecs
+ ServerDataFormat format
+ ServerDataFormat elem_format
+ bint has_text_elements
+ Py_UCS4 elem_delim
+
+ for ti in types:
+ oid = ti['oid']
+
+ if self.get_codec(oid, PG_FORMAT_ANY) is not None:
+ continue
+
+ name = ti['name']
+ schema = ti['ns']
+ array_element_oid = ti['elemtype']
+ range_subtype_oid = ti['range_subtype']
+ if ti['attrtypoids']:
+ comp_type_attrs = tuple(ti['attrtypoids'])
+ else:
+ comp_type_attrs = None
+ base_type = ti['basetype']
+
+ if array_element_oid:
+ # Array type (note, there is no separate 'kind' for arrays)
+
+ # Canonicalize type name to "elemtype[]"
+ if name.startswith('_'):
+ name = name[1:]
+ name = '{}[]'.format(name)
+
+ elem_codec = self.get_codec(array_element_oid, PG_FORMAT_ANY)
+ if elem_codec is None:
+ elem_codec = self.declare_fallback_codec(
+ array_element_oid, ti['elemtype_name'], schema)
+
+ elem_delim = <Py_UCS4>ti['elemdelim'][0]
+
+ self._derived_type_codecs[oid, elem_codec.format] = \
+ Codec.new_array_codec(
+ oid, name, schema, elem_codec, elem_delim)
+
+ elif ti['kind'] == b'c':
+ # Composite type
+
+ if not comp_type_attrs:
+ raise exceptions.InternalClientError(
+ f'type record missing field types for composite {oid}')
+
+ comp_elem_codecs = []
+ has_text_elements = False
+
+ for typoid in comp_type_attrs:
+ elem_codec = self.get_codec(typoid, PG_FORMAT_ANY)
+ if elem_codec is None:
+ raise exceptions.InternalClientError(
+ f'no codec for composite attribute type {typoid}')
+ if elem_codec.format is PG_FORMAT_TEXT:
+ has_text_elements = True
+ comp_elem_codecs.append(elem_codec)
+
+ element_names = collections.OrderedDict()
+ for i, attrname in enumerate(ti['attrnames']):
+ element_names[attrname] = i
+
+ # If at least one element is text-encoded, we must
+ # encode the whole composite as text.
+ if has_text_elements:
+ elem_format = PG_FORMAT_TEXT
+ else:
+ elem_format = PG_FORMAT_BINARY
+
+ self._derived_type_codecs[oid, elem_format] = \
+ Codec.new_composite_codec(
+ oid, name, schema, elem_format, comp_elem_codecs,
+ comp_type_attrs, element_names)
+
+ elif ti['kind'] == b'd':
+ # Domain type
+
+ if not base_type:
+ raise exceptions.InternalClientError(
+ f'type record missing base type for domain {oid}')
+
+ elem_codec = self.get_codec(base_type, PG_FORMAT_ANY)
+ if elem_codec is None:
+ elem_codec = self.declare_fallback_codec(
+ base_type, ti['basetype_name'], schema)
+
+ self._derived_type_codecs[oid, elem_codec.format] = elem_codec
+
+ elif ti['kind'] == b'r':
+ # Range type
+
+ if not range_subtype_oid:
+ raise exceptions.InternalClientError(
+ f'type record missing base type for range {oid}')
+
+ elem_codec = self.get_codec(range_subtype_oid, PG_FORMAT_ANY)
+ if elem_codec is None:
+ elem_codec = self.declare_fallback_codec(
+ range_subtype_oid, ti['range_subtype_name'], schema)
+
+ self._derived_type_codecs[oid, elem_codec.format] = \
+ Codec.new_range_codec(oid, name, schema, elem_codec)
+
+ elif ti['kind'] == b'm':
+ # Multirange type
+
+ if not range_subtype_oid:
+ raise exceptions.InternalClientError(
+ f'type record missing base type for multirange {oid}')
+
+ elem_codec = self.get_codec(range_subtype_oid, PG_FORMAT_ANY)
+ if elem_codec is None:
+ elem_codec = self.declare_fallback_codec(
+ range_subtype_oid, ti['range_subtype_name'], schema)
+
+ self._derived_type_codecs[oid, elem_codec.format] = \
+ Codec.new_multirange_codec(oid, name, schema, elem_codec)
+
+ elif ti['kind'] == b'e':
+ # Enum types are essentially text
+ self._set_builtin_type_codec(oid, name, schema, 'scalar',
+ TEXTOID, PG_FORMAT_ANY)
+ else:
+ self.declare_fallback_codec(oid, name, schema)
+
+ def add_python_codec(self, typeoid, typename, typeschema, typekind,
+ typeinfos, encoder, decoder, format, xformat):
+ cdef:
+ Codec core_codec = None
+ encode_func c_encoder = NULL
+ decode_func c_decoder = NULL
+ Codec base_codec = None
+ uint32_t oid = pylong_as_oid(typeoid)
+ bint codec_set = False
+
+ # Clear all previous overrides (this also clears type cache).
+ self.remove_python_codec(typeoid, typename, typeschema)
+
+ if typeinfos:
+ self.add_types(typeinfos)
+
+ if format == PG_FORMAT_ANY:
+ formats = (PG_FORMAT_TEXT, PG_FORMAT_BINARY)
+ else:
+ formats = (format,)
+
+ for fmt in formats:
+ if xformat == PG_XFORMAT_TUPLE:
+ if typekind == "scalar":
+ core_codec = get_core_codec(oid, fmt, xformat)
+ if core_codec is None:
+ continue
+ c_encoder = core_codec.c_encoder
+ c_decoder = core_codec.c_decoder
+ elif typekind == "composite":
+ base_codec = self.get_codec(oid, fmt)
+ if base_codec is None:
+ continue
+
+ self._custom_type_codecs[typeoid, fmt] = \
+ Codec.new_python_codec(oid, typename, typeschema, typekind,
+ encoder, decoder, c_encoder, c_decoder,
+ base_codec, fmt, xformat)
+ codec_set = True
+
+ if not codec_set:
+ raise exceptions.InterfaceError(
+ "{} type does not support the 'tuple' exchange format".format(
+ typename))
+
+ def remove_python_codec(self, typeoid, typename, typeschema):
+ for fmt in (PG_FORMAT_BINARY, PG_FORMAT_TEXT):
+ self._custom_type_codecs.pop((typeoid, fmt), None)
+ self.clear_type_cache()
+
+ def _set_builtin_type_codec(self, typeoid, typename, typeschema, typekind,
+ alias_to, format=PG_FORMAT_ANY):
+ cdef:
+ Codec codec
+ Codec target_codec
+ uint32_t oid = pylong_as_oid(typeoid)
+ uint32_t alias_oid = 0
+ bint codec_set = False
+
+ if format == PG_FORMAT_ANY:
+ formats = (PG_FORMAT_BINARY, PG_FORMAT_TEXT)
+ else:
+ formats = (format,)
+
+ if isinstance(alias_to, int):
+ alias_oid = pylong_as_oid(alias_to)
+ else:
+ alias_oid = BUILTIN_TYPE_NAME_MAP.get(alias_to, 0)
+
+ for format in formats:
+ if alias_oid != 0:
+ target_codec = self.get_codec(alias_oid, format)
+ else:
+ target_codec = get_extra_codec(alias_to, format)
+
+ if target_codec is None:
+ continue
+
+ codec = target_codec.copy()
+ codec.oid = typeoid
+ codec.name = typename
+ codec.schema = typeschema
+ codec.kind = typekind
+
+ self._custom_type_codecs[typeoid, format] = codec
+ codec_set = True
+
+ if not codec_set:
+ if format == PG_FORMAT_BINARY:
+ codec_str = 'binary'
+ elif format == PG_FORMAT_TEXT:
+ codec_str = 'text'
+ else:
+ codec_str = 'text or binary'
+
+ raise exceptions.InterfaceError(
+ f'cannot alias {typename} to {alias_to}: '
+ f'there is no {codec_str} codec for {alias_to}')
+
+ def set_builtin_type_codec(self, typeoid, typename, typeschema, typekind,
+ alias_to, format=PG_FORMAT_ANY):
+ self._set_builtin_type_codec(typeoid, typename, typeschema, typekind,
+ alias_to, format)
+ self.clear_type_cache()
+
+ def clear_type_cache(self):
+ self._derived_type_codecs.clear()
+
+ def declare_fallback_codec(self, uint32_t oid, str name, str schema):
+ cdef Codec codec
+
+ if oid <= MAXBUILTINOID:
+ # This is a BKI type, for which asyncpg has no
+ # defined codec. This should only happen for newly
+ # added builtin types, for which this version of
+ # asyncpg is lacking support.
+ #
+ raise exceptions.UnsupportedClientFeatureError(
+ f'unhandled standard data type {name!r} (OID {oid})')
+ else:
+ # This is a non-BKI type, and as such, has no
+ # stable OID, so no possibility of a builtin codec.
+ # In this case, fallback to text format. Applications
+ # can avoid this by specifying a codec for this type
+ # using Connection.set_type_codec().
+ #
+ self._set_builtin_type_codec(oid, name, schema, 'scalar',
+ TEXTOID, PG_FORMAT_TEXT)
+
+ codec = self.get_codec(oid, PG_FORMAT_TEXT)
+
+ return codec
+
+ cdef inline Codec get_codec(self, uint32_t oid, ServerDataFormat format,
+ bint ignore_custom_codec=False):
+ cdef Codec codec
+
+ if format == PG_FORMAT_ANY:
+ codec = self.get_codec(
+ oid, PG_FORMAT_BINARY, ignore_custom_codec)
+ if codec is None:
+ codec = self.get_codec(
+ oid, PG_FORMAT_TEXT, ignore_custom_codec)
+ return codec
+ else:
+ if not ignore_custom_codec:
+ codec = self.get_custom_codec(oid, PG_FORMAT_ANY)
+ if codec is not None:
+ if codec.format != format:
+ # The codec for this OID has been overridden by
+ # set_{builtin}_type_codec with a different format.
+ # We must respect that and not return a core codec.
+ return None
+ else:
+ return codec
+
+ codec = get_core_codec(oid, format)
+ if codec is not None:
+ return codec
+ else:
+ try:
+ return self._derived_type_codecs[oid, format]
+ except KeyError:
+ return None
+
+ cdef inline Codec get_custom_codec(
+ self,
+ uint32_t oid,
+ ServerDataFormat format
+ ):
+ cdef Codec codec
+
+ if format == PG_FORMAT_ANY:
+ codec = self.get_custom_codec(oid, PG_FORMAT_BINARY)
+ if codec is None:
+ codec = self.get_custom_codec(oid, PG_FORMAT_TEXT)
+ else:
+ codec = self._custom_type_codecs.get((oid, format))
+
+ return codec
+
+
+cdef inline Codec get_core_codec(
+ uint32_t oid, ServerDataFormat format,
+ ClientExchangeFormat xformat=PG_XFORMAT_OBJECT):
+ cdef:
+ void *ptr = NULL
+
+ if oid > MAXSUPPORTEDOID:
+ return None
+ if format == PG_FORMAT_BINARY:
+ ptr = binary_codec_map[oid * xformat]
+ elif format == PG_FORMAT_TEXT:
+ ptr = text_codec_map[oid * xformat]
+
+ if ptr is NULL:
+ return None
+ else:
+ return <Codec>ptr
+
+
+cdef inline Codec get_any_core_codec(
+ uint32_t oid, ServerDataFormat format,
+ ClientExchangeFormat xformat=PG_XFORMAT_OBJECT):
+ """A version of get_core_codec that accepts PG_FORMAT_ANY."""
+ cdef:
+ Codec codec
+
+ if format == PG_FORMAT_ANY:
+ codec = get_core_codec(oid, PG_FORMAT_BINARY, xformat)
+ if codec is None:
+ codec = get_core_codec(oid, PG_FORMAT_TEXT, xformat)
+ else:
+ codec = get_core_codec(oid, format, xformat)
+
+ return codec
+
+
+cdef inline int has_core_codec(uint32_t oid):
+ return binary_codec_map[oid] != NULL or text_codec_map[oid] != NULL
+
+
+cdef register_core_codec(uint32_t oid,
+ encode_func encode,
+ decode_func decode,
+ ServerDataFormat format,
+ ClientExchangeFormat xformat=PG_XFORMAT_OBJECT):
+
+ if oid > MAXSUPPORTEDOID:
+ raise exceptions.InternalClientError(
+ 'cannot register core codec for OID {}: it is greater '
+ 'than MAXSUPPORTEDOID ({})'.format(oid, MAXSUPPORTEDOID))
+
+ cdef:
+ Codec codec
+ str name
+ str kind
+
+ name = BUILTIN_TYPE_OID_MAP[oid]
+ kind = 'array' if oid in ARRAY_TYPES else 'scalar'
+
+ codec = Codec(oid)
+ codec.init(name, 'pg_catalog', kind, CODEC_C, format, xformat,
+ encode, decode, None, None, None, None, None, None, None, 0)
+ cpython.Py_INCREF(codec) # immortalize
+
+ if format == PG_FORMAT_BINARY:
+ binary_codec_map[oid * xformat] = <void*>codec
+ elif format == PG_FORMAT_TEXT:
+ text_codec_map[oid * xformat] = <void*>codec
+ else:
+ raise exceptions.InternalClientError(
+ 'invalid data format: {}'.format(format))
+
+
+cdef register_extra_codec(str name,
+ encode_func encode,
+ decode_func decode,
+ ServerDataFormat format):
+ cdef:
+ Codec codec
+ str kind
+
+ kind = 'scalar'
+
+ codec = Codec(INVALIDOID)
+ codec.init(name, None, kind, CODEC_C, format, PG_XFORMAT_OBJECT,
+ encode, decode, None, None, None, None, None, None, None, 0)
+ EXTRA_CODECS[name, format] = codec
+
+
+cdef inline Codec get_extra_codec(str name, ServerDataFormat format):
+ return EXTRA_CODECS.get((name, format))
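
The DataCodecConfig.add_python_codec() and _set_builtin_type_codec() methods above are the machinery behind asyncpg's public Connection.set_type_codec() and Connection.set_builtin_type_codec(). A rough usage sketch (plain Python, not part of the diff; assumes an open connection):

    import json
    import asyncpg

    async def setup_codecs(conn: asyncpg.Connection) -> None:
        # Register a Python-level json codec; dispatch goes through CODEC_PY
        # (encode_in_python / decode_in_python above).
        await conn.set_type_codec(
            'json',
            schema='pg_catalog',
            encoder=json.dumps,
            decoder=json.loads,
        )
        # Alias the hstore extension type to the extra codec registered as
        # 'pg_contrib.hstore' (see register_extra_codec / init_hstore_codecs
        # in pgproto.pyx below).
        await conn.set_builtin_type_codec('hstore', codec_name='pg_contrib.hstore')
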
diff --git a/.venv/lib/python3.12/site-packages/asyncpg/protocol/codecs/pgproto.pyx b/.venv/lib/python3.12/site-packages/asyncpg/protocol/codecs/pgproto.pyx
new file mode 100644
index 00000000..51d650d0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/asyncpg/protocol/codecs/pgproto.pyx
@@ -0,0 +1,484 @@
+# Copyright (C) 2016-present the asyncpg authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of asyncpg and is released under
+# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0
+
+
+cdef init_bits_codecs():
+ register_core_codec(BITOID,
+ <encode_func>pgproto.bits_encode,
+ <decode_func>pgproto.bits_decode,
+ PG_FORMAT_BINARY)
+
+ register_core_codec(VARBITOID,
+ <encode_func>pgproto.bits_encode,
+ <decode_func>pgproto.bits_decode,
+ PG_FORMAT_BINARY)
+
+
+cdef init_bytea_codecs():
+ register_core_codec(BYTEAOID,
+ <encode_func>pgproto.bytea_encode,
+ <decode_func>pgproto.bytea_decode,
+ PG_FORMAT_BINARY)
+
+ register_core_codec(CHAROID,
+ <encode_func>pgproto.bytea_encode,
+ <decode_func>pgproto.bytea_decode,
+ PG_FORMAT_BINARY)
+
+
+cdef init_datetime_codecs():
+ register_core_codec(DATEOID,
+ <encode_func>pgproto.date_encode,
+ <decode_func>pgproto.date_decode,
+ PG_FORMAT_BINARY)
+
+ register_core_codec(DATEOID,
+ <encode_func>pgproto.date_encode_tuple,
+ <decode_func>pgproto.date_decode_tuple,
+ PG_FORMAT_BINARY,
+ PG_XFORMAT_TUPLE)
+
+ register_core_codec(TIMEOID,
+ <encode_func>pgproto.time_encode,
+ <decode_func>pgproto.time_decode,
+ PG_FORMAT_BINARY)
+
+ register_core_codec(TIMEOID,
+ <encode_func>pgproto.time_encode_tuple,
+ <decode_func>pgproto.time_decode_tuple,
+ PG_FORMAT_BINARY,
+ PG_XFORMAT_TUPLE)
+
+ register_core_codec(TIMETZOID,
+ <encode_func>pgproto.timetz_encode,
+ <decode_func>pgproto.timetz_decode,
+ PG_FORMAT_BINARY)
+
+ register_core_codec(TIMETZOID,
+ <encode_func>pgproto.timetz_encode_tuple,
+ <decode_func>pgproto.timetz_decode_tuple,
+ PG_FORMAT_BINARY,
+ PG_XFORMAT_TUPLE)
+
+ register_core_codec(TIMESTAMPOID,
+ <encode_func>pgproto.timestamp_encode,
+ <decode_func>pgproto.timestamp_decode,
+ PG_FORMAT_BINARY)
+
+ register_core_codec(TIMESTAMPOID,
+ <encode_func>pgproto.timestamp_encode_tuple,
+ <decode_func>pgproto.timestamp_decode_tuple,
+ PG_FORMAT_BINARY,
+ PG_XFORMAT_TUPLE)
+
+ register_core_codec(TIMESTAMPTZOID,
+ <encode_func>pgproto.timestamptz_encode,
+ <decode_func>pgproto.timestamptz_decode,
+ PG_FORMAT_BINARY)
+
+ register_core_codec(TIMESTAMPTZOID,
+ <encode_func>pgproto.timestamp_encode_tuple,
+ <decode_func>pgproto.timestamp_decode_tuple,
+ PG_FORMAT_BINARY,
+ PG_XFORMAT_TUPLE)
+
+ register_core_codec(INTERVALOID,
+ <encode_func>pgproto.interval_encode,
+ <decode_func>pgproto.interval_decode,
+ PG_FORMAT_BINARY)
+
+ register_core_codec(INTERVALOID,
+ <encode_func>pgproto.interval_encode_tuple,
+ <decode_func>pgproto.interval_decode_tuple,
+ PG_FORMAT_BINARY,
+ PG_XFORMAT_TUPLE)
+
+ # The obsolete abstime/reltime/tinterval types are not interpreted;
+ # their values are simply passed through as text.
+ #
+ register_core_codec(ABSTIMEOID,
+ <encode_func>pgproto.text_encode,
+ <decode_func>pgproto.text_decode,
+ PG_FORMAT_TEXT)
+
+ register_core_codec(RELTIMEOID,
+ <encode_func>pgproto.text_encode,
+ <decode_func>pgproto.text_decode,
+ PG_FORMAT_TEXT)
+
+ register_core_codec(TINTERVALOID,
+ <encode_func>pgproto.text_encode,
+ <decode_func>pgproto.text_decode,
+ PG_FORMAT_TEXT)
+
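+# The PG_XFORMAT_TUPLE registrations above back the public
+# ``format='tuple'`` option of Connection.set_type_codec().  A hedged sketch
+# (assumes an open asyncpg connection ``conn``, and assumes the tuple carries
+# a single microseconds-since-2000-01-01 value mirroring the binary wire
+# format):
+#
+#     await conn.set_type_codec(
+#         'timestamptz',
+#         schema='pg_catalog',
+#         encoder=lambda ts_us: (ts_us,),   # pass raw microseconds through
+#         decoder=lambda t: t[0],
+#         format='tuple',
+#     )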
+
+cdef init_float_codecs():
+ register_core_codec(FLOAT4OID,
+ <encode_func>pgproto.float4_encode,
+ <decode_func>pgproto.float4_decode,
+ PG_FORMAT_BINARY)
+
+ register_core_codec(FLOAT8OID,
+ <encode_func>pgproto.float8_encode,
+ <decode_func>pgproto.float8_decode,
+ PG_FORMAT_BINARY)
+
+
+cdef init_geometry_codecs():
+ register_core_codec(BOXOID,
+ <encode_func>pgproto.box_encode,
+ <decode_func>pgproto.box_decode,
+ PG_FORMAT_BINARY)
+
+ register_core_codec(LINEOID,
+ <encode_func>pgproto.line_encode,
+ <decode_func>pgproto.line_decode,
+ PG_FORMAT_BINARY)
+
+ register_core_codec(LSEGOID,
+ <encode_func>pgproto.lseg_encode,
+ <decode_func>pgproto.lseg_decode,
+ PG_FORMAT_BINARY)
+
+ register_core_codec(POINTOID,
+ <encode_func>pgproto.point_encode,
+ <decode_func>pgproto.point_decode,
+ PG_FORMAT_BINARY)
+
+ register_core_codec(PATHOID,
+ <encode_func>pgproto.path_encode,
+ <decode_func>pgproto.path_decode,
+ PG_FORMAT_BINARY)
+
+ register_core_codec(POLYGONOID,
+ <encode_func>pgproto.poly_encode,
+ <decode_func>pgproto.poly_decode,
+ PG_FORMAT_BINARY)
+
+ register_core_codec(CIRCLEOID,
+ <encode_func>pgproto.circle_encode,
+ <decode_func>pgproto.circle_decode,
+ PG_FORMAT_BINARY)
+
+
+cdef init_hstore_codecs():
+ register_extra_codec('pg_contrib.hstore',
+ <encode_func>pgproto.hstore_encode,
+ <decode_func>pgproto.hstore_decode,
+ PG_FORMAT_BINARY)
+
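+# Unlike the core codecs above, this one stays inert until a connection opts
+# in.  A usage sketch (assumes an asyncpg connection ``conn`` and that the
+# hstore extension is installed in the target database):
+#
+#     await conn.set_builtin_type_codec(
+#         'hstore', codec_name='pg_contrib.hstore')
+#
+# after which hstore values are exchanged as Python dicts.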
+
+cdef init_json_codecs():
+ register_core_codec(JSONOID,
+ <encode_func>pgproto.text_encode,
+ <decode_func>pgproto.text_decode,
+ PG_FORMAT_BINARY)
+ register_core_codec(JSONBOID,
+ <encode_func>pgproto.jsonb_encode,
+ <decode_func>pgproto.jsonb_decode,
+ PG_FORMAT_BINARY)
+ register_core_codec(JSONPATHOID,
+ <encode_func>pgproto.jsonpath_encode,
+ <decode_func>pgproto.jsonpath_decode,
+ PG_FORMAT_BINARY)
+
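+# json/jsonb values are exchanged as strings by default.  Automatic
+# (de)serialization can be layered on per connection; a sketch (assumes an
+# asyncpg connection ``conn``):
+#
+#     import json
+#
+#     await conn.set_type_codec(
+#         'jsonb',
+#         schema='pg_catalog',
+#         encoder=json.dumps,
+#         decoder=json.loads,
+#     )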
+
+cdef init_int_codecs():
+
+ register_core_codec(BOOLOID,
+ <encode_func>pgproto.bool_encode,
+ <decode_func>pgproto.bool_decode,
+ PG_FORMAT_BINARY)
+
+ register_core_codec(INT2OID,
+ <encode_func>pgproto.int2_encode,
+ <decode_func>pgproto.int2_decode,
+ PG_FORMAT_BINARY)
+
+ register_core_codec(INT4OID,
+ <encode_func>pgproto.int4_encode,
+ <decode_func>pgproto.int4_decode,
+ PG_FORMAT_BINARY)
+
+ register_core_codec(INT8OID,
+ <encode_func>pgproto.int8_encode,
+ <decode_func>pgproto.int8_decode,
+ PG_FORMAT_BINARY)
+
+
+cdef init_pseudo_codecs():
+ # Void type is returned by SELECT void_returning_function()
+ register_core_codec(VOIDOID,
+ <encode_func>pgproto.void_encode,
+ <decode_func>pgproto.void_decode,
+ PG_FORMAT_BINARY)
+
+ # Unknown type, always decoded as text
+ register_core_codec(UNKNOWNOID,
+ <encode_func>pgproto.text_encode,
+ <decode_func>pgproto.text_decode,
+ PG_FORMAT_TEXT)
+
+ # OID and friends
+ oid_types = [
+ OIDOID, XIDOID, CIDOID
+ ]
+
+ for oid_type in oid_types:
+ register_core_codec(oid_type,
+ <encode_func>pgproto.uint4_encode,
+ <decode_func>pgproto.uint4_decode,
+ PG_FORMAT_BINARY)
+
+ # 64-bit OID types
+ oid8_types = [
+ XID8OID,
+ ]
+
+ for oid_type in oid8_types:
+ register_core_codec(oid_type,
+ <encode_func>pgproto.uint8_encode,
+ <decode_func>pgproto.uint8_decode,
+ PG_FORMAT_BINARY)
+
+ # reg* types -- these are really system catalog OIDs, but
+ # allow the catalog object name as an input. We could just
+ # decode these as OIDs, but handling them as text seems more
+ # useful.
+ #
+ reg_types = [
+ REGPROCOID, REGPROCEDUREOID, REGOPEROID, REGOPERATOROID,
+ REGCLASSOID, REGTYPEOID, REGCONFIGOID, REGDICTIONARYOID,
+ REGNAMESPACEOID, REGROLEOID, REFCURSOROID, REGCOLLATIONOID,
+ ]
+
+ for reg_type in reg_types:
+ register_core_codec(reg_type,
+ <encode_func>pgproto.text_encode,
+ <decode_func>pgproto.text_decode,
+ PG_FORMAT_TEXT)
+
+ # cstring type is used by Postgres' I/O functions
+ register_core_codec(CSTRINGOID,
+ <encode_func>pgproto.text_encode,
+ <decode_func>pgproto.text_decode,
+ PG_FORMAT_BINARY)
+
+ # various system pseudotypes with no I/O
+ no_io_types = [
+ ANYOID, TRIGGEROID, EVENT_TRIGGEROID, LANGUAGE_HANDLEROID,
+ FDW_HANDLEROID, TSM_HANDLEROID, INTERNALOID, OPAQUEOID,
+ ANYELEMENTOID, ANYNONARRAYOID, ANYCOMPATIBLEOID,
+ ANYCOMPATIBLEARRAYOID, ANYCOMPATIBLENONARRAYOID,
+ ANYCOMPATIBLERANGEOID, ANYCOMPATIBLEMULTIRANGEOID,
+ ANYRANGEOID, ANYMULTIRANGEOID, ANYARRAYOID,
+ PG_DDL_COMMANDOID, INDEX_AM_HANDLEROID, TABLE_AM_HANDLEROID,
+ ]
+
+ register_core_codec(ANYENUMOID,
+ NULL,
+ <decode_func>pgproto.text_decode,
+ PG_FORMAT_TEXT)
+
+ for no_io_type in no_io_types:
+ register_core_codec(no_io_type,
+ NULL,
+ NULL,
+ PG_FORMAT_BINARY)
+
+ # ACL specification string
+ register_core_codec(ACLITEMOID,
+ <encode_func>pgproto.text_encode,
+ <decode_func>pgproto.text_decode,
+ PG_FORMAT_TEXT)
+
+ # Postgres' serialized expression tree type
+ register_core_codec(PG_NODE_TREEOID,
+ NULL,
+ <decode_func>pgproto.text_decode,
+ PG_FORMAT_TEXT)
+
+ # pg_lsn type -- a pointer to a location in the XLOG.
+ register_core_codec(PG_LSNOID,
+ <encode_func>pgproto.int8_encode,
+ <decode_func>pgproto.int8_decode,
+ PG_FORMAT_BINARY)
+
+ register_core_codec(SMGROID,
+ <encode_func>pgproto.text_encode,
+ <decode_func>pgproto.text_decode,
+ PG_FORMAT_TEXT)
+
+ # pg_dependencies and pg_ndistinct are special types
+ # used in pg_statistic_ext columns.
+ register_core_codec(PG_DEPENDENCIESOID,
+ <encode_func>pgproto.text_encode,
+ <decode_func>pgproto.text_decode,
+ PG_FORMAT_TEXT)
+
+ register_core_codec(PG_NDISTINCTOID,
+ <encode_func>pgproto.text_encode,
+ <decode_func>pgproto.text_decode,
+ PG_FORMAT_TEXT)
+
+ # pg_mcv_list is a special type used in pg_statistic_ext_data
+ # system catalog
+ register_core_codec(PG_MCV_LISTOID,
+ <encode_func>pgproto.bytea_encode,
+ <decode_func>pgproto.bytea_decode,
+ PG_FORMAT_BINARY)
+
+ # These two are internal to BRIN index support and are unlikely
+ # to be sent, but since I/O functions for these exist, add decoders
+ # nonetheless.
+ register_core_codec(PG_BRIN_BLOOM_SUMMARYOID,
+ NULL,
+ <decode_func>pgproto.bytea_decode,
+ PG_FORMAT_BINARY)
+
+ register_core_codec(PG_BRIN_MINMAX_MULTI_SUMMARYOID,
+ NULL,
+ <decode_func>pgproto.bytea_decode,
+ PG_FORMAT_BINARY)
+
+
+cdef init_text_codecs():
+ textoids = [
+ NAMEOID,
+ BPCHAROID,
+ VARCHAROID,
+ TEXTOID,
+ XMLOID
+ ]
+
+ for oid in textoids:
+ register_core_codec(oid,
+ <encode_func>pgproto.text_encode,
+ <decode_func>pgproto.text_decode,
+ PG_FORMAT_BINARY)
+
+ register_core_codec(oid,
+ <encode_func>pgproto.text_encode,
+ <decode_func>pgproto.text_decode,
+ PG_FORMAT_TEXT)
+
+
+cdef init_tid_codecs():
+ register_core_codec(TIDOID,
+ <encode_func>pgproto.tid_encode,
+ <decode_func>pgproto.tid_decode,
+ PG_FORMAT_BINARY)
+
+
+cdef init_txid_codecs():
+ register_core_codec(TXID_SNAPSHOTOID,
+ <encode_func>pgproto.pg_snapshot_encode,
+ <decode_func>pgproto.pg_snapshot_decode,
+ PG_FORMAT_BINARY)
+
+ register_core_codec(PG_SNAPSHOTOID,
+ <encode_func>pgproto.pg_snapshot_encode,
+ <decode_func>pgproto.pg_snapshot_decode,
+ PG_FORMAT_BINARY)
+
+
+cdef init_tsearch_codecs():
+ ts_oids = [
+ TSQUERYOID,
+ TSVECTOROID,
+ ]
+
+ for oid in ts_oids:
+ register_core_codec(oid,
+ <encode_func>pgproto.text_encode,
+ <decode_func>pgproto.text_decode,
+ PG_FORMAT_TEXT)
+
+ register_core_codec(GTSVECTOROID,
+ NULL,
+ <decode_func>pgproto.text_decode,
+ PG_FORMAT_TEXT)
+
+
+cdef init_uuid_codecs():
+ register_core_codec(UUIDOID,
+ <encode_func>pgproto.uuid_encode,
+ <decode_func>pgproto.uuid_decode,
+ PG_FORMAT_BINARY)
+
+
+cdef init_numeric_codecs():
+ register_core_codec(NUMERICOID,
+ <encode_func>pgproto.numeric_encode_text,
+ <decode_func>pgproto.numeric_decode_text,
+ PG_FORMAT_TEXT)
+
+ register_core_codec(NUMERICOID,
+ <encode_func>pgproto.numeric_encode_binary,
+ <decode_func>pgproto.numeric_decode_binary,
+ PG_FORMAT_BINARY)
+
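+# NUMERIC round-trips through decimal.Decimal, so, for example (sketch;
+# assumes an asyncpg connection ``conn``):
+#
+#     await conn.fetchval('SELECT 1.10::numeric')
+#
+# is expected to return Decimal('1.10') rather than a float.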
+
+cdef init_network_codecs():
+ register_core_codec(CIDROID,
+ <encode_func>pgproto.cidr_encode,
+ <decode_func>pgproto.cidr_decode,
+ PG_FORMAT_BINARY)
+
+ register_core_codec(INETOID,
+ <encode_func>pgproto.inet_encode,
+ <decode_func>pgproto.inet_decode,
+ PG_FORMAT_BINARY)
+
+ register_core_codec(MACADDROID,
+ <encode_func>pgproto.text_encode,
+ <decode_func>pgproto.text_decode,
+ PG_FORMAT_TEXT)
+
+ register_core_codec(MACADDR8OID,
+ <encode_func>pgproto.text_encode,
+ <decode_func>pgproto.text_decode,
+ PG_FORMAT_TEXT)
+
+
+cdef init_monetary_codecs():
+ moneyoids = [
+ MONEYOID,
+ ]
+
+ for oid in moneyoids:
+ register_core_codec(oid,
+ <encode_func>pgproto.text_encode,
+ <decode_func>pgproto.text_decode,
+ PG_FORMAT_TEXT)
+
+
+cdef init_all_pgproto_codecs():
+ # Builtin types, in lexicographical order.
+ init_bits_codecs()
+ init_bytea_codecs()
+ init_datetime_codecs()
+ init_float_codecs()
+ init_geometry_codecs()
+ init_int_codecs()
+ init_json_codecs()
+ init_monetary_codecs()
+ init_network_codecs()
+ init_numeric_codecs()
+ init_text_codecs()
+ init_tid_codecs()
+ init_tsearch_codecs()
+ init_txid_codecs()
+ init_uuid_codecs()
+
+ # Various pseudotypes and system types
+ init_pseudo_codecs()
+
+ # contrib
+ init_hstore_codecs()
+
+
+init_all_pgproto_codecs()
diff --git a/.venv/lib/python3.12/site-packages/asyncpg/protocol/codecs/range.pyx b/.venv/lib/python3.12/site-packages/asyncpg/protocol/codecs/range.pyx
new file mode 100644
index 00000000..1038c18d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/asyncpg/protocol/codecs/range.pyx
@@ -0,0 +1,207 @@
+# Copyright (C) 2016-present the asyncpg authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of asyncpg and is released under
+# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0
+
+
+from asyncpg import types as apg_types
+
+from collections.abc import Sequence as SequenceABC
+
+# defined in postgresql/src/include/utils/rangetypes.h
+DEF RANGE_EMPTY = 0x01 # range is empty
+DEF RANGE_LB_INC = 0x02 # lower bound is inclusive
+DEF RANGE_UB_INC = 0x04 # upper bound is inclusive
+DEF RANGE_LB_INF = 0x08 # lower bound is -infinity
+DEF RANGE_UB_INF = 0x10 # upper bound is +infinity
+
+
+cdef enum _RangeArgumentType:
+ _RANGE_ARGUMENT_INVALID = 0
+ _RANGE_ARGUMENT_TUPLE = 1
+ _RANGE_ARGUMENT_RANGE = 2
+
+
+cdef inline bint _range_has_lbound(uint8_t flags):
+ return not (flags & (RANGE_EMPTY | RANGE_LB_INF))
+
+
+cdef inline bint _range_has_ubound(uint8_t flags):
+ return not (flags & (RANGE_EMPTY | RANGE_UB_INF))
+
+
+cdef inline _RangeArgumentType _range_type(object obj):
+ if cpython.PyTuple_Check(obj) or cpython.PyList_Check(obj):
+ return _RANGE_ARGUMENT_TUPLE
+ elif isinstance(obj, apg_types.Range):
+ return _RANGE_ARGUMENT_RANGE
+ else:
+ return _RANGE_ARGUMENT_INVALID
+
+
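+# On-the-wire layout produced and consumed below: an int32 datum length, one
+# flags byte (the RANGE_* bits above), then, for each bound that is actually
+# present (neither empty nor infinite), the bound encoded by the element
+# codec with its own int32 length prefix.
+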
+cdef range_encode(ConnectionSettings settings, WriteBuffer buf,
+ object obj, uint32_t elem_oid,
+ encode_func_ex encoder, const void *encoder_arg):
+ cdef:
+ ssize_t obj_len
+ uint8_t flags = 0
+ object lower = None
+ object upper = None
+ WriteBuffer bounds_data = WriteBuffer.new()
+ _RangeArgumentType arg_type = _range_type(obj)
+
+ if arg_type == _RANGE_ARGUMENT_INVALID:
+ raise TypeError(
+ 'list, tuple or Range object expected (got type {})'.format(
+ type(obj)))
+
+ elif arg_type == _RANGE_ARGUMENT_TUPLE:
+ obj_len = len(obj)
+ if obj_len == 2:
+ lower = obj[0]
+ upper = obj[1]
+
+ if lower is None:
+ flags |= RANGE_LB_INF
+
+ if upper is None:
+ flags |= RANGE_UB_INF
+
+ flags |= RANGE_LB_INC | RANGE_UB_INC
+
+ elif obj_len == 1:
+ lower = obj[0]
+ flags |= RANGE_LB_INC | RANGE_UB_INF
+
+ elif obj_len == 0:
+ flags |= RANGE_EMPTY
+
+ else:
+ raise ValueError(
+ 'expected 0, 1 or 2 elements in range (got {})'.format(
+ obj_len))
+
+ else:
+ if obj.isempty:
+ flags |= RANGE_EMPTY
+ else:
+ lower = obj.lower
+ upper = obj.upper
+
+ if obj.lower_inc:
+ flags |= RANGE_LB_INC
+ elif lower is None:
+ flags |= RANGE_LB_INF
+
+ if obj.upper_inc:
+ flags |= RANGE_UB_INC
+ elif upper is None:
+ flags |= RANGE_UB_INF
+
+ if _range_has_lbound(flags):
+ encoder(settings, bounds_data, lower, encoder_arg)
+
+ if _range_has_ubound(flags):
+ encoder(settings, bounds_data, upper, encoder_arg)
+
+ buf.write_int32(1 + bounds_data.len())
+ buf.write_byte(<int8_t>flags)
+ buf.write_buffer(bounds_data)
+
+
+cdef range_decode(ConnectionSettings settings, FRBuffer *buf,
+ decode_func_ex decoder, const void *decoder_arg):
+ cdef:
+ uint8_t flags = <uint8_t>frb_read(buf, 1)[0]
+ int32_t bound_len
+ object lower = None
+ object upper = None
+ FRBuffer bound_buf
+
+ if _range_has_lbound(flags):
+ bound_len = hton.unpack_int32(frb_read(buf, 4))
+ if bound_len == -1:
+ lower = None
+ else:
+ frb_slice_from(&bound_buf, buf, bound_len)
+ lower = decoder(settings, &bound_buf, decoder_arg)
+
+ if _range_has_ubound(flags):
+ bound_len = hton.unpack_int32(frb_read(buf, 4))
+ if bound_len == -1:
+ upper = None
+ else:
+ frb_slice_from(&bound_buf, buf, bound_len)
+ upper = decoder(settings, &bound_buf, decoder_arg)
+
+ return apg_types.Range(lower=lower, upper=upper,
+ lower_inc=(flags & RANGE_LB_INC) != 0,
+ upper_inc=(flags & RANGE_UB_INC) != 0,
+ empty=(flags & RANGE_EMPTY) != 0)
+
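+# At the Python level these functions surface as asyncpg.Range values.
+# A hedged usage sketch (assumes an asyncpg connection ``conn``):
+#
+#     import asyncpg
+#
+#     r = await conn.fetchval('SELECT int4range(10, 20)')
+#     # r == asyncpg.Range(10, 20)  -- lower inclusive, upper exclusive
+#
+#     ok = await conn.fetchval(
+#         'SELECT $1::int4range @> 15', asyncpg.Range(10, 20))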
+
+cdef multirange_encode(ConnectionSettings settings, WriteBuffer buf,
+ object obj, uint32_t elem_oid,
+ encode_func_ex encoder, const void *encoder_arg):
+ cdef:
+ WriteBuffer elem_data
+ ssize_t elem_data_len
+ ssize_t elem_count
+
+ if not isinstance(obj, SequenceABC):
+ raise TypeError(
+ 'expected a sequence (got type {!r})'.format(type(obj).__name__)
+ )
+
+ elem_data = WriteBuffer.new()
+
+ for elem in obj:
+ range_encode(settings, elem_data, elem, elem_oid, encoder, encoder_arg)
+
+ elem_count = len(obj)
+ if elem_count > INT32_MAX:
+ raise OverflowError('too many elements in multirange value')
+
+ elem_data_len = elem_data.len()
+ if elem_data_len > INT32_MAX - 4:
+ raise OverflowError(
+ f'size of encoded multirange datum exceeds the maximum allowed'
+ f' {INT32_MAX - 4} bytes')
+
+ # Datum length
+ buf.write_int32(4 + <int32_t>elem_data_len)
+ # Number of elements in multirange
+ buf.write_int32(<int32_t>elem_count)
+ buf.write_buffer(elem_data)
+
+
+cdef multirange_decode(ConnectionSettings settings, FRBuffer *buf,
+ decode_func_ex decoder, const void *decoder_arg):
+ cdef:
+ int32_t nelems = hton.unpack_int32(frb_read(buf, 4))
+ FRBuffer elem_buf
+ int32_t elem_len
+ int i
+ list result
+
+ if nelems == 0:
+ return []
+
+ if nelems < 0:
+ raise exceptions.ProtocolError(
+ 'unexpected multirange size value: {}'.format(nelems))
+
+ result = cpython.PyList_New(nelems)
+ for i in range(nelems):
+ elem_len = hton.unpack_int32(frb_read(buf, 4))
+ if elem_len == -1:
+ raise exceptions.ProtocolError(
+ 'unexpected NULL element in multirange value')
+ else:
+ frb_slice_from(&elem_buf, buf, elem_len)
+ elem = range_decode(settings, &elem_buf, decoder, decoder_arg)
+ cpython.Py_INCREF(elem)
+ cpython.PyList_SET_ITEM(result, i, elem)
+
+ return result
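+
+# Multiranges (PostgreSQL 14+) are exchanged as Python lists of
+# asyncpg.Range.  A hedged sketch (assumes an asyncpg connection ``conn``
+# against a 14+ server):
+#
+#     mr = await conn.fetchval(
+#         'SELECT $1::int4multirange',
+#         [asyncpg.Range(1, 3), asyncpg.Range(7, 9)])
+#     # mr == [asyncpg.Range(1, 3), asyncpg.Range(7, 9)]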
diff --git a/.venv/lib/python3.12/site-packages/asyncpg/protocol/codecs/record.pyx b/.venv/lib/python3.12/site-packages/asyncpg/protocol/codecs/record.pyx
new file mode 100644
index 00000000..6446f2da
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/asyncpg/protocol/codecs/record.pyx
@@ -0,0 +1,71 @@
+# Copyright (C) 2016-present the asyncpg authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of asyncpg and is released under
+# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0
+
+
+from asyncpg import exceptions
+
+
+cdef inline record_encode_frame(ConnectionSettings settings, WriteBuffer buf,
+ WriteBuffer elem_data, int32_t elem_count):
+ buf.write_int32(4 + elem_data.len())
+ # attribute count
+ buf.write_int32(elem_count)
+ # encoded attribute data
+ buf.write_buffer(elem_data)
+
+
+cdef anonymous_record_decode(ConnectionSettings settings, FRBuffer *buf):
+ cdef:
+ tuple result
+ ssize_t elem_count
+ ssize_t i
+ int32_t elem_len
+ uint32_t elem_typ
+ Codec elem_codec
+ FRBuffer elem_buf
+
+ elem_count = <ssize_t><uint32_t>hton.unpack_int32(frb_read(buf, 4))
+ result = cpython.PyTuple_New(elem_count)
+
+ for i in range(elem_count):
+ elem_typ = <uint32_t>hton.unpack_int32(frb_read(buf, 4))
+ elem_len = hton.unpack_int32(frb_read(buf, 4))
+
+ if elem_len == -1:
+ elem = None
+ else:
+ elem_codec = settings.get_data_codec(elem_typ)
+ if elem_codec is None or not elem_codec.has_decoder():
+ raise exceptions.InternalClientError(
+ 'no decoder for composite type element in '
+ 'position {} of type OID {}'.format(i, elem_typ))
+ elem = elem_codec.decode(settings,
+ frb_slice_from(&elem_buf, buf, elem_len))
+
+ cpython.Py_INCREF(elem)
+ cpython.PyTuple_SET_ITEM(result, i, elem)
+
+ return result
+
+
+cdef anonymous_record_encode(ConnectionSettings settings, WriteBuffer buf, obj):
+ raise exceptions.UnsupportedClientFeatureError(
+ 'input of anonymous composite types is not supported',
+ hint=(
+ 'Consider declaring an explicit composite type and '
+ 'using it to cast the argument.'
+ ),
+ detail='PostgreSQL does not implement anonymous composite type input.'
+ )
+
+
+cdef init_record_codecs():
+ register_core_codec(RECORDOID,
+ <encode_func>anonymous_record_encode,
+ <decode_func>anonymous_record_decode,
+ PG_FORMAT_BINARY)
+
+init_record_codecs()
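+
+# In practice anonymous_record_decode is what turns an ad-hoc ROW(...) into a
+# plain Python tuple, while sending one back raises the error above.  Sketch
+# (assumes an asyncpg connection ``conn``):
+#
+#     row = await conn.fetchval("SELECT (1, 'two'::text)")
+#     # row == (1, 'two')
+#
+#     # Passing (1, 'two') as an argument of anonymous record type is
+#     # rejected; declare a composite type and cast to it instead.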
diff --git a/.venv/lib/python3.12/site-packages/asyncpg/protocol/codecs/textutils.pyx b/.venv/lib/python3.12/site-packages/asyncpg/protocol/codecs/textutils.pyx
new file mode 100644
index 00000000..dfaf29e0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/asyncpg/protocol/codecs/textutils.pyx
@@ -0,0 +1,99 @@
+# Copyright (C) 2016-present the asyncpg authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of asyncpg and is released under
+# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0
+
+
+cdef inline uint32_t _apg_tolower(uint32_t c):
+ if c >= <uint32_t><Py_UCS4>'A' and c <= <uint32_t><Py_UCS4>'Z':
+ return c + <uint32_t><Py_UCS4>'a' - <uint32_t><Py_UCS4>'A'
+ else:
+ return c
+
+
+cdef int apg_strcasecmp(const Py_UCS4 *s1, const Py_UCS4 *s2):
+ cdef:
+ uint32_t c1
+ uint32_t c2
+ int i = 0
+
+ while True:
+ c1 = s1[i]
+ c2 = s2[i]
+
+ if c1 != c2:
+ c1 = _apg_tolower(c1)
+ c2 = _apg_tolower(c2)
+ if c1 != c2:
+ return <int32_t>c1 - <int32_t>c2
+
+ if c1 == 0 or c2 == 0:
+ break
+
+ i += 1
+
+ return 0
+
+
+cdef int apg_strcasecmp_char(const char *s1, const char *s2):
+ cdef:
+ uint8_t c1
+ uint8_t c2
+ int i = 0
+
+ while True:
+ c1 = <uint8_t>s1[i]
+ c2 = <uint8_t>s2[i]
+
+ if c1 != c2:
+ c1 = <uint8_t>_apg_tolower(c1)
+ c2 = <uint8_t>_apg_tolower(c2)
+ if c1 != c2:
+ return <int8_t>c1 - <int8_t>c2
+
+ if c1 == 0 or c2 == 0:
+ break
+
+ i += 1
+
+ return 0
+
+
+cdef inline bint apg_ascii_isspace(Py_UCS4 ch):
+ return (
+ ch == ' ' or
+ ch == '\n' or
+ ch == '\r' or
+ ch == '\t' or
+ ch == '\v' or
+ ch == '\f'
+ )
+
+
+cdef Py_UCS4 *apg_parse_int32(Py_UCS4 *buf, int32_t *num):
+ cdef:
+ Py_UCS4 *p
+ int32_t n = 0
+ int32_t neg = 0
+
+ if buf[0] == '-':
+ neg = 1
+ buf += 1
+ elif buf[0] == '+':
+ buf += 1
+
+ p = buf
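+ # Accumulate the magnitude as a negative number and negate at the end:
+ # this keeps "-2147483648" (INT32_MIN) representable without overflowing
+ # a positive int32 accumulator.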
+ while <int>p[0] >= <int><Py_UCS4>'0' and <int>p[0] <= <int><Py_UCS4>'9':
+ n = 10 * n - (<int>p[0] - <int32_t><Py_UCS4>'0')
+ p += 1
+
+ if p == buf:
+ return NULL
+
+ if not neg:
+ n = -n
+
+ num[0] = n
+
+ return p