Diffstat (limited to '.venv/lib/python3.12/site-packages/jsonschema')
35 files changed, 13250 insertions, 0 deletions
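The diff below vendors the complete `jsonschema` package into the virtual environment. For orientation, here is a minimal, illustrative sketch (not part of the diff) of how the public entry points exported by the `__init__.py` added below are typically used; the example schema and values are assumptions chosen only for demonstration.

```python
from jsonschema import Draft202012Validator, validate
from jsonschema.exceptions import ValidationError

# Hypothetical schema used only to illustrate the API.
schema = {
    "type": "object",
    "properties": {"port": {"type": "integer"}},
    "required": ["port"],
}

# One-shot validation: raises ValidationError when the instance is invalid.
validate(instance={"port": 8080}, schema=schema)

try:
    validate(instance={"port": "8080"}, schema=schema)
except ValidationError as err:
    print(err.message)  # '8080' is not of type 'integer'

# Reusable validator with format checking enabled via the class-level
# FORMAT_CHECKER attribute referenced in the deprecation notice below.
validator = Draft202012Validator(
    {"type": "string", "format": "ipv4"},
    format_checker=Draft202012Validator.FORMAT_CHECKER,
)
print(validator.is_valid("127.0.0.1"))   # True
print(validator.is_valid("not-an-ip"))   # False
```

Creating a validator instance once and reusing it (rather than calling `validate` repeatedly) avoids re-checking the schema on every call, which is the pattern the benchmark modules added in this diff also follow.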
diff --git a/.venv/lib/python3.12/site-packages/jsonschema/__init__.py b/.venv/lib/python3.12/site-packages/jsonschema/__init__.py new file mode 100644 index 00000000..79924cf7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/jsonschema/__init__.py @@ -0,0 +1,120 @@ +""" +An implementation of JSON Schema for Python. + +The main functionality is provided by the validator classes for each of the +supported JSON Schema versions. + +Most commonly, `jsonschema.validators.validate` is the quickest way to simply +validate a given instance under a schema, and will create a validator +for you. +""" +import warnings + +from jsonschema._format import FormatChecker +from jsonschema._types import TypeChecker +from jsonschema.exceptions import SchemaError, ValidationError +from jsonschema.validators import ( + Draft3Validator, + Draft4Validator, + Draft6Validator, + Draft7Validator, + Draft201909Validator, + Draft202012Validator, + validate, +) + + +def __getattr__(name): + if name == "__version__": + warnings.warn( + "Accessing jsonschema.__version__ is deprecated and will be " + "removed in a future release. Use importlib.metadata directly " + "to query for jsonschema's version.", + DeprecationWarning, + stacklevel=2, + ) + + from importlib import metadata + return metadata.version("jsonschema") + elif name == "RefResolver": + from jsonschema.validators import _RefResolver + warnings.warn( + _RefResolver._DEPRECATION_MESSAGE, + DeprecationWarning, + stacklevel=2, + ) + return _RefResolver + elif name == "ErrorTree": + warnings.warn( + "Importing ErrorTree directly from the jsonschema package " + "is deprecated and will become an ImportError. Import it from " + "jsonschema.exceptions instead.", + DeprecationWarning, + stacklevel=2, + ) + from jsonschema.exceptions import ErrorTree + return ErrorTree + elif name == "FormatError": + warnings.warn( + "Importing FormatError directly from the jsonschema package " + "is deprecated and will become an ImportError. Import it from " + "jsonschema.exceptions instead.", + DeprecationWarning, + stacklevel=2, + ) + from jsonschema.exceptions import FormatError + return FormatError + elif name == "Validator": + warnings.warn( + "Importing Validator directly from the jsonschema package " + "is deprecated and will become an ImportError. Import it from " + "jsonschema.protocols instead.", + DeprecationWarning, + stacklevel=2, + ) + from jsonschema.protocols import Validator + return Validator + elif name == "RefResolutionError": + from jsonschema.exceptions import _RefResolutionError + warnings.warn( + _RefResolutionError._DEPRECATION_MESSAGE, + DeprecationWarning, + stacklevel=2, + ) + return _RefResolutionError + + format_checkers = { + "draft3_format_checker": Draft3Validator, + "draft4_format_checker": Draft4Validator, + "draft6_format_checker": Draft6Validator, + "draft7_format_checker": Draft7Validator, + "draft201909_format_checker": Draft201909Validator, + "draft202012_format_checker": Draft202012Validator, + } + ValidatorForFormat = format_checkers.get(name) + if ValidatorForFormat is not None: + warnings.warn( + f"Accessing jsonschema.{name} is deprecated and will be " + "removed in a future release. 
Instead, use the FORMAT_CHECKER " + "attribute on the corresponding Validator.", + DeprecationWarning, + stacklevel=2, + ) + return ValidatorForFormat.FORMAT_CHECKER + + raise AttributeError(f"module {__name__} has no attribute {name}") + + +__all__ = [ + "Draft201909Validator", + "Draft202012Validator", + "Draft3Validator", + "Draft4Validator", + "Draft6Validator", + "Draft7Validator", + "FormatChecker", + "SchemaError", + "TypeChecker", + "ValidationError", + "validate", +] diff --git a/.venv/lib/python3.12/site-packages/jsonschema/__main__.py b/.venv/lib/python3.12/site-packages/jsonschema/__main__.py new file mode 100644 index 00000000..fb260ae1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/jsonschema/__main__.py @@ -0,0 +1,6 @@ +""" +The jsonschema CLI is now deprecated in favor of check-jsonschema. +""" +from jsonschema.cli import main + +main() diff --git a/.venv/lib/python3.12/site-packages/jsonschema/_format.py b/.venv/lib/python3.12/site-packages/jsonschema/_format.py new file mode 100644 index 00000000..6e87620c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/jsonschema/_format.py @@ -0,0 +1,519 @@ +from __future__ import annotations + +from contextlib import suppress +from datetime import date, datetime +from uuid import UUID +import ipaddress +import re +import typing +import warnings + +from jsonschema.exceptions import FormatError + +_FormatCheckCallable = typing.Callable[[object], bool] +#: A format checker callable. +_F = typing.TypeVar("_F", bound=_FormatCheckCallable) +_RaisesType = typing.Union[ + typing.Type[Exception], typing.Tuple[typing.Type[Exception], ...], +] + +_RE_DATE = re.compile(r"^\d{4}-\d{2}-\d{2}$", re.ASCII) + + +class FormatChecker: + """ + A ``format`` property checker. + + JSON Schema does not mandate that the ``format`` property actually do any + validation. If validation is desired however, instances of this class can + be hooked into validators to enable format validation. + + `FormatChecker` objects always return ``True`` when asked about + formats that they do not know how to validate. + + To add a check for a custom format use the `FormatChecker.checks` + decorator. + + Arguments: + + formats: + + The known formats to validate. This argument can be used to + limit which formats will be used during validation. + + """ + + checkers: dict[ + str, + tuple[_FormatCheckCallable, _RaisesType], + ] = {} # noqa: RUF012 + + def __init__(self, formats: typing.Iterable[str] | None = None): + if formats is None: + formats = self.checkers.keys() + self.checkers = {k: self.checkers[k] for k in formats} + + def __repr__(self): + return f"<FormatChecker checkers={sorted(self.checkers)}>" + + def checks( + self, format: str, raises: _RaisesType = (), + ) -> typing.Callable[[_F], _F]: + """ + Register a decorated function as validating a new format. + + Arguments: + + format: + + The format that the decorated function will check. + + raises: + + The exception(s) raised by the decorated function when an + invalid instance is found. + + The exception object will be accessible as the + `jsonschema.exceptions.ValidationError.cause` attribute of the + resulting validation error. + + """ + + def _checks(func: _F) -> _F: + self.checkers[format] = (func, raises) + return func + + return _checks + + @classmethod + def cls_checks( + cls, format: str, raises: _RaisesType = (), + ) -> typing.Callable[[_F], _F]: + warnings.warn( + ( + "FormatChecker.cls_checks is deprecated. Call " + "FormatChecker.checks on a specific FormatChecker instance " + "instead." 
+ ), + DeprecationWarning, + stacklevel=2, + ) + return cls._cls_checks(format=format, raises=raises) + + @classmethod + def _cls_checks( + cls, format: str, raises: _RaisesType = (), + ) -> typing.Callable[[_F], _F]: + def _checks(func: _F) -> _F: + cls.checkers[format] = (func, raises) + return func + + return _checks + + def check(self, instance: object, format: str) -> None: + """ + Check whether the instance conforms to the given format. + + Arguments: + + instance (*any primitive type*, i.e. str, number, bool): + + The instance to check + + format: + + The format that instance should conform to + + Raises: + + FormatError: + + if the instance does not conform to ``format`` + + """ + if format not in self.checkers: + return + + func, raises = self.checkers[format] + result, cause = None, None + try: + result = func(instance) + except raises as e: + cause = e + if not result: + raise FormatError(f"{instance!r} is not a {format!r}", cause=cause) + + def conforms(self, instance: object, format: str) -> bool: + """ + Check whether the instance conforms to the given format. + + Arguments: + + instance (*any primitive type*, i.e. str, number, bool): + + The instance to check + + format: + + The format that instance should conform to + + Returns: + + bool: whether it conformed + + """ + try: + self.check(instance, format) + except FormatError: + return False + else: + return True + + +draft3_format_checker = FormatChecker() +draft4_format_checker = FormatChecker() +draft6_format_checker = FormatChecker() +draft7_format_checker = FormatChecker() +draft201909_format_checker = FormatChecker() +draft202012_format_checker = FormatChecker() + +_draft_checkers: dict[str, FormatChecker] = dict( + draft3=draft3_format_checker, + draft4=draft4_format_checker, + draft6=draft6_format_checker, + draft7=draft7_format_checker, + draft201909=draft201909_format_checker, + draft202012=draft202012_format_checker, +) + + +def _checks_drafts( + name=None, + draft3=None, + draft4=None, + draft6=None, + draft7=None, + draft201909=None, + draft202012=None, + raises=(), +) -> typing.Callable[[_F], _F]: + draft3 = draft3 or name + draft4 = draft4 or name + draft6 = draft6 or name + draft7 = draft7 or name + draft201909 = draft201909 or name + draft202012 = draft202012 or name + + def wrap(func: _F) -> _F: + if draft3: + func = _draft_checkers["draft3"].checks(draft3, raises)(func) + if draft4: + func = _draft_checkers["draft4"].checks(draft4, raises)(func) + if draft6: + func = _draft_checkers["draft6"].checks(draft6, raises)(func) + if draft7: + func = _draft_checkers["draft7"].checks(draft7, raises)(func) + if draft201909: + func = _draft_checkers["draft201909"].checks(draft201909, raises)( + func, + ) + if draft202012: + func = _draft_checkers["draft202012"].checks(draft202012, raises)( + func, + ) + + # Oy. This is bad global state, but relied upon for now, until + # deprecation. 
See #519 and test_format_checkers_come_with_defaults + FormatChecker._cls_checks( + draft202012 or draft201909 or draft7 or draft6 or draft4 or draft3, + raises, + )(func) + return func + + return wrap + + +@_checks_drafts(name="idn-email") +@_checks_drafts(name="email") +def is_email(instance: object) -> bool: + if not isinstance(instance, str): + return True + return "@" in instance + + +@_checks_drafts( + draft3="ip-address", + draft4="ipv4", + draft6="ipv4", + draft7="ipv4", + draft201909="ipv4", + draft202012="ipv4", + raises=ipaddress.AddressValueError, +) +def is_ipv4(instance: object) -> bool: + if not isinstance(instance, str): + return True + return bool(ipaddress.IPv4Address(instance)) + + +@_checks_drafts(name="ipv6", raises=ipaddress.AddressValueError) +def is_ipv6(instance: object) -> bool: + if not isinstance(instance, str): + return True + address = ipaddress.IPv6Address(instance) + return not getattr(address, "scope_id", "") + + +with suppress(ImportError): + from fqdn import FQDN + + @_checks_drafts( + draft3="host-name", + draft4="hostname", + draft6="hostname", + draft7="hostname", + draft201909="hostname", + draft202012="hostname", + ) + def is_host_name(instance: object) -> bool: + if not isinstance(instance, str): + return True + return FQDN(instance, min_labels=1).is_valid + + +with suppress(ImportError): + # The built-in `idna` codec only implements RFC 3890, so we go elsewhere. + import idna + + @_checks_drafts( + draft7="idn-hostname", + draft201909="idn-hostname", + draft202012="idn-hostname", + raises=(idna.IDNAError, UnicodeError), + ) + def is_idn_host_name(instance: object) -> bool: + if not isinstance(instance, str): + return True + idna.encode(instance) + return True + + +try: + import rfc3987 +except ImportError: + with suppress(ImportError): + from rfc3986_validator import validate_rfc3986 + + @_checks_drafts(name="uri") + def is_uri(instance: object) -> bool: + if not isinstance(instance, str): + return True + return validate_rfc3986(instance, rule="URI") + + @_checks_drafts( + draft6="uri-reference", + draft7="uri-reference", + draft201909="uri-reference", + draft202012="uri-reference", + raises=ValueError, + ) + def is_uri_reference(instance: object) -> bool: + if not isinstance(instance, str): + return True + return validate_rfc3986(instance, rule="URI_reference") + +else: + + @_checks_drafts( + draft7="iri", + draft201909="iri", + draft202012="iri", + raises=ValueError, + ) + def is_iri(instance: object) -> bool: + if not isinstance(instance, str): + return True + return rfc3987.parse(instance, rule="IRI") + + @_checks_drafts( + draft7="iri-reference", + draft201909="iri-reference", + draft202012="iri-reference", + raises=ValueError, + ) + def is_iri_reference(instance: object) -> bool: + if not isinstance(instance, str): + return True + return rfc3987.parse(instance, rule="IRI_reference") + + @_checks_drafts(name="uri", raises=ValueError) + def is_uri(instance: object) -> bool: + if not isinstance(instance, str): + return True + return rfc3987.parse(instance, rule="URI") + + @_checks_drafts( + draft6="uri-reference", + draft7="uri-reference", + draft201909="uri-reference", + draft202012="uri-reference", + raises=ValueError, + ) + def is_uri_reference(instance: object) -> bool: + if not isinstance(instance, str): + return True + return rfc3987.parse(instance, rule="URI_reference") + + +with suppress(ImportError): + from rfc3339_validator import validate_rfc3339 + + @_checks_drafts(name="date-time") + def is_datetime(instance: object) -> bool: + if 
not isinstance(instance, str): + return True + return validate_rfc3339(instance.upper()) + + @_checks_drafts( + draft7="time", + draft201909="time", + draft202012="time", + ) + def is_time(instance: object) -> bool: + if not isinstance(instance, str): + return True + return is_datetime("1970-01-01T" + instance) + + +@_checks_drafts(name="regex", raises=re.error) +def is_regex(instance: object) -> bool: + if not isinstance(instance, str): + return True + return bool(re.compile(instance)) + + +@_checks_drafts( + draft3="date", + draft7="date", + draft201909="date", + draft202012="date", + raises=ValueError, +) +def is_date(instance: object) -> bool: + if not isinstance(instance, str): + return True + return bool(_RE_DATE.fullmatch(instance) and date.fromisoformat(instance)) + + +@_checks_drafts(draft3="time", raises=ValueError) +def is_draft3_time(instance: object) -> bool: + if not isinstance(instance, str): + return True + return bool(datetime.strptime(instance, "%H:%M:%S")) # noqa: DTZ007 + + +with suppress(ImportError): + import webcolors + + @_checks_drafts(draft3="color", raises=(ValueError, TypeError)) + def is_css21_color(instance: object) -> bool: + if isinstance(instance, str): + try: + webcolors.name_to_hex(instance) + except ValueError: + webcolors.normalize_hex(instance.lower()) + return True + + +with suppress(ImportError): + import jsonpointer + + @_checks_drafts( + draft6="json-pointer", + draft7="json-pointer", + draft201909="json-pointer", + draft202012="json-pointer", + raises=jsonpointer.JsonPointerException, + ) + def is_json_pointer(instance: object) -> bool: + if not isinstance(instance, str): + return True + return bool(jsonpointer.JsonPointer(instance)) + + # TODO: I don't want to maintain this, so it + # needs to go either into jsonpointer (pending + # https://github.com/stefankoegl/python-json-pointer/issues/34) or + # into a new external library. 
+ @_checks_drafts( + draft7="relative-json-pointer", + draft201909="relative-json-pointer", + draft202012="relative-json-pointer", + raises=jsonpointer.JsonPointerException, + ) + def is_relative_json_pointer(instance: object) -> bool: + # Definition taken from: + # https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3 + if not isinstance(instance, str): + return True + if not instance: + return False + + non_negative_integer, rest = [], "" + for i, character in enumerate(instance): + if character.isdigit(): + # digits with a leading "0" are not allowed + if i > 0 and int(instance[i - 1]) == 0: + return False + + non_negative_integer.append(character) + continue + + if not non_negative_integer: + return False + + rest = instance[i:] + break + return (rest == "#") or bool(jsonpointer.JsonPointer(rest)) + + +with suppress(ImportError): + import uri_template + + @_checks_drafts( + draft6="uri-template", + draft7="uri-template", + draft201909="uri-template", + draft202012="uri-template", + ) + def is_uri_template(instance: object) -> bool: + if not isinstance(instance, str): + return True + return uri_template.validate(instance) + + +with suppress(ImportError): + import isoduration + + @_checks_drafts( + draft201909="duration", + draft202012="duration", + raises=isoduration.DurationParsingException, + ) + def is_duration(instance: object) -> bool: + if not isinstance(instance, str): + return True + isoduration.parse_duration(instance) + # FIXME: See bolsote/isoduration#25 and bolsote/isoduration#21 + return instance.endswith(tuple("DMYWHMS")) + + +@_checks_drafts( + draft201909="uuid", + draft202012="uuid", + raises=ValueError, +) +def is_uuid(instance: object) -> bool: + if not isinstance(instance, str): + return True + UUID(instance) + return all(instance[position] == "-" for position in (8, 13, 18, 23)) diff --git a/.venv/lib/python3.12/site-packages/jsonschema/_keywords.py b/.venv/lib/python3.12/site-packages/jsonschema/_keywords.py new file mode 100644 index 00000000..f30f9541 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/jsonschema/_keywords.py @@ -0,0 +1,449 @@ +from fractions import Fraction +import re + +from jsonschema._utils import ( + ensure_list, + equal, + extras_msg, + find_additional_properties, + find_evaluated_item_indexes_by_schema, + find_evaluated_property_keys_by_schema, + uniq, +) +from jsonschema.exceptions import FormatError, ValidationError + + +def patternProperties(validator, patternProperties, instance, schema): + if not validator.is_type(instance, "object"): + return + + for pattern, subschema in patternProperties.items(): + for k, v in instance.items(): + if re.search(pattern, k): + yield from validator.descend( + v, subschema, path=k, schema_path=pattern, + ) + + +def propertyNames(validator, propertyNames, instance, schema): + if not validator.is_type(instance, "object"): + return + + for property in instance: + yield from validator.descend(instance=property, schema=propertyNames) + + +def additionalProperties(validator, aP, instance, schema): + if not validator.is_type(instance, "object"): + return + + extras = set(find_additional_properties(instance, schema)) + + if validator.is_type(aP, "object"): + for extra in extras: + yield from validator.descend(instance[extra], aP, path=extra) + elif not aP and extras: + if "patternProperties" in schema: + verb = "does" if len(extras) == 1 else "do" + joined = ", ".join(repr(each) for each in sorted(extras)) + patterns = ", ".join( + repr(each) for each in 
sorted(schema["patternProperties"]) + ) + error = f"{joined} {verb} not match any of the regexes: {patterns}" + yield ValidationError(error) + else: + error = "Additional properties are not allowed (%s %s unexpected)" + yield ValidationError(error % extras_msg(sorted(extras, key=str))) + + +def items(validator, items, instance, schema): + if not validator.is_type(instance, "array"): + return + + prefix = len(schema.get("prefixItems", [])) + total = len(instance) + extra = total - prefix + if extra <= 0: + return + + if items is False: + rest = instance[prefix:] if extra != 1 else instance[prefix] + item = "items" if prefix != 1 else "item" + yield ValidationError( + f"Expected at most {prefix} {item} but found {extra} " + f"extra: {rest!r}", + ) + else: + for index in range(prefix, total): + yield from validator.descend( + instance=instance[index], + schema=items, + path=index, + ) + + +def const(validator, const, instance, schema): + if not equal(instance, const): + yield ValidationError(f"{const!r} was expected") + + +def contains(validator, contains, instance, schema): + if not validator.is_type(instance, "array"): + return + + matches = 0 + min_contains = schema.get("minContains", 1) + max_contains = schema.get("maxContains", len(instance)) + + contains_validator = validator.evolve(schema=contains) + + for each in instance: + if contains_validator.is_valid(each): + matches += 1 + if matches > max_contains: + yield ValidationError( + "Too many items match the given schema " + f"(expected at most {max_contains})", + validator="maxContains", + validator_value=max_contains, + ) + return + + if matches < min_contains: + if not matches: + yield ValidationError( + f"{instance!r} does not contain items " + "matching the given schema", + ) + else: + yield ValidationError( + "Too few items match the given schema (expected at least " + f"{min_contains} but only {matches} matched)", + validator="minContains", + validator_value=min_contains, + ) + + +def exclusiveMinimum(validator, minimum, instance, schema): + if not validator.is_type(instance, "number"): + return + + if instance <= minimum: + yield ValidationError( + f"{instance!r} is less than or equal to " + f"the minimum of {minimum!r}", + ) + + +def exclusiveMaximum(validator, maximum, instance, schema): + if not validator.is_type(instance, "number"): + return + + if instance >= maximum: + yield ValidationError( + f"{instance!r} is greater than or equal " + f"to the maximum of {maximum!r}", + ) + + +def minimum(validator, minimum, instance, schema): + if not validator.is_type(instance, "number"): + return + + if instance < minimum: + message = f"{instance!r} is less than the minimum of {minimum!r}" + yield ValidationError(message) + + +def maximum(validator, maximum, instance, schema): + if not validator.is_type(instance, "number"): + return + + if instance > maximum: + message = f"{instance!r} is greater than the maximum of {maximum!r}" + yield ValidationError(message) + + +def multipleOf(validator, dB, instance, schema): + if not validator.is_type(instance, "number"): + return + + if isinstance(dB, float): + quotient = instance / dB + try: + failed = int(quotient) != quotient + except OverflowError: + # When `instance` is large and `dB` is less than one, + # quotient can overflow to infinity; and then casting to int + # raises an error. + # + # In this case we fall back to Fraction logic, which is + # exact and cannot overflow. 
The performance is also + # acceptable: we try the fast all-float option first, and + # we know that fraction(dB) can have at most a few hundred + # digits in each part. The worst-case slowdown is therefore + # for already-slow enormous integers or Decimals. + failed = (Fraction(instance) / Fraction(dB)).denominator != 1 + else: + failed = instance % dB + + if failed: + yield ValidationError(f"{instance!r} is not a multiple of {dB}") + + +def minItems(validator, mI, instance, schema): + if validator.is_type(instance, "array") and len(instance) < mI: + message = "should be non-empty" if mI == 1 else "is too short" + yield ValidationError(f"{instance!r} {message}") + + +def maxItems(validator, mI, instance, schema): + if validator.is_type(instance, "array") and len(instance) > mI: + message = "is expected to be empty" if mI == 0 else "is too long" + yield ValidationError(f"{instance!r} {message}") + + +def uniqueItems(validator, uI, instance, schema): + if ( + uI + and validator.is_type(instance, "array") + and not uniq(instance) + ): + yield ValidationError(f"{instance!r} has non-unique elements") + + +def pattern(validator, patrn, instance, schema): + if ( + validator.is_type(instance, "string") + and not re.search(patrn, instance) + ): + yield ValidationError(f"{instance!r} does not match {patrn!r}") + + +def format(validator, format, instance, schema): + if validator.format_checker is not None: + try: + validator.format_checker.check(instance, format) + except FormatError as error: + yield ValidationError(error.message, cause=error.cause) + + +def minLength(validator, mL, instance, schema): + if validator.is_type(instance, "string") and len(instance) < mL: + message = "should be non-empty" if mL == 1 else "is too short" + yield ValidationError(f"{instance!r} {message}") + + +def maxLength(validator, mL, instance, schema): + if validator.is_type(instance, "string") and len(instance) > mL: + message = "is expected to be empty" if mL == 0 else "is too long" + yield ValidationError(f"{instance!r} {message}") + + +def dependentRequired(validator, dependentRequired, instance, schema): + if not validator.is_type(instance, "object"): + return + + for property, dependency in dependentRequired.items(): + if property not in instance: + continue + + for each in dependency: + if each not in instance: + message = f"{each!r} is a dependency of {property!r}" + yield ValidationError(message) + + +def dependentSchemas(validator, dependentSchemas, instance, schema): + if not validator.is_type(instance, "object"): + return + + for property, dependency in dependentSchemas.items(): + if property not in instance: + continue + yield from validator.descend( + instance, dependency, schema_path=property, + ) + + +def enum(validator, enums, instance, schema): + if all(not equal(each, instance) for each in enums): + yield ValidationError(f"{instance!r} is not one of {enums!r}") + + +def ref(validator, ref, instance, schema): + yield from validator._validate_reference(ref=ref, instance=instance) + + +def dynamicRef(validator, dynamicRef, instance, schema): + yield from validator._validate_reference(ref=dynamicRef, instance=instance) + + +def type(validator, types, instance, schema): + types = ensure_list(types) + + if not any(validator.is_type(instance, type) for type in types): + reprs = ", ".join(repr(type) for type in types) + yield ValidationError(f"{instance!r} is not of type {reprs}") + + +def properties(validator, properties, instance, schema): + if not validator.is_type(instance, "object"): + return + + for 
property, subschema in properties.items(): + if property in instance: + yield from validator.descend( + instance[property], + subschema, + path=property, + schema_path=property, + ) + + +def required(validator, required, instance, schema): + if not validator.is_type(instance, "object"): + return + for property in required: + if property not in instance: + yield ValidationError(f"{property!r} is a required property") + + +def minProperties(validator, mP, instance, schema): + if validator.is_type(instance, "object") and len(instance) < mP: + message = ( + "should be non-empty" if mP == 1 + else "does not have enough properties" + ) + yield ValidationError(f"{instance!r} {message}") + + +def maxProperties(validator, mP, instance, schema): + if not validator.is_type(instance, "object"): + return + if validator.is_type(instance, "object") and len(instance) > mP: + message = ( + "is expected to be empty" if mP == 0 + else "has too many properties" + ) + yield ValidationError(f"{instance!r} {message}") + + +def allOf(validator, allOf, instance, schema): + for index, subschema in enumerate(allOf): + yield from validator.descend(instance, subschema, schema_path=index) + + +def anyOf(validator, anyOf, instance, schema): + all_errors = [] + for index, subschema in enumerate(anyOf): + errs = list(validator.descend(instance, subschema, schema_path=index)) + if not errs: + break + all_errors.extend(errs) + else: + yield ValidationError( + f"{instance!r} is not valid under any of the given schemas", + context=all_errors, + ) + + +def oneOf(validator, oneOf, instance, schema): + subschemas = enumerate(oneOf) + all_errors = [] + for index, subschema in subschemas: + errs = list(validator.descend(instance, subschema, schema_path=index)) + if not errs: + first_valid = subschema + break + all_errors.extend(errs) + else: + yield ValidationError( + f"{instance!r} is not valid under any of the given schemas", + context=all_errors, + ) + + more_valid = [ + each for _, each in subschemas + if validator.evolve(schema=each).is_valid(instance) + ] + if more_valid: + more_valid.append(first_valid) + reprs = ", ".join(repr(schema) for schema in more_valid) + yield ValidationError(f"{instance!r} is valid under each of {reprs}") + + +def not_(validator, not_schema, instance, schema): + if validator.evolve(schema=not_schema).is_valid(instance): + message = f"{instance!r} should not be valid under {not_schema!r}" + yield ValidationError(message) + + +def if_(validator, if_schema, instance, schema): + if validator.evolve(schema=if_schema).is_valid(instance): + if "then" in schema: + then = schema["then"] + yield from validator.descend(instance, then, schema_path="then") + elif "else" in schema: + else_ = schema["else"] + yield from validator.descend(instance, else_, schema_path="else") + + +def unevaluatedItems(validator, unevaluatedItems, instance, schema): + if not validator.is_type(instance, "array"): + return + evaluated_item_indexes = find_evaluated_item_indexes_by_schema( + validator, instance, schema, + ) + unevaluated_items = [ + item for index, item in enumerate(instance) + if index not in evaluated_item_indexes + ] + if unevaluated_items: + error = "Unevaluated items are not allowed (%s %s unexpected)" + yield ValidationError(error % extras_msg(unevaluated_items)) + + +def unevaluatedProperties(validator, unevaluatedProperties, instance, schema): + if not validator.is_type(instance, "object"): + return + evaluated_keys = find_evaluated_property_keys_by_schema( + validator, instance, schema, + ) + unevaluated_keys 
= [] + for property in instance: + if property not in evaluated_keys: + for _ in validator.descend( + instance[property], + unevaluatedProperties, + path=property, + schema_path=property, + ): + # FIXME: Include context for each unevaluated property + # indicating why it's invalid under the subschema. + unevaluated_keys.append(property) # noqa: PERF401 + + if unevaluated_keys: + if unevaluatedProperties is False: + error = "Unevaluated properties are not allowed (%s %s unexpected)" + extras = sorted(unevaluated_keys, key=str) + yield ValidationError(error % extras_msg(extras)) + else: + error = ( + "Unevaluated properties are not valid under " + "the given schema (%s %s unevaluated and invalid)" + ) + yield ValidationError(error % extras_msg(unevaluated_keys)) + + +def prefixItems(validator, prefixItems, instance, schema): + if not validator.is_type(instance, "array"): + return + + for (index, item), subschema in zip(enumerate(instance), prefixItems): + yield from validator.descend( + instance=item, + schema=subschema, + schema_path=index, + path=index, + ) diff --git a/.venv/lib/python3.12/site-packages/jsonschema/_legacy_keywords.py b/.venv/lib/python3.12/site-packages/jsonschema/_legacy_keywords.py new file mode 100644 index 00000000..c691589f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/jsonschema/_legacy_keywords.py @@ -0,0 +1,449 @@ +import re + +from referencing.jsonschema import lookup_recursive_ref + +from jsonschema import _utils +from jsonschema.exceptions import ValidationError + + +def ignore_ref_siblings(schema): + """ + Ignore siblings of ``$ref`` if it is present. + + Otherwise, return all keywords. + + Suitable for use with `create`'s ``applicable_validators`` argument. + """ + ref = schema.get("$ref") + if ref is not None: + return [("$ref", ref)] + else: + return schema.items() + + +def dependencies_draft3(validator, dependencies, instance, schema): + if not validator.is_type(instance, "object"): + return + + for property, dependency in dependencies.items(): + if property not in instance: + continue + + if validator.is_type(dependency, "object"): + yield from validator.descend( + instance, dependency, schema_path=property, + ) + elif validator.is_type(dependency, "string"): + if dependency not in instance: + message = f"{dependency!r} is a dependency of {property!r}" + yield ValidationError(message) + else: + for each in dependency: + if each not in instance: + message = f"{each!r} is a dependency of {property!r}" + yield ValidationError(message) + + +def dependencies_draft4_draft6_draft7( + validator, + dependencies, + instance, + schema, +): + """ + Support for the ``dependencies`` keyword from pre-draft 2019-09. + + In later drafts, the keyword was split into separate + ``dependentRequired`` and ``dependentSchemas`` validators. 
+ """ + if not validator.is_type(instance, "object"): + return + + for property, dependency in dependencies.items(): + if property not in instance: + continue + + if validator.is_type(dependency, "array"): + for each in dependency: + if each not in instance: + message = f"{each!r} is a dependency of {property!r}" + yield ValidationError(message) + else: + yield from validator.descend( + instance, dependency, schema_path=property, + ) + + +def disallow_draft3(validator, disallow, instance, schema): + for disallowed in _utils.ensure_list(disallow): + if validator.evolve(schema={"type": [disallowed]}).is_valid(instance): + message = f"{disallowed!r} is disallowed for {instance!r}" + yield ValidationError(message) + + +def extends_draft3(validator, extends, instance, schema): + if validator.is_type(extends, "object"): + yield from validator.descend(instance, extends) + return + for index, subschema in enumerate(extends): + yield from validator.descend(instance, subschema, schema_path=index) + + +def items_draft3_draft4(validator, items, instance, schema): + if not validator.is_type(instance, "array"): + return + + if validator.is_type(items, "object"): + for index, item in enumerate(instance): + yield from validator.descend(item, items, path=index) + else: + for (index, item), subschema in zip(enumerate(instance), items): + yield from validator.descend( + item, subschema, path=index, schema_path=index, + ) + + +def additionalItems(validator, aI, instance, schema): + if ( + not validator.is_type(instance, "array") + or validator.is_type(schema.get("items", {}), "object") + ): + return + + len_items = len(schema.get("items", [])) + if validator.is_type(aI, "object"): + for index, item in enumerate(instance[len_items:], start=len_items): + yield from validator.descend(item, aI, path=index) + elif not aI and len(instance) > len(schema.get("items", [])): + error = "Additional items are not allowed (%s %s unexpected)" + yield ValidationError( + error % _utils.extras_msg(instance[len(schema.get("items", [])):]), + ) + + +def items_draft6_draft7_draft201909(validator, items, instance, schema): + if not validator.is_type(instance, "array"): + return + + if validator.is_type(items, "array"): + for (index, item), subschema in zip(enumerate(instance), items): + yield from validator.descend( + item, subschema, path=index, schema_path=index, + ) + else: + for index, item in enumerate(instance): + yield from validator.descend(item, items, path=index) + + +def minimum_draft3_draft4(validator, minimum, instance, schema): + if not validator.is_type(instance, "number"): + return + + if schema.get("exclusiveMinimum", False): + failed = instance <= minimum + cmp = "less than or equal to" + else: + failed = instance < minimum + cmp = "less than" + + if failed: + message = f"{instance!r} is {cmp} the minimum of {minimum!r}" + yield ValidationError(message) + + +def maximum_draft3_draft4(validator, maximum, instance, schema): + if not validator.is_type(instance, "number"): + return + + if schema.get("exclusiveMaximum", False): + failed = instance >= maximum + cmp = "greater than or equal to" + else: + failed = instance > maximum + cmp = "greater than" + + if failed: + message = f"{instance!r} is {cmp} the maximum of {maximum!r}" + yield ValidationError(message) + + +def properties_draft3(validator, properties, instance, schema): + if not validator.is_type(instance, "object"): + return + + for property, subschema in properties.items(): + if property in instance: + yield from validator.descend( + instance[property], + 
subschema, + path=property, + schema_path=property, + ) + elif subschema.get("required", False): + error = ValidationError(f"{property!r} is a required property") + error._set( + validator="required", + validator_value=subschema["required"], + instance=instance, + schema=schema, + ) + error.path.appendleft(property) + error.schema_path.extend([property, "required"]) + yield error + + +def type_draft3(validator, types, instance, schema): + types = _utils.ensure_list(types) + + all_errors = [] + for index, type in enumerate(types): + if validator.is_type(type, "object"): + errors = list(validator.descend(instance, type, schema_path=index)) + if not errors: + return + all_errors.extend(errors) + elif validator.is_type(instance, type): + return + + reprs = [] + for type in types: + try: + reprs.append(repr(type["name"])) + except Exception: # noqa: BLE001 + reprs.append(repr(type)) + yield ValidationError( + f"{instance!r} is not of type {', '.join(reprs)}", + context=all_errors, + ) + + +def contains_draft6_draft7(validator, contains, instance, schema): + if not validator.is_type(instance, "array"): + return + + if not any( + validator.evolve(schema=contains).is_valid(element) + for element in instance + ): + yield ValidationError( + f"None of {instance!r} are valid under the given schema", + ) + + +def recursiveRef(validator, recursiveRef, instance, schema): + resolved = lookup_recursive_ref(validator._resolver) + yield from validator.descend( + instance, + resolved.contents, + resolver=resolved.resolver, + ) + + +def find_evaluated_item_indexes_by_schema(validator, instance, schema): + """ + Get all indexes of items that get evaluated under the current schema. + + Covers all keywords related to unevaluatedItems: items, prefixItems, if, + then, else, contains, unevaluatedItems, allOf, oneOf, anyOf + """ + if validator.is_type(schema, "boolean"): + return [] + evaluated_indexes = [] + + ref = schema.get("$ref") + if ref is not None: + resolved = validator._resolver.lookup(ref) + evaluated_indexes.extend( + find_evaluated_item_indexes_by_schema( + validator.evolve( + schema=resolved.contents, + _resolver=resolved.resolver, + ), + instance, + resolved.contents, + ), + ) + + if "$recursiveRef" in schema: + resolved = lookup_recursive_ref(validator._resolver) + evaluated_indexes.extend( + find_evaluated_item_indexes_by_schema( + validator.evolve( + schema=resolved.contents, + _resolver=resolved.resolver, + ), + instance, + resolved.contents, + ), + ) + + if "items" in schema: + if "additionalItems" in schema: + return list(range(len(instance))) + + if validator.is_type(schema["items"], "object"): + return list(range(len(instance))) + evaluated_indexes += list(range(len(schema["items"]))) + + if "if" in schema: + if validator.evolve(schema=schema["if"]).is_valid(instance): + evaluated_indexes += find_evaluated_item_indexes_by_schema( + validator, instance, schema["if"], + ) + if "then" in schema: + evaluated_indexes += find_evaluated_item_indexes_by_schema( + validator, instance, schema["then"], + ) + elif "else" in schema: + evaluated_indexes += find_evaluated_item_indexes_by_schema( + validator, instance, schema["else"], + ) + + for keyword in ["contains", "unevaluatedItems"]: + if keyword in schema: + for k, v in enumerate(instance): + if validator.evolve(schema=schema[keyword]).is_valid(v): + evaluated_indexes.append(k) + + for keyword in ["allOf", "oneOf", "anyOf"]: + if keyword in schema: + for subschema in schema[keyword]: + errs = next(validator.descend(instance, subschema), None) + if 
errs is None: + evaluated_indexes += find_evaluated_item_indexes_by_schema( + validator, instance, subschema, + ) + + return evaluated_indexes + + +def unevaluatedItems_draft2019(validator, unevaluatedItems, instance, schema): + if not validator.is_type(instance, "array"): + return + evaluated_item_indexes = find_evaluated_item_indexes_by_schema( + validator, instance, schema, + ) + unevaluated_items = [ + item for index, item in enumerate(instance) + if index not in evaluated_item_indexes + ] + if unevaluated_items: + error = "Unevaluated items are not allowed (%s %s unexpected)" + yield ValidationError(error % _utils.extras_msg(unevaluated_items)) + + +def find_evaluated_property_keys_by_schema(validator, instance, schema): + if validator.is_type(schema, "boolean"): + return [] + evaluated_keys = [] + + ref = schema.get("$ref") + if ref is not None: + resolved = validator._resolver.lookup(ref) + evaluated_keys.extend( + find_evaluated_property_keys_by_schema( + validator.evolve( + schema=resolved.contents, + _resolver=resolved.resolver, + ), + instance, + resolved.contents, + ), + ) + + if "$recursiveRef" in schema: + resolved = lookup_recursive_ref(validator._resolver) + evaluated_keys.extend( + find_evaluated_property_keys_by_schema( + validator.evolve( + schema=resolved.contents, + _resolver=resolved.resolver, + ), + instance, + resolved.contents, + ), + ) + + for keyword in [ + "properties", "additionalProperties", "unevaluatedProperties", + ]: + if keyword in schema: + schema_value = schema[keyword] + if validator.is_type(schema_value, "boolean") and schema_value: + evaluated_keys += instance.keys() + + elif validator.is_type(schema_value, "object"): + for property in schema_value: + if property in instance: + evaluated_keys.append(property) + + if "patternProperties" in schema: + for property in instance: + for pattern in schema["patternProperties"]: + if re.search(pattern, property): + evaluated_keys.append(property) + + if "dependentSchemas" in schema: + for property, subschema in schema["dependentSchemas"].items(): + if property not in instance: + continue + evaluated_keys += find_evaluated_property_keys_by_schema( + validator, instance, subschema, + ) + + for keyword in ["allOf", "oneOf", "anyOf"]: + if keyword in schema: + for subschema in schema[keyword]: + errs = next(validator.descend(instance, subschema), None) + if errs is None: + evaluated_keys += find_evaluated_property_keys_by_schema( + validator, instance, subschema, + ) + + if "if" in schema: + if validator.evolve(schema=schema["if"]).is_valid(instance): + evaluated_keys += find_evaluated_property_keys_by_schema( + validator, instance, schema["if"], + ) + if "then" in schema: + evaluated_keys += find_evaluated_property_keys_by_schema( + validator, instance, schema["then"], + ) + elif "else" in schema: + evaluated_keys += find_evaluated_property_keys_by_schema( + validator, instance, schema["else"], + ) + + return evaluated_keys + + +def unevaluatedProperties_draft2019(validator, uP, instance, schema): + if not validator.is_type(instance, "object"): + return + evaluated_keys = find_evaluated_property_keys_by_schema( + validator, instance, schema, + ) + unevaluated_keys = [] + for property in instance: + if property not in evaluated_keys: + for _ in validator.descend( + instance[property], + uP, + path=property, + schema_path=property, + ): + # FIXME: Include context for each unevaluated property + # indicating why it's invalid under the subschema. 
+ unevaluated_keys.append(property) # noqa: PERF401 + + if unevaluated_keys: + if uP is False: + error = "Unevaluated properties are not allowed (%s %s unexpected)" + extras = sorted(unevaluated_keys, key=str) + yield ValidationError(error % _utils.extras_msg(extras)) + else: + error = ( + "Unevaluated properties are not valid under " + "the given schema (%s %s unevaluated and invalid)" + ) + yield ValidationError(error % _utils.extras_msg(unevaluated_keys)) diff --git a/.venv/lib/python3.12/site-packages/jsonschema/_types.py b/.venv/lib/python3.12/site-packages/jsonschema/_types.py new file mode 100644 index 00000000..bf25e7e6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/jsonschema/_types.py @@ -0,0 +1,200 @@ +from __future__ import annotations + +from typing import Any, Callable, Mapping +import numbers + +from attrs import evolve, field, frozen +from rpds import HashTrieMap + +from jsonschema.exceptions import UndefinedTypeCheck + + +# unfortunately, the type of HashTrieMap is generic, and if used as an attrs +# converter, the generic type is presented to mypy, which then fails to match +# the concrete type of a type checker mapping +# this "do nothing" wrapper presents the correct information to mypy +def _typed_map_converter( + init_val: Mapping[str, Callable[[TypeChecker, Any], bool]], +) -> HashTrieMap[str, Callable[[TypeChecker, Any], bool]]: + return HashTrieMap.convert(init_val) + + +def is_array(checker, instance): + return isinstance(instance, list) + + +def is_bool(checker, instance): + return isinstance(instance, bool) + + +def is_integer(checker, instance): + # bool inherits from int, so ensure bools aren't reported as ints + if isinstance(instance, bool): + return False + return isinstance(instance, int) + + +def is_null(checker, instance): + return instance is None + + +def is_number(checker, instance): + # bool inherits from int, so ensure bools aren't reported as ints + if isinstance(instance, bool): + return False + return isinstance(instance, numbers.Number) + + +def is_object(checker, instance): + return isinstance(instance, dict) + + +def is_string(checker, instance): + return isinstance(instance, str) + + +def is_any(checker, instance): + return True + + +@frozen(repr=False) +class TypeChecker: + """ + A :kw:`type` property checker. + + A `TypeChecker` performs type checking for a `Validator`, converting + between the defined JSON Schema types and some associated Python types or + objects. + + Modifying the behavior just mentioned by redefining which Python objects + are considered to be of which JSON Schema types can be done using + `TypeChecker.redefine` or `TypeChecker.redefine_many`, and types can be + removed via `TypeChecker.remove`. Each of these return a new `TypeChecker`. + + Arguments: + + type_checkers: + + The initial mapping of types to their checking functions. + + """ + + _type_checkers: HashTrieMap[ + str, Callable[[TypeChecker, Any], bool], + ] = field(default=HashTrieMap(), converter=_typed_map_converter) + + def __repr__(self): + types = ", ".join(repr(k) for k in sorted(self._type_checkers)) + return f"<{self.__class__.__name__} types={{{types}}}>" + + def is_type(self, instance, type: str) -> bool: + """ + Check if the instance is of the appropriate type. + + Arguments: + + instance: + + The instance to check + + type: + + The name of the type that is expected. + + Raises: + + `jsonschema.exceptions.UndefinedTypeCheck`: + + if ``type`` is unknown to this object. 
+ + """ + try: + fn = self._type_checkers[type] + except KeyError: + raise UndefinedTypeCheck(type) from None + + return fn(self, instance) + + def redefine(self, type: str, fn) -> TypeChecker: + """ + Produce a new checker with the given type redefined. + + Arguments: + + type: + + The name of the type to check. + + fn (collections.abc.Callable): + + A callable taking exactly two parameters - the type + checker calling the function and the instance to check. + The function should return true if instance is of this + type and false otherwise. + + """ + return self.redefine_many({type: fn}) + + def redefine_many(self, definitions=()) -> TypeChecker: + """ + Produce a new checker with the given types redefined. + + Arguments: + + definitions (dict): + + A dictionary mapping types to their checking functions. + + """ + type_checkers = self._type_checkers.update(definitions) + return evolve(self, type_checkers=type_checkers) + + def remove(self, *types) -> TypeChecker: + """ + Produce a new checker with the given types forgotten. + + Arguments: + + types: + + the names of the types to remove. + + Raises: + + `jsonschema.exceptions.UndefinedTypeCheck`: + + if any given type is unknown to this object + + """ + type_checkers = self._type_checkers + for each in types: + try: + type_checkers = type_checkers.remove(each) + except KeyError: + raise UndefinedTypeCheck(each) from None + return evolve(self, type_checkers=type_checkers) + + +draft3_type_checker = TypeChecker( + { + "any": is_any, + "array": is_array, + "boolean": is_bool, + "integer": is_integer, + "object": is_object, + "null": is_null, + "number": is_number, + "string": is_string, + }, +) +draft4_type_checker = draft3_type_checker.remove("any") +draft6_type_checker = draft4_type_checker.redefine( + "integer", + lambda checker, instance: ( + is_integer(checker, instance) + or isinstance(instance, float) and instance.is_integer() + ), +) +draft7_type_checker = draft6_type_checker +draft201909_type_checker = draft7_type_checker +draft202012_type_checker = draft201909_type_checker diff --git a/.venv/lib/python3.12/site-packages/jsonschema/_typing.py b/.venv/lib/python3.12/site-packages/jsonschema/_typing.py new file mode 100644 index 00000000..d283dc48 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/jsonschema/_typing.py @@ -0,0 +1,28 @@ +""" +Some (initially private) typing helpers for jsonschema's types. +""" +from typing import Any, Callable, Iterable, Protocol, Tuple, Union + +import referencing.jsonschema + +from jsonschema.protocols import Validator + + +class SchemaKeywordValidator(Protocol): + def __call__( + self, + validator: Validator, + value: Any, + instance: Any, + schema: referencing.jsonschema.Schema, + ) -> None: + ... + + +id_of = Callable[[referencing.jsonschema.Schema], Union[str, None]] + + +ApplicableValidators = Callable[ + [referencing.jsonschema.Schema], + Iterable[Tuple[str, Any]], +] diff --git a/.venv/lib/python3.12/site-packages/jsonschema/_utils.py b/.venv/lib/python3.12/site-packages/jsonschema/_utils.py new file mode 100644 index 00000000..54d28c04 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/jsonschema/_utils.py @@ -0,0 +1,351 @@ +from collections.abc import Mapping, MutableMapping, Sequence +from urllib.parse import urlsplit +import itertools +import re + + +class URIDict(MutableMapping): + """ + Dictionary which uses normalized URIs as keys. 
+ """ + + def normalize(self, uri): + return urlsplit(uri).geturl() + + def __init__(self, *args, **kwargs): + self.store = dict() + self.store.update(*args, **kwargs) + + def __getitem__(self, uri): + return self.store[self.normalize(uri)] + + def __setitem__(self, uri, value): + self.store[self.normalize(uri)] = value + + def __delitem__(self, uri): + del self.store[self.normalize(uri)] + + def __iter__(self): + return iter(self.store) + + def __len__(self): # pragma: no cover -- untested, but to be removed + return len(self.store) + + def __repr__(self): # pragma: no cover -- untested, but to be removed + return repr(self.store) + + +class Unset: + """ + An as-of-yet unset attribute or unprovided default parameter. + """ + + def __repr__(self): # pragma: no cover + return "<unset>" + + +def format_as_index(container, indices): + """ + Construct a single string containing indexing operations for the indices. + + For example for a container ``bar``, [1, 2, "foo"] -> bar[1][2]["foo"] + + Arguments: + + container (str): + + A word to use for the thing being indexed + + indices (sequence): + + The indices to format. + + """ + if not indices: + return container + return f"{container}[{']['.join(repr(index) for index in indices)}]" + + +def find_additional_properties(instance, schema): + """ + Return the set of additional properties for the given ``instance``. + + Weeds out properties that should have been validated by ``properties`` and + / or ``patternProperties``. + + Assumes ``instance`` is dict-like already. + """ + properties = schema.get("properties", {}) + patterns = "|".join(schema.get("patternProperties", {})) + for property in instance: + if property not in properties: + if patterns and re.search(patterns, property): + continue + yield property + + +def extras_msg(extras): + """ + Create an error message for extra items or properties. + """ + verb = "was" if len(extras) == 1 else "were" + return ", ".join(repr(extra) for extra in extras), verb + + +def ensure_list(thing): + """ + Wrap ``thing`` in a list if it's a single str. + + Otherwise, return it unchanged. + """ + if isinstance(thing, str): + return [thing] + return thing + + +def _mapping_equal(one, two): + """ + Check if two mappings are equal using the semantics of `equal`. + """ + if len(one) != len(two): + return False + return all( + key in two and equal(value, two[key]) + for key, value in one.items() + ) + + +def _sequence_equal(one, two): + """ + Check if two sequences are equal using the semantics of `equal`. + """ + if len(one) != len(two): + return False + return all(equal(i, j) for i, j in zip(one, two)) + + +def equal(one, two): + """ + Check if two things are equal evading some Python type hierarchy semantics. + + Specifically in JSON Schema, evade `bool` inheriting from `int`, + recursing into sequences to do the same. + """ + if one is two: + return True + if isinstance(one, str) or isinstance(two, str): + return one == two + if isinstance(one, Sequence) and isinstance(two, Sequence): + return _sequence_equal(one, two) + if isinstance(one, Mapping) and isinstance(two, Mapping): + return _mapping_equal(one, two) + return unbool(one) == unbool(two) + + +def unbool(element, true=object(), false=object()): + """ + A hack to make True and 1 and False and 0 unique for ``uniq``. + """ + if element is True: + return true + elif element is False: + return false + return element + + +def uniq(container): + """ + Check if all of a container's elements are unique. 
+ + Tries to rely on the container being recursively sortable, or otherwise + falls back on (slow) brute force. + """ + try: + sort = sorted(unbool(i) for i in container) + sliced = itertools.islice(sort, 1, None) + + for i, j in zip(sort, sliced): + if equal(i, j): + return False + + except (NotImplementedError, TypeError): + seen = [] + for e in container: + e = unbool(e) + + for i in seen: + if equal(i, e): + return False + + seen.append(e) + return True + + +def find_evaluated_item_indexes_by_schema(validator, instance, schema): + """ + Get all indexes of items that get evaluated under the current schema. + + Covers all keywords related to unevaluatedItems: items, prefixItems, if, + then, else, contains, unevaluatedItems, allOf, oneOf, anyOf + """ + if validator.is_type(schema, "boolean"): + return [] + evaluated_indexes = [] + + if "items" in schema: + return list(range(len(instance))) + + ref = schema.get("$ref") + if ref is not None: + resolved = validator._resolver.lookup(ref) + evaluated_indexes.extend( + find_evaluated_item_indexes_by_schema( + validator.evolve( + schema=resolved.contents, + _resolver=resolved.resolver, + ), + instance, + resolved.contents, + ), + ) + + dynamicRef = schema.get("$dynamicRef") + if dynamicRef is not None: + resolved = validator._resolver.lookup(dynamicRef) + evaluated_indexes.extend( + find_evaluated_item_indexes_by_schema( + validator.evolve( + schema=resolved.contents, + _resolver=resolved.resolver, + ), + instance, + resolved.contents, + ), + ) + + if "prefixItems" in schema: + evaluated_indexes += list(range(len(schema["prefixItems"]))) + + if "if" in schema: + if validator.evolve(schema=schema["if"]).is_valid(instance): + evaluated_indexes += find_evaluated_item_indexes_by_schema( + validator, instance, schema["if"], + ) + if "then" in schema: + evaluated_indexes += find_evaluated_item_indexes_by_schema( + validator, instance, schema["then"], + ) + elif "else" in schema: + evaluated_indexes += find_evaluated_item_indexes_by_schema( + validator, instance, schema["else"], + ) + + for keyword in ["contains", "unevaluatedItems"]: + if keyword in schema: + for k, v in enumerate(instance): + if validator.evolve(schema=schema[keyword]).is_valid(v): + evaluated_indexes.append(k) + + for keyword in ["allOf", "oneOf", "anyOf"]: + if keyword in schema: + for subschema in schema[keyword]: + errs = next(validator.descend(instance, subschema), None) + if errs is None: + evaluated_indexes += find_evaluated_item_indexes_by_schema( + validator, instance, subschema, + ) + + return evaluated_indexes + + +def find_evaluated_property_keys_by_schema(validator, instance, schema): + """ + Get all keys of items that get evaluated under the current schema. 
+ + Covers all keywords related to unevaluatedProperties: properties, + additionalProperties, unevaluatedProperties, patternProperties, + dependentSchemas, allOf, oneOf, anyOf, if, then, else + """ + if validator.is_type(schema, "boolean"): + return [] + evaluated_keys = [] + + ref = schema.get("$ref") + if ref is not None: + resolved = validator._resolver.lookup(ref) + evaluated_keys.extend( + find_evaluated_property_keys_by_schema( + validator.evolve( + schema=resolved.contents, + _resolver=resolved.resolver, + ), + instance, + resolved.contents, + ), + ) + + dynamicRef = schema.get("$dynamicRef") + if dynamicRef is not None: + resolved = validator._resolver.lookup(dynamicRef) + evaluated_keys.extend( + find_evaluated_property_keys_by_schema( + validator.evolve( + schema=resolved.contents, + _resolver=resolved.resolver, + ), + instance, + resolved.contents, + ), + ) + + for keyword in [ + "properties", "additionalProperties", "unevaluatedProperties", + ]: + if keyword in schema: + schema_value = schema[keyword] + if validator.is_type(schema_value, "boolean") and schema_value: + evaluated_keys += instance.keys() + + elif validator.is_type(schema_value, "object"): + for property in schema_value: + if property in instance: + evaluated_keys.append(property) + + if "patternProperties" in schema: + for property in instance: + for pattern in schema["patternProperties"]: + if re.search(pattern, property): + evaluated_keys.append(property) + + if "dependentSchemas" in schema: + for property, subschema in schema["dependentSchemas"].items(): + if property not in instance: + continue + evaluated_keys += find_evaluated_property_keys_by_schema( + validator, instance, subschema, + ) + + for keyword in ["allOf", "oneOf", "anyOf"]: + if keyword in schema: + for subschema in schema[keyword]: + errs = next(validator.descend(instance, subschema), None) + if errs is None: + evaluated_keys += find_evaluated_property_keys_by_schema( + validator, instance, subschema, + ) + + if "if" in schema: + if validator.evolve(schema=schema["if"]).is_valid(instance): + evaluated_keys += find_evaluated_property_keys_by_schema( + validator, instance, schema["if"], + ) + if "then" in schema: + evaluated_keys += find_evaluated_property_keys_by_schema( + validator, instance, schema["then"], + ) + elif "else" in schema: + evaluated_keys += find_evaluated_property_keys_by_schema( + validator, instance, schema["else"], + ) + + return evaluated_keys diff --git a/.venv/lib/python3.12/site-packages/jsonschema/benchmarks/__init__.py b/.venv/lib/python3.12/site-packages/jsonschema/benchmarks/__init__.py new file mode 100644 index 00000000..e3dcc689 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/jsonschema/benchmarks/__init__.py @@ -0,0 +1,5 @@ +""" +Benchmarks for validation. + +This package is *not* public API. +""" diff --git a/.venv/lib/python3.12/site-packages/jsonschema/benchmarks/const_vs_enum.py b/.venv/lib/python3.12/site-packages/jsonschema/benchmarks/const_vs_enum.py new file mode 100644 index 00000000..c6fecd10 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/jsonschema/benchmarks/const_vs_enum.py @@ -0,0 +1,30 @@ +""" +A benchmark for comparing equivalent validation of `const` and `enum`. 
+""" + +from pyperf import Runner + +from jsonschema import Draft202012Validator + +value = [37] * 100 +const_schema = {"const": list(value)} +enum_schema = {"enum": [list(value)]} + +valid = list(value) +invalid = [*valid, 73] + +const = Draft202012Validator(const_schema) +enum = Draft202012Validator(enum_schema) + +assert const.is_valid(valid) +assert enum.is_valid(valid) +assert not const.is_valid(invalid) +assert not enum.is_valid(invalid) + + +if __name__ == "__main__": + runner = Runner() + runner.bench_func("const valid", lambda: const.is_valid(valid)) + runner.bench_func("const invalid", lambda: const.is_valid(invalid)) + runner.bench_func("enum valid", lambda: enum.is_valid(valid)) + runner.bench_func("enum invalid", lambda: enum.is_valid(invalid)) diff --git a/.venv/lib/python3.12/site-packages/jsonschema/benchmarks/contains.py b/.venv/lib/python3.12/site-packages/jsonschema/benchmarks/contains.py new file mode 100644 index 00000000..739cd044 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/jsonschema/benchmarks/contains.py @@ -0,0 +1,28 @@ +""" +A benchmark for validation of the `contains` keyword. +""" + +from pyperf import Runner + +from jsonschema import Draft202012Validator + +schema = { + "type": "array", + "contains": {"const": 37}, +} +validator = Draft202012Validator(schema) + +size = 1000 +beginning = [37] + [0] * (size - 1) +middle = [0] * (size // 2) + [37] + [0] * (size // 2) +end = [0] * (size - 1) + [37] +invalid = [0] * size + + +if __name__ == "__main__": + runner = Runner() + runner.bench_func("baseline", lambda: validator.is_valid([])) + runner.bench_func("beginning", lambda: validator.is_valid(beginning)) + runner.bench_func("middle", lambda: validator.is_valid(middle)) + runner.bench_func("end", lambda: validator.is_valid(end)) + runner.bench_func("invalid", lambda: validator.is_valid(invalid)) diff --git a/.venv/lib/python3.12/site-packages/jsonschema/benchmarks/issue232.py b/.venv/lib/python3.12/site-packages/jsonschema/benchmarks/issue232.py new file mode 100644 index 00000000..efd07154 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/jsonschema/benchmarks/issue232.py @@ -0,0 +1,25 @@ +""" +A performance benchmark using the example from issue #232. + +See https://github.com/python-jsonschema/jsonschema/pull/232. +""" +from pathlib import Path + +from pyperf import Runner +from referencing import Registry + +from jsonschema.tests._suite import Version +import jsonschema + +issue232 = Version( + path=Path(__file__).parent / "issue232", + remotes=Registry(), + name="issue232", +) + + +if __name__ == "__main__": + issue232.benchmark( + runner=Runner(), + Validator=jsonschema.Draft4Validator, + ) diff --git a/.venv/lib/python3.12/site-packages/jsonschema/benchmarks/issue232/issue.json b/.venv/lib/python3.12/site-packages/jsonschema/benchmarks/issue232/issue.json new file mode 100644 index 00000000..804c3408 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/jsonschema/benchmarks/issue232/issue.json @@ -0,0 +1,2653 @@ +[ + { + "description": "Petstore", + "schema": { + "title": "A JSON Schema for Swagger 2.0 API.", + "id": "http://swagger.io/v2/schema.json#", + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "required": [ + "swagger", + "info", + "paths" + ], + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "swagger": { + "type": "string", + "enum": [ + "2.0" + ], + "description": "The Swagger version of this document." 
+ }, + "info": { + "$ref": "#/definitions/info" + }, + "host": { + "type": "string", + "pattern": "^[^{}/ :\\\\]+(?::\\d+)?$", + "description": "The host (name or ip) of the API. Example: 'swagger.io'" + }, + "basePath": { + "type": "string", + "pattern": "^/", + "description": "The base path to the API. Example: '/api'." + }, + "schemes": { + "$ref": "#/definitions/schemesList" + }, + "consumes": { + "description": "A list of MIME types accepted by the API.", + "allOf": [ + { + "$ref": "#/definitions/mediaTypeList" + } + ] + }, + "produces": { + "description": "A list of MIME types the API can produce.", + "allOf": [ + { + "$ref": "#/definitions/mediaTypeList" + } + ] + }, + "paths": { + "$ref": "#/definitions/paths" + }, + "definitions": { + "$ref": "#/definitions/definitions" + }, + "parameters": { + "$ref": "#/definitions/parameterDefinitions" + }, + "responses": { + "$ref": "#/definitions/responseDefinitions" + }, + "security": { + "$ref": "#/definitions/security" + }, + "securityDefinitions": { + "$ref": "#/definitions/securityDefinitions" + }, + "tags": { + "type": "array", + "items": { + "$ref": "#/definitions/tag" + }, + "uniqueItems": true + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + } + }, + "definitions": { + "info": { + "type": "object", + "description": "General information about the API.", + "required": [ + "version", + "title" + ], + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "title": { + "type": "string", + "description": "A unique and precise title of the API." + }, + "version": { + "type": "string", + "description": "A semantic version number of the API." + }, + "description": { + "type": "string", + "description": "A longer description of the API. Should be different from the title. GitHub Flavored Markdown is allowed." + }, + "termsOfService": { + "type": "string", + "description": "The terms of service for the API." + }, + "contact": { + "$ref": "#/definitions/contact" + }, + "license": { + "$ref": "#/definitions/license" + } + } + }, + "contact": { + "type": "object", + "description": "Contact information for the owners of the API.", + "additionalProperties": false, + "properties": { + "name": { + "type": "string", + "description": "The identifying name of the contact person/organization." + }, + "url": { + "type": "string", + "description": "The URL pointing to the contact information.", + "format": "uri" + }, + "email": { + "type": "string", + "description": "The email address of the contact person/organization.", + "format": "email" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "license": { + "type": "object", + "required": [ + "name" + ], + "additionalProperties": false, + "properties": { + "name": { + "type": "string", + "description": "The name of the license type. It's encouraged to use an OSI compatible license." + }, + "url": { + "type": "string", + "description": "The URL pointing to the license.", + "format": "uri" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "paths": { + "type": "object", + "description": "Relative paths to the individual endpoints. 
They must be relative to the 'basePath'.", + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + }, + "^/": { + "$ref": "#/definitions/pathItem" + } + }, + "additionalProperties": false + }, + "definitions": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/schema" + }, + "description": "One or more JSON objects describing the schemas being consumed and produced by the API." + }, + "parameterDefinitions": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/parameter" + }, + "description": "One or more JSON representations for parameters" + }, + "responseDefinitions": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/response" + }, + "description": "One or more JSON representations for parameters" + }, + "externalDocs": { + "type": "object", + "additionalProperties": false, + "description": "information about external documentation", + "required": [ + "url" + ], + "properties": { + "description": { + "type": "string" + }, + "url": { + "type": "string", + "format": "uri" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "examples": { + "type": "object", + "additionalProperties": true + }, + "mimeType": { + "type": "string", + "description": "The MIME type of the HTTP message." + }, + "operation": { + "type": "object", + "required": [ + "responses" + ], + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "tags": { + "type": "array", + "items": { + "type": "string" + }, + "uniqueItems": true + }, + "summary": { + "type": "string", + "description": "A brief summary of the operation." + }, + "description": { + "type": "string", + "description": "A longer description of the operation, GitHub Flavored Markdown is allowed." + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + }, + "operationId": { + "type": "string", + "description": "A unique identifier of the operation." 
+ }, + "produces": { + "description": "A list of MIME types the API can produce.", + "allOf": [ + { + "$ref": "#/definitions/mediaTypeList" + } + ] + }, + "consumes": { + "description": "A list of MIME types the API can consume.", + "allOf": [ + { + "$ref": "#/definitions/mediaTypeList" + } + ] + }, + "parameters": { + "$ref": "#/definitions/parametersList" + }, + "responses": { + "$ref": "#/definitions/responses" + }, + "schemes": { + "$ref": "#/definitions/schemesList" + }, + "deprecated": { + "type": "boolean", + "default": false + }, + "security": { + "$ref": "#/definitions/security" + } + } + }, + "pathItem": { + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "$ref": { + "type": "string" + }, + "get": { + "$ref": "#/definitions/operation" + }, + "put": { + "$ref": "#/definitions/operation" + }, + "post": { + "$ref": "#/definitions/operation" + }, + "delete": { + "$ref": "#/definitions/operation" + }, + "options": { + "$ref": "#/definitions/operation" + }, + "head": { + "$ref": "#/definitions/operation" + }, + "patch": { + "$ref": "#/definitions/operation" + }, + "parameters": { + "$ref": "#/definitions/parametersList" + } + } + }, + "responses": { + "type": "object", + "description": "Response objects names can either be any valid HTTP status code or 'default'.", + "minProperties": 1, + "additionalProperties": false, + "patternProperties": { + "^([0-9]{3})$|^(default)$": { + "$ref": "#/definitions/responseValue" + }, + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "not": { + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + } + }, + "responseValue": { + "oneOf": [ + { + "$ref": "#/definitions/response" + }, + { + "$ref": "#/definitions/jsonReference" + } + ] + }, + "response": { + "type": "object", + "required": [ + "description" + ], + "properties": { + "description": { + "type": "string" + }, + "schema": { + "oneOf": [ + { + "$ref": "#/definitions/schema" + }, + { + "$ref": "#/definitions/fileSchema" + } + ] + }, + "headers": { + "$ref": "#/definitions/headers" + }, + "examples": { + "$ref": "#/definitions/examples" + } + }, + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "headers": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/header" + } + }, + "header": { + "type": "object", + "additionalProperties": false, + "required": [ + "type" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "string", + "number", + "integer", + "boolean", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormat" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": 
"#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "vendorExtension": { + "description": "Any property starting with x- is valid.", + "additionalProperties": true, + "additionalItems": true + }, + "bodyParameter": { + "type": "object", + "required": [ + "name", + "in", + "schema" + ], + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "description": { + "type": "string", + "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "body" + ] + }, + "required": { + "type": "boolean", + "description": "Determines whether or not this parameter is required or optional.", + "default": false + }, + "schema": { + "$ref": "#/definitions/schema" + } + }, + "additionalProperties": false + }, + "headerParameterSubSchema": { + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "required": { + "type": "boolean", + "description": "Determines whether or not this parameter is required or optional.", + "default": false + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "header" + ] + }, + "description": { + "type": "string", + "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." + }, + "type": { + "type": "string", + "enum": [ + "string", + "number", + "boolean", + "integer", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormat" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + } + }, + "queryParameterSubSchema": { + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "required": { + "type": "boolean", + "description": "Determines whether or not this parameter is required or optional.", + "default": false + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "query" + ] + }, + "description": { + "type": "string", + "description": "A brief description of the parameter. 
This could contain examples of use. GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." + }, + "allowEmptyValue": { + "type": "boolean", + "default": false, + "description": "allows sending a parameter by name only or with an empty value." + }, + "type": { + "type": "string", + "enum": [ + "string", + "number", + "boolean", + "integer", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormatWithMulti" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + } + }, + "formDataParameterSubSchema": { + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "required": { + "type": "boolean", + "description": "Determines whether or not this parameter is required or optional.", + "default": false + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "formData" + ] + }, + "description": { + "type": "string", + "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." + }, + "allowEmptyValue": { + "type": "boolean", + "default": false, + "description": "allows sending a parameter by name only or with an empty value." 
+ }, + "type": { + "type": "string", + "enum": [ + "string", + "number", + "boolean", + "integer", + "array", + "file" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormatWithMulti" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + } + }, + "pathParameterSubSchema": { + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "required": [ + "required" + ], + "properties": { + "required": { + "type": "boolean", + "enum": [ + true + ], + "description": "Determines whether or not this parameter is required or optional." + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "path" + ] + }, + "description": { + "type": "string", + "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." 
+ }, + "type": { + "type": "string", + "enum": [ + "string", + "number", + "boolean", + "integer", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormat" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + } + }, + "nonBodyParameter": { + "type": "object", + "required": [ + "name", + "in", + "type" + ], + "oneOf": [ + { + "$ref": "#/definitions/headerParameterSubSchema" + }, + { + "$ref": "#/definitions/formDataParameterSubSchema" + }, + { + "$ref": "#/definitions/queryParameterSubSchema" + }, + { + "$ref": "#/definitions/pathParameterSubSchema" + } + ] + }, + "parameter": { + "oneOf": [ + { + "$ref": "#/definitions/bodyParameter" + }, + { + "$ref": "#/definitions/nonBodyParameter" + } + ] + }, + "schema": { + "type": "object", + "description": "A deterministic version of a JSON Schema object.", + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "$ref": { + "type": "string" + }, + "format": { + "type": "string" + }, + "title": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/title" + }, + "description": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/description" + }, + "default": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/default" + }, + "multipleOf": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/multipleOf" + }, + "maximum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/maximum" + }, + "exclusiveMaximum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMaximum" + }, + "minimum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/minimum" + }, + "exclusiveMinimum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMinimum" + }, + "maxLength": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minLength": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "pattern": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/pattern" + }, + "maxItems": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minItems": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "uniqueItems": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/uniqueItems" + }, + "maxProperties": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minProperties": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "required": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/stringArray" + }, + "enum": { + "$ref": 
"http://json-schema.org/draft-04/schema#/properties/enum" + }, + "additionalProperties": { + "anyOf": [ + { + "$ref": "#/definitions/schema" + }, + { + "type": "boolean" + } + ], + "default": {} + }, + "type": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/type" + }, + "items": { + "anyOf": [ + { + "$ref": "#/definitions/schema" + }, + { + "type": "array", + "minItems": 1, + "items": { + "$ref": "#/definitions/schema" + } + } + ], + "default": {} + }, + "allOf": { + "type": "array", + "minItems": 1, + "items": { + "$ref": "#/definitions/schema" + } + }, + "properties": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/schema" + }, + "default": {} + }, + "discriminator": { + "type": "string" + }, + "readOnly": { + "type": "boolean", + "default": false + }, + "xml": { + "$ref": "#/definitions/xml" + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + }, + "example": {} + }, + "additionalProperties": false + }, + "fileSchema": { + "type": "object", + "description": "A deterministic version of a JSON Schema object.", + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "required": [ + "type" + ], + "properties": { + "format": { + "type": "string" + }, + "title": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/title" + }, + "description": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/description" + }, + "default": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/default" + }, + "required": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/stringArray" + }, + "type": { + "type": "string", + "enum": [ + "file" + ] + }, + "readOnly": { + "type": "boolean", + "default": false + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + }, + "example": {} + }, + "additionalProperties": false + }, + "primitivesItems": { + "type": "object", + "additionalProperties": false, + "properties": { + "type": { + "type": "string", + "enum": [ + "string", + "number", + "integer", + "boolean", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormat" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "security": { + "type": "array", + "items": { + "$ref": "#/definitions/securityRequirement" + }, + "uniqueItems": true + }, + "securityRequirement": { + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "type": "string" + }, + "uniqueItems": true + } + }, + "xml": { + "type": "object", + "additionalProperties": false, + "properties": { + "name": { + "type": "string" + }, + "namespace": { + "type": "string" + }, + 
"prefix": { + "type": "string" + }, + "attribute": { + "type": "boolean", + "default": false + }, + "wrapped": { + "type": "boolean", + "default": false + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "tag": { + "type": "object", + "additionalProperties": false, + "required": [ + "name" + ], + "properties": { + "name": { + "type": "string" + }, + "description": { + "type": "string" + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "securityDefinitions": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "$ref": "#/definitions/basicAuthenticationSecurity" + }, + { + "$ref": "#/definitions/apiKeySecurity" + }, + { + "$ref": "#/definitions/oauth2ImplicitSecurity" + }, + { + "$ref": "#/definitions/oauth2PasswordSecurity" + }, + { + "$ref": "#/definitions/oauth2ApplicationSecurity" + }, + { + "$ref": "#/definitions/oauth2AccessCodeSecurity" + } + ] + } + }, + "basicAuthenticationSecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "basic" + ] + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "apiKeySecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type", + "name", + "in" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "apiKey" + ] + }, + "name": { + "type": "string" + }, + "in": { + "type": "string", + "enum": [ + "header", + "query" + ] + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2ImplicitSecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type", + "flow", + "authorizationUrl" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "oauth2" + ] + }, + "flow": { + "type": "string", + "enum": [ + "implicit" + ] + }, + "scopes": { + "$ref": "#/definitions/oauth2Scopes" + }, + "authorizationUrl": { + "type": "string", + "format": "uri" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2PasswordSecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type", + "flow", + "tokenUrl" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "oauth2" + ] + }, + "flow": { + "type": "string", + "enum": [ + "password" + ] + }, + "scopes": { + "$ref": "#/definitions/oauth2Scopes" + }, + "tokenUrl": { + "type": "string", + "format": "uri" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2ApplicationSecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type", + "flow", + "tokenUrl" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "oauth2" + ] + }, + "flow": { + "type": "string", + "enum": [ + "application" + ] + }, + "scopes": { + "$ref": "#/definitions/oauth2Scopes" + }, + "tokenUrl": { + "type": "string", + "format": "uri" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2AccessCodeSecurity": { + "type": "object", + 
"additionalProperties": false, + "required": [ + "type", + "flow", + "authorizationUrl", + "tokenUrl" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "oauth2" + ] + }, + "flow": { + "type": "string", + "enum": [ + "accessCode" + ] + }, + "scopes": { + "$ref": "#/definitions/oauth2Scopes" + }, + "authorizationUrl": { + "type": "string", + "format": "uri" + }, + "tokenUrl": { + "type": "string", + "format": "uri" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2Scopes": { + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "mediaTypeList": { + "type": "array", + "items": { + "$ref": "#/definitions/mimeType" + }, + "uniqueItems": true + }, + "parametersList": { + "type": "array", + "description": "The parameters needed to send a valid API call.", + "additionalItems": false, + "items": { + "oneOf": [ + { + "$ref": "#/definitions/parameter" + }, + { + "$ref": "#/definitions/jsonReference" + } + ] + }, + "uniqueItems": true + }, + "schemesList": { + "type": "array", + "description": "The transfer protocol of the API.", + "items": { + "type": "string", + "enum": [ + "http", + "https", + "ws", + "wss" + ] + }, + "uniqueItems": true + }, + "collectionFormat": { + "type": "string", + "enum": [ + "csv", + "ssv", + "tsv", + "pipes" + ], + "default": "csv" + }, + "collectionFormatWithMulti": { + "type": "string", + "enum": [ + "csv", + "ssv", + "tsv", + "pipes", + "multi" + ], + "default": "csv" + }, + "title": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/title" + }, + "description": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/description" + }, + "default": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/default" + }, + "multipleOf": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/multipleOf" + }, + "maximum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/maximum" + }, + "exclusiveMaximum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMaximum" + }, + "minimum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/minimum" + }, + "exclusiveMinimum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMinimum" + }, + "maxLength": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minLength": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "pattern": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/pattern" + }, + "maxItems": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minItems": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "uniqueItems": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/uniqueItems" + }, + "enum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/enum" + }, + "jsonReference": { + "type": "object", + "required": [ + "$ref" + ], + "additionalProperties": false, + "properties": { + "$ref": { + "type": "string" + } + } + } + } + }, + "tests": [ + { + "description": "Example petsore", + "data": { + "swagger": "2.0", + "info": { + "description": "This is a sample server Petstore server. You can find out more about Swagger at [http://swagger.io](http://swagger.io) or on [irc.freenode.net, #swagger](http://swagger.io/irc/). 
For this sample, you can use the api key `special-key` to test the authorization filters.", + "version": "1.0.0", + "title": "Swagger Petstore", + "termsOfService": "http://swagger.io/terms/", + "contact": { + "email": "apiteam@swagger.io" + }, + "license": { + "name": "Apache 2.0", + "url": "http://www.apache.org/licenses/LICENSE-2.0.html" + } + }, + "host": "petstore.swagger.io", + "basePath": "/v2", + "tags": [ + { + "name": "pet", + "description": "Everything about your Pets", + "externalDocs": { + "description": "Find out more", + "url": "http://swagger.io" + } + }, + { + "name": "store", + "description": "Access to Petstore orders" + }, + { + "name": "user", + "description": "Operations about user", + "externalDocs": { + "description": "Find out more about our store", + "url": "http://swagger.io" + } + } + ], + "schemes": [ + "http" + ], + "paths": { + "/pet": { + "post": { + "tags": [ + "pet" + ], + "summary": "Add a new pet to the store", + "description": "", + "operationId": "addPet", + "consumes": [ + "application/json", + "application/xml" + ], + "produces": [ + "application/xml", + "application/json" + ], + "parameters": [ + { + "in": "body", + "name": "body", + "description": "Pet object that needs to be added to the store", + "required": true, + "schema": { + "$ref": "#/definitions/Pet" + } + } + ], + "responses": { + "405": { + "description": "Invalid input" + } + }, + "security": [ + { + "petstore_auth": [ + "write:pets", + "read:pets" + ] + } + ] + }, + "put": { + "tags": [ + "pet" + ], + "summary": "Update an existing pet", + "description": "", + "operationId": "updatePet", + "consumes": [ + "application/json", + "application/xml" + ], + "produces": [ + "application/xml", + "application/json" + ], + "parameters": [ + { + "in": "body", + "name": "body", + "description": "Pet object that needs to be added to the store", + "required": true, + "schema": { + "$ref": "#/definitions/Pet" + } + } + ], + "responses": { + "400": { + "description": "Invalid ID supplied" + }, + "404": { + "description": "Pet not found" + }, + "405": { + "description": "Validation exception" + } + }, + "security": [ + { + "petstore_auth": [ + "write:pets", + "read:pets" + ] + } + ] + } + }, + "/pet/findByStatus": { + "get": { + "tags": [ + "pet" + ], + "summary": "Finds Pets by status", + "description": "Multiple status values can be provided with comma separated strings", + "operationId": "findPetsByStatus", + "produces": [ + "application/xml", + "application/json" + ], + "parameters": [ + { + "name": "status", + "in": "query", + "description": "Status values that need to be considered for filter", + "required": true, + "type": "array", + "items": { + "type": "string", + "enum": [ + "available", + "pending", + "sold" + ], + "default": "available" + }, + "collectionFormat": "multi" + } + ], + "responses": { + "200": { + "description": "successful operation", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/Pet" + } + } + }, + "400": { + "description": "Invalid status value" + } + }, + "security": [ + { + "petstore_auth": [ + "write:pets", + "read:pets" + ] + } + ] + } + }, + "/pet/findByTags": { + "get": { + "tags": [ + "pet" + ], + "summary": "Finds Pets by tags", + "description": "Muliple tags can be provided with comma separated strings. 
Use tag1, tag2, tag3 for testing.", + "operationId": "findPetsByTags", + "produces": [ + "application/xml", + "application/json" + ], + "parameters": [ + { + "name": "tags", + "in": "query", + "description": "Tags to filter by", + "required": true, + "type": "array", + "items": { + "type": "string" + }, + "collectionFormat": "multi" + } + ], + "responses": { + "200": { + "description": "successful operation", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/Pet" + } + } + }, + "400": { + "description": "Invalid tag value" + } + }, + "security": [ + { + "petstore_auth": [ + "write:pets", + "read:pets" + ] + } + ], + "deprecated": true + } + }, + "/pet/{petId}": { + "get": { + "tags": [ + "pet" + ], + "summary": "Find pet by ID", + "description": "Returns a single pet", + "operationId": "getPetById", + "produces": [ + "application/xml", + "application/json" + ], + "parameters": [ + { + "name": "petId", + "in": "path", + "description": "ID of pet to return", + "required": true, + "type": "integer", + "format": "int64" + } + ], + "responses": { + "200": { + "description": "successful operation", + "schema": { + "$ref": "#/definitions/Pet" + } + }, + "400": { + "description": "Invalid ID supplied" + }, + "404": { + "description": "Pet not found" + } + }, + "security": [ + { + "api_key": [] + } + ] + }, + "post": { + "tags": [ + "pet" + ], + "summary": "Updates a pet in the store with form data", + "description": "", + "operationId": "updatePetWithForm", + "consumes": [ + "application/x-www-form-urlencoded" + ], + "produces": [ + "application/xml", + "application/json" + ], + "parameters": [ + { + "name": "petId", + "in": "path", + "description": "ID of pet that needs to be updated", + "required": true, + "type": "integer", + "format": "int64" + }, + { + "name": "name", + "in": "formData", + "description": "Updated name of the pet", + "required": false, + "type": "string" + }, + { + "name": "status", + "in": "formData", + "description": "Updated status of the pet", + "required": false, + "type": "string" + } + ], + "responses": { + "405": { + "description": "Invalid input" + } + }, + "security": [ + { + "petstore_auth": [ + "write:pets", + "read:pets" + ] + } + ] + }, + "delete": { + "tags": [ + "pet" + ], + "summary": "Deletes a pet", + "description": "", + "operationId": "deletePet", + "produces": [ + "application/xml", + "application/json" + ], + "parameters": [ + { + "name": "api_key", + "in": "header", + "required": false, + "type": "string" + }, + { + "name": "petId", + "in": "path", + "description": "Pet id to delete", + "required": true, + "type": "integer", + "format": "int64" + } + ], + "responses": { + "400": { + "description": "Invalid ID supplied" + }, + "404": { + "description": "Pet not found" + } + }, + "security": [ + { + "petstore_auth": [ + "write:pets", + "read:pets" + ] + } + ] + } + }, + "/pet/{petId}/uploadImage": { + "post": { + "tags": [ + "pet" + ], + "summary": "uploads an image", + "description": "", + "operationId": "uploadFile", + "consumes": [ + "multipart/form-data" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "name": "petId", + "in": "path", + "description": "ID of pet to update", + "required": true, + "type": "integer", + "format": "int64" + }, + { + "name": "additionalMetadata", + "in": "formData", + "description": "Additional data to pass to server", + "required": false, + "type": "string" + }, + { + "name": "file", + "in": "formData", + "description": "file to upload", + "required": false, + "type": "file" + } + 
], + "responses": { + "200": { + "description": "successful operation", + "schema": { + "$ref": "#/definitions/ApiResponse" + } + } + }, + "security": [ + { + "petstore_auth": [ + "write:pets", + "read:pets" + ] + } + ] + } + }, + "/store/inventory": { + "get": { + "tags": [ + "store" + ], + "summary": "Returns pet inventories by status", + "description": "Returns a map of status codes to quantities", + "operationId": "getInventory", + "produces": [ + "application/json" + ], + "parameters": [], + "responses": { + "200": { + "description": "successful operation", + "schema": { + "type": "object", + "additionalProperties": { + "type": "integer", + "format": "int32" + } + } + } + }, + "security": [ + { + "api_key": [] + } + ] + } + }, + "/store/order": { + "post": { + "tags": [ + "store" + ], + "summary": "Place an order for a pet", + "description": "", + "operationId": "placeOrder", + "produces": [ + "application/xml", + "application/json" + ], + "parameters": [ + { + "in": "body", + "name": "body", + "description": "order placed for purchasing the pet", + "required": true, + "schema": { + "$ref": "#/definitions/Order" + } + } + ], + "responses": { + "200": { + "description": "successful operation", + "schema": { + "$ref": "#/definitions/Order" + } + }, + "400": { + "description": "Invalid Order" + } + } + } + }, + "/store/order/{orderId}": { + "get": { + "tags": [ + "store" + ], + "summary": "Find purchase order by ID", + "description": "For valid response try integer IDs with value >= 1 and <= 10. Other values will generated exceptions", + "operationId": "getOrderById", + "produces": [ + "application/xml", + "application/json" + ], + "parameters": [ + { + "name": "orderId", + "in": "path", + "description": "ID of pet that needs to be fetched", + "required": true, + "type": "integer", + "maximum": 10.0, + "minimum": 1.0, + "format": "int64" + } + ], + "responses": { + "200": { + "description": "successful operation", + "schema": { + "$ref": "#/definitions/Order" + } + }, + "400": { + "description": "Invalid ID supplied" + }, + "404": { + "description": "Order not found" + } + } + }, + "delete": { + "tags": [ + "store" + ], + "summary": "Delete purchase order by ID", + "description": "For valid response try integer IDs with positive integer value. 
Negative or non-integer values will generate API errors", + "operationId": "deleteOrder", + "produces": [ + "application/xml", + "application/json" + ], + "parameters": [ + { + "name": "orderId", + "in": "path", + "description": "ID of the order that needs to be deleted", + "required": true, + "type": "integer", + "minimum": 1.0, + "format": "int64" + } + ], + "responses": { + "400": { + "description": "Invalid ID supplied" + }, + "404": { + "description": "Order not found" + } + } + } + }, + "/user": { + "post": { + "tags": [ + "user" + ], + "summary": "Create user", + "description": "This can only be done by the logged in user.", + "operationId": "createUser", + "produces": [ + "application/xml", + "application/json" + ], + "parameters": [ + { + "in": "body", + "name": "body", + "description": "Created user object", + "required": true, + "schema": { + "$ref": "#/definitions/User" + } + } + ], + "responses": { + "default": { + "description": "successful operation" + } + } + } + }, + "/user/createWithArray": { + "post": { + "tags": [ + "user" + ], + "summary": "Creates list of users with given input array", + "description": "", + "operationId": "createUsersWithArrayInput", + "produces": [ + "application/xml", + "application/json" + ], + "parameters": [ + { + "in": "body", + "name": "body", + "description": "List of user object", + "required": true, + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/User" + } + } + } + ], + "responses": { + "default": { + "description": "successful operation" + } + } + } + }, + "/user/createWithList": { + "post": { + "tags": [ + "user" + ], + "summary": "Creates list of users with given input array", + "description": "", + "operationId": "createUsersWithListInput", + "produces": [ + "application/xml", + "application/json" + ], + "parameters": [ + { + "in": "body", + "name": "body", + "description": "List of user object", + "required": true, + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/User" + } + } + } + ], + "responses": { + "default": { + "description": "successful operation" + } + } + } + }, + "/user/login": { + "get": { + "tags": [ + "user" + ], + "summary": "Logs user into the system", + "description": "", + "operationId": "loginUser", + "produces": [ + "application/xml", + "application/json" + ], + "parameters": [ + { + "name": "username", + "in": "query", + "description": "The user name for login", + "required": true, + "type": "string" + }, + { + "name": "password", + "in": "query", + "description": "The password for login in clear text", + "required": true, + "type": "string" + } + ], + "responses": { + "200": { + "description": "successful operation", + "schema": { + "type": "string" + }, + "headers": { + "X-Rate-Limit": { + "type": "integer", + "format": "int32", + "description": "calls per hour allowed by the user" + }, + "X-Expires-After": { + "type": "string", + "format": "date-time", + "description": "date in UTC when token expires" + } + } + }, + "400": { + "description": "Invalid username/password supplied" + } + } + } + }, + "/user/logout": { + "get": { + "tags": [ + "user" + ], + "summary": "Logs out current logged in user session", + "description": "", + "operationId": "logoutUser", + "produces": [ + "application/xml", + "application/json" + ], + "parameters": [], + "responses": { + "default": { + "description": "successful operation" + } + } + } + }, + "/user/{username}": { + "get": { + "tags": [ + "user" + ], + "summary": "Get user by user name", + "description": "", + "operationId": 
"getUserByName", + "produces": [ + "application/xml", + "application/json" + ], + "parameters": [ + { + "name": "username", + "in": "path", + "description": "The name that needs to be fetched. Use user1 for testing. ", + "required": true, + "type": "string" + } + ], + "responses": { + "200": { + "description": "successful operation", + "schema": { + "$ref": "#/definitions/User" + } + }, + "400": { + "description": "Invalid username supplied" + }, + "404": { + "description": "User not found" + } + } + }, + "put": { + "tags": [ + "user" + ], + "summary": "Updated user", + "description": "This can only be done by the logged in user.", + "operationId": "updateUser", + "produces": [ + "application/xml", + "application/json" + ], + "parameters": [ + { + "name": "username", + "in": "path", + "description": "name that need to be updated", + "required": true, + "type": "string" + }, + { + "in": "body", + "name": "body", + "description": "Updated user object", + "required": true, + "schema": { + "$ref": "#/definitions/User" + } + } + ], + "responses": { + "400": { + "description": "Invalid user supplied" + }, + "404": { + "description": "User not found" + } + } + }, + "delete": { + "tags": [ + "user" + ], + "summary": "Delete user", + "description": "This can only be done by the logged in user.", + "operationId": "deleteUser", + "produces": [ + "application/xml", + "application/json" + ], + "parameters": [ + { + "name": "username", + "in": "path", + "description": "The name that needs to be deleted", + "required": true, + "type": "string" + } + ], + "responses": { + "400": { + "description": "Invalid username supplied" + }, + "404": { + "description": "User not found" + } + } + } + } + }, + "securityDefinitions": { + "petstore_auth": { + "type": "oauth2", + "authorizationUrl": "http://petstore.swagger.io/oauth/dialog", + "flow": "implicit", + "scopes": { + "write:pets": "modify pets in your account", + "read:pets": "read your pets" + } + }, + "api_key": { + "type": "apiKey", + "name": "api_key", + "in": "header" + } + }, + "definitions": { + "Order": { + "type": "object", + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "petId": { + "type": "integer", + "format": "int64" + }, + "quantity": { + "type": "integer", + "format": "int32" + }, + "shipDate": { + "type": "string", + "format": "date-time" + }, + "status": { + "type": "string", + "description": "Order Status", + "enum": [ + "placed", + "approved", + "delivered" + ] + }, + "complete": { + "type": "boolean", + "default": false + } + }, + "xml": { + "name": "Order" + } + }, + "Category": { + "type": "object", + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "name": { + "type": "string" + } + }, + "xml": { + "name": "Category" + } + }, + "User": { + "type": "object", + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "username": { + "type": "string" + }, + "firstName": { + "type": "string" + }, + "lastName": { + "type": "string" + }, + "email": { + "type": "string" + }, + "password": { + "type": "string" + }, + "phone": { + "type": "string" + }, + "userStatus": { + "type": "integer", + "format": "int32", + "description": "User Status" + } + }, + "xml": { + "name": "User" + } + }, + "Tag": { + "type": "object", + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "name": { + "type": "string" + } + }, + "xml": { + "name": "Tag" + } + }, + "Pet": { + "type": "object", + "required": [ + "name", + "photoUrls" + ], + "properties": { + "id": { + "type": 
"integer", + "format": "int64" + }, + "category": { + "$ref": "#/definitions/Category" + }, + "name": { + "type": "string", + "example": "doggie" + }, + "photoUrls": { + "type": "array", + "xml": { + "name": "photoUrl", + "wrapped": true + }, + "items": { + "type": "string" + } + }, + "tags": { + "type": "array", + "xml": { + "name": "tag", + "wrapped": true + }, + "items": { + "$ref": "#/definitions/Tag" + } + }, + "status": { + "type": "string", + "description": "pet status in the store", + "enum": [ + "available", + "pending", + "sold" + ] + } + }, + "xml": { + "name": "Pet" + } + }, + "ApiResponse": { + "type": "object", + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "type": { + "type": "string" + }, + "message": { + "type": "string" + } + } + } + }, + "externalDocs": { + "description": "Find out more about Swagger", + "url": "http://swagger.io" + } + }, + "valid": true + } + ] + } +] diff --git a/.venv/lib/python3.12/site-packages/jsonschema/benchmarks/json_schema_test_suite.py b/.venv/lib/python3.12/site-packages/jsonschema/benchmarks/json_schema_test_suite.py new file mode 100644 index 00000000..905fb6a3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/jsonschema/benchmarks/json_schema_test_suite.py @@ -0,0 +1,12 @@ +""" +A performance benchmark using the official test suite. + +This benchmarks jsonschema using every valid example in the +JSON-Schema-Test-Suite. It will take some time to complete. +""" +from pyperf import Runner + +from jsonschema.tests._suite import Suite + +if __name__ == "__main__": + Suite().benchmark(runner=Runner()) diff --git a/.venv/lib/python3.12/site-packages/jsonschema/benchmarks/nested_schemas.py b/.venv/lib/python3.12/site-packages/jsonschema/benchmarks/nested_schemas.py new file mode 100644 index 00000000..b025c47c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/jsonschema/benchmarks/nested_schemas.py @@ -0,0 +1,56 @@ +""" +Validating highly nested schemas shouldn't cause exponential time blowups. + +See https://github.com/python-jsonschema/jsonschema/issues/1097. +""" +from itertools import cycle + +from jsonschema.validators import validator_for + +metaschemaish = { + "$id": "https://example.com/draft/2020-12/schema/strict", + "$schema": "https://json-schema.org/draft/2020-12/schema", + + "$vocabulary": { + "https://json-schema.org/draft/2020-12/vocab/core": True, + "https://json-schema.org/draft/2020-12/vocab/applicator": True, + "https://json-schema.org/draft/2020-12/vocab/unevaluated": True, + "https://json-schema.org/draft/2020-12/vocab/validation": True, + "https://json-schema.org/draft/2020-12/vocab/meta-data": True, + "https://json-schema.org/draft/2020-12/vocab/format-annotation": True, + "https://json-schema.org/draft/2020-12/vocab/content": True, + }, + "$dynamicAnchor": "meta", + + "$ref": "https://json-schema.org/draft/2020-12/schema", + "unevaluatedProperties": False, +} + + +def nested_schema(levels): + """ + Produce a schema which validates deeply nested objects and arrays. 
+ """ + + names = cycle(["foo", "bar", "baz", "quux", "spam", "eggs"]) + schema = {"type": "object", "properties": {"ham": {"type": "string"}}} + for _, name in zip(range(levels - 1), names): + schema = {"type": "object", "properties": {name: schema}} + return schema + + +validator = validator_for(metaschemaish)(metaschemaish) + +if __name__ == "__main__": + from pyperf import Runner + runner = Runner() + + not_nested = nested_schema(levels=1) + runner.bench_func("not nested", lambda: validator.is_valid(not_nested)) + + for levels in range(1, 11, 3): + schema = nested_schema(levels=levels) + runner.bench_func( + f"nested * {levels}", + lambda schema=schema: validator.is_valid(schema), + ) diff --git a/.venv/lib/python3.12/site-packages/jsonschema/benchmarks/subcomponents.py b/.venv/lib/python3.12/site-packages/jsonschema/benchmarks/subcomponents.py new file mode 100644 index 00000000..6d78c7be --- /dev/null +++ b/.venv/lib/python3.12/site-packages/jsonschema/benchmarks/subcomponents.py @@ -0,0 +1,42 @@ +""" +A benchmark which tries to compare the possible slow subparts of validation. +""" +from referencing import Registry +from referencing.jsonschema import DRAFT202012 +from rpds import HashTrieMap, HashTrieSet + +from jsonschema import Draft202012Validator + +schema = { + "type": "array", + "minLength": 1, + "maxLength": 1, + "items": {"type": "integer"}, +} + +hmap = HashTrieMap() +hset = HashTrieSet() + +registry = Registry() + +v = Draft202012Validator(schema) + + +def registry_data_structures(): + return hmap.insert("foo", "bar"), hset.insert("foo") + + +def registry_add(): + resource = DRAFT202012.create_resource(schema) + return registry.with_resource(uri="urn:example", resource=resource) + + +if __name__ == "__main__": + from pyperf import Runner + runner = Runner() + + runner.bench_func("HashMap/HashSet insertion", registry_data_structures) + runner.bench_func("Registry insertion", registry_add) + runner.bench_func("Success", lambda: v.is_valid([1])) + runner.bench_func("Failure", lambda: v.is_valid(["foo"])) + runner.bench_func("Metaschema validation", lambda: v.check_schema(schema)) diff --git a/.venv/lib/python3.12/site-packages/jsonschema/benchmarks/unused_registry.py b/.venv/lib/python3.12/site-packages/jsonschema/benchmarks/unused_registry.py new file mode 100644 index 00000000..7b272c23 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/jsonschema/benchmarks/unused_registry.py @@ -0,0 +1,35 @@ +""" +An unused schema registry should not cause slower validation. + +"Unused" here means one where no reference resolution is occurring anyhow. + +See https://github.com/python-jsonschema/jsonschema/issues/1088. 
+""" +from pyperf import Runner +from referencing import Registry +from referencing.jsonschema import DRAFT201909 + +from jsonschema import Draft201909Validator + +registry = Registry().with_resource( + "urn:example:foo", + DRAFT201909.create_resource({}), +) + +schema = {"$ref": "https://json-schema.org/draft/2019-09/schema"} +instance = {"maxLength": 4} + +no_registry = Draft201909Validator(schema) +with_useless_registry = Draft201909Validator(schema, registry=registry) + +if __name__ == "__main__": + runner = Runner() + + runner.bench_func( + "no registry", + lambda: no_registry.is_valid(instance), + ) + runner.bench_func( + "useless registry", + lambda: with_useless_registry.is_valid(instance), + ) diff --git a/.venv/lib/python3.12/site-packages/jsonschema/benchmarks/useless_applicator_schemas.py b/.venv/lib/python3.12/site-packages/jsonschema/benchmarks/useless_applicator_schemas.py new file mode 100644 index 00000000..f3229c0b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/jsonschema/benchmarks/useless_applicator_schemas.py @@ -0,0 +1,106 @@ + +""" +A benchmark for validation of applicators containing lots of useless schemas. + +Signals a small possible optimization to remove all such schemas ahead of time. +""" + +from pyperf import Runner + +from jsonschema import Draft202012Validator as Validator + +NUM_USELESS = 100000 + +subschema = {"const": 37} + +valid = 37 +invalid = 12 + +baseline = Validator(subschema) + + +# These should be indistinguishable from just `subschema` +by_name = { + "single subschema": { + "anyOf": Validator({"anyOf": [subschema]}), + "allOf": Validator({"allOf": [subschema]}), + "oneOf": Validator({"oneOf": [subschema]}), + }, + "redundant subschemas": { + "anyOf": Validator({"anyOf": [subschema] * NUM_USELESS}), + "allOf": Validator({"allOf": [subschema] * NUM_USELESS}), + }, + "useless successful subschemas (beginning)": { + "anyOf": Validator({"anyOf": [subschema, *[True] * NUM_USELESS]}), + "allOf": Validator({"allOf": [subschema, *[True] * NUM_USELESS]}), + }, + "useless successful subschemas (middle)": { + "anyOf": Validator( + { + "anyOf": [ + *[True] * (NUM_USELESS // 2), + subschema, + *[True] * (NUM_USELESS // 2), + ], + }, + ), + "allOf": Validator( + { + "allOf": [ + *[True] * (NUM_USELESS // 2), + subschema, + *[True] * (NUM_USELESS // 2), + ], + }, + ), + }, + "useless successful subschemas (end)": { + "anyOf": Validator({"anyOf": [*[True] * NUM_USELESS, subschema]}), + "allOf": Validator({"allOf": [*[True] * NUM_USELESS, subschema]}), + }, + "useless failing subschemas (beginning)": { + "anyOf": Validator({"anyOf": [subschema, *[False] * NUM_USELESS]}), + "oneOf": Validator({"oneOf": [subschema, *[False] * NUM_USELESS]}), + }, + "useless failing subschemas (middle)": { + "anyOf": Validator( + { + "anyOf": [ + *[False] * (NUM_USELESS // 2), + subschema, + *[False] * (NUM_USELESS // 2), + ], + }, + ), + "oneOf": Validator( + { + "oneOf": [ + *[False] * (NUM_USELESS // 2), + subschema, + *[False] * (NUM_USELESS // 2), + ], + }, + ), + }, + "useless failing subschemas (end)": { + "anyOf": Validator({"anyOf": [*[False] * NUM_USELESS, subschema]}), + "oneOf": Validator({"oneOf": [*[False] * NUM_USELESS, subschema]}), + }, +} + +if __name__ == "__main__": + runner = Runner() + + runner.bench_func("baseline valid", lambda: baseline.is_valid(valid)) + runner.bench_func("baseline invalid", lambda: baseline.is_valid(invalid)) + + for group, applicators in by_name.items(): + for applicator, validator in applicators.items(): + runner.bench_func( + 
f"{group}: {applicator} valid", + lambda validator=validator: validator.is_valid(valid), + ) + runner.bench_func( + f"{group}: {applicator} invalid", + lambda validator=validator: validator.is_valid(invalid), + ) diff --git a/.venv/lib/python3.12/site-packages/jsonschema/benchmarks/useless_keywords.py b/.venv/lib/python3.12/site-packages/jsonschema/benchmarks/useless_keywords.py new file mode 100644 index 00000000..50f43598 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/jsonschema/benchmarks/useless_keywords.py @@ -0,0 +1,32 @@ +""" +A benchmark for validation of schemas containing lots of useless keywords. + +Checks we filter them out once, ahead of time. +""" + +from pyperf import Runner + +from jsonschema import Draft202012Validator + +NUM_USELESS = 100000 +schema = dict( + [ + ("not", {"const": 42}), + *((str(i), i) for i in range(NUM_USELESS)), + ("type", "integer"), + *((str(i), i) for i in range(NUM_USELESS, NUM_USELESS)), + ("minimum", 37), + ], +) +validator = Draft202012Validator(schema) + +valid = 3737 +invalid = 12 + + +if __name__ == "__main__": + runner = Runner() + runner.bench_func("beginning of schema", lambda: validator.is_valid(42)) + runner.bench_func("middle of schema", lambda: validator.is_valid("foo")) + runner.bench_func("end of schema", lambda: validator.is_valid(12)) + runner.bench_func("valid", lambda: validator.is_valid(3737)) diff --git a/.venv/lib/python3.12/site-packages/jsonschema/benchmarks/validator_creation.py b/.venv/lib/python3.12/site-packages/jsonschema/benchmarks/validator_creation.py new file mode 100644 index 00000000..4baeb3a3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/jsonschema/benchmarks/validator_creation.py @@ -0,0 +1,14 @@ +from pyperf import Runner + +from jsonschema import Draft202012Validator + +schema = { + "type": "array", + "minLength": 1, + "maxLength": 1, + "items": {"type": "integer"}, +} + + +if __name__ == "__main__": + Runner().bench_func("validator creation", Draft202012Validator, schema) diff --git a/.venv/lib/python3.12/site-packages/jsonschema/cli.py b/.venv/lib/python3.12/site-packages/jsonschema/cli.py new file mode 100644 index 00000000..cf6298eb --- /dev/null +++ b/.venv/lib/python3.12/site-packages/jsonschema/cli.py @@ -0,0 +1,296 @@ +""" +The ``jsonschema`` command line. +""" + +from importlib import metadata +from json import JSONDecodeError +from textwrap import dedent +import argparse +import json +import sys +import traceback +import warnings + +try: + from pkgutil import resolve_name +except ImportError: + from pkgutil_resolve_name import resolve_name # type: ignore[no-redef] + +from attrs import define, field + +from jsonschema.exceptions import SchemaError +from jsonschema.validators import _RefResolver, validator_for + +warnings.warn( + ( + "The jsonschema CLI is deprecated and will be removed in a future " + "version. 
Please use check-jsonschema instead, which can be installed " + "from https://pypi.org/project/check-jsonschema/" + ), + DeprecationWarning, + stacklevel=2, +) + + +class _CannotLoadFile(Exception): + pass + + +@define +class _Outputter: + + _formatter = field() + _stdout = field() + _stderr = field() + + @classmethod + def from_arguments(cls, arguments, stdout, stderr): + if arguments["output"] == "plain": + formatter = _PlainFormatter(arguments["error_format"]) + elif arguments["output"] == "pretty": + formatter = _PrettyFormatter() + return cls(formatter=formatter, stdout=stdout, stderr=stderr) + + def load(self, path): + try: + file = open(path) # noqa: SIM115, PTH123 + except FileNotFoundError as error: + self.filenotfound_error(path=path, exc_info=sys.exc_info()) + raise _CannotLoadFile() from error + + with file: + try: + return json.load(file) + except JSONDecodeError as error: + self.parsing_error(path=path, exc_info=sys.exc_info()) + raise _CannotLoadFile() from error + + def filenotfound_error(self, **kwargs): + self._stderr.write(self._formatter.filenotfound_error(**kwargs)) + + def parsing_error(self, **kwargs): + self._stderr.write(self._formatter.parsing_error(**kwargs)) + + def validation_error(self, **kwargs): + self._stderr.write(self._formatter.validation_error(**kwargs)) + + def validation_success(self, **kwargs): + self._stdout.write(self._formatter.validation_success(**kwargs)) + + +@define +class _PrettyFormatter: + + _ERROR_MSG = dedent( + """\ + ===[{type}]===({path})=== + + {body} + ----------------------------- + """, + ) + _SUCCESS_MSG = "===[SUCCESS]===({path})===\n" + + def filenotfound_error(self, path, exc_info): + return self._ERROR_MSG.format( + path=path, + type="FileNotFoundError", + body=f"{path!r} does not exist.", + ) + + def parsing_error(self, path, exc_info): + exc_type, exc_value, exc_traceback = exc_info + exc_lines = "".join( + traceback.format_exception(exc_type, exc_value, exc_traceback), + ) + return self._ERROR_MSG.format( + path=path, + type=exc_type.__name__, + body=exc_lines, + ) + + def validation_error(self, instance_path, error): + return self._ERROR_MSG.format( + path=instance_path, + type=error.__class__.__name__, + body=error, + ) + + def validation_success(self, instance_path): + return self._SUCCESS_MSG.format(path=instance_path) + + +@define +class _PlainFormatter: + + _error_format = field() + + def filenotfound_error(self, path, exc_info): + return f"{path!r} does not exist.\n" + + def parsing_error(self, path, exc_info): + return "Failed to parse {}: {}\n".format( + "<stdin>" if path == "<stdin>" else repr(path), + exc_info[1], + ) + + def validation_error(self, instance_path, error): + return self._error_format.format(file_name=instance_path, error=error) + + def validation_success(self, instance_path): + return "" + + +def _resolve_name_with_default(name): + if "." not in name: + name = "jsonschema." + name + return resolve_name(name) + + +parser = argparse.ArgumentParser( + description="JSON Schema Validation CLI", +) +parser.add_argument( + "-i", "--instance", + action="append", + dest="instances", + help=""" + a path to a JSON instance (i.e. filename.json) to validate (may + be specified multiple times). If no instances are provided via this + option, one will be expected on standard input. + """, +) +parser.add_argument( + "-F", "--error-format", + help=""" + the format to use for each validation error message, specified + in a form suitable for str.format. 
This string will be passed + one formatted object named 'error' for each ValidationError. + Only provide this option when using --output=plain, which is the + default. If this argument is unprovided and --output=plain is + used, a simple default representation will be used. + """, +) +parser.add_argument( + "-o", "--output", + choices=["plain", "pretty"], + default="plain", + help=""" + an output format to use. 'plain' (default) will produce minimal + text with one line for each error, while 'pretty' will produce + more detailed human-readable output on multiple lines. + """, +) +parser.add_argument( + "-V", "--validator", + type=_resolve_name_with_default, + help=""" + the fully qualified object name of a validator to use, or, for + validators that are registered with jsonschema, simply the name + of the class. + """, +) +parser.add_argument( + "--base-uri", + help=""" + a base URI to assign to the provided schema, even if it does not + declare one (via e.g. $id). This option can be used if you wish to + resolve relative references to a particular URI (or local path) + """, +) +parser.add_argument( + "--version", + action="version", + version=metadata.version("jsonschema"), +) +parser.add_argument( + "schema", + help="the path to a JSON Schema to validate with (i.e. schema.json)", +) + + +def parse_args(args): # noqa: D103 + arguments = vars(parser.parse_args(args=args or ["--help"])) + if arguments["output"] != "plain" and arguments["error_format"]: + raise parser.error( + "--error-format can only be used with --output plain", + ) + if arguments["output"] == "plain" and arguments["error_format"] is None: + arguments["error_format"] = "{error.instance}: {error.message}\n" + return arguments + + +def _validate_instance(instance_path, instance, validator, outputter): + invalid = False + for error in validator.iter_errors(instance): + invalid = True + outputter.validation_error(instance_path=instance_path, error=error) + + if not invalid: + outputter.validation_success(instance_path=instance_path) + return invalid + + +def main(args=sys.argv[1:]): # noqa: D103 + sys.exit(run(arguments=parse_args(args=args))) + + +def run(arguments, stdout=sys.stdout, stderr=sys.stderr, stdin=sys.stdin): # noqa: D103 + outputter = _Outputter.from_arguments( + arguments=arguments, + stdout=stdout, + stderr=stderr, + ) + + try: + schema = outputter.load(arguments["schema"]) + except _CannotLoadFile: + return 1 + + Validator = arguments["validator"] + if Validator is None: + Validator = validator_for(schema) + + try: + Validator.check_schema(schema) + except SchemaError as error: + outputter.validation_error( + instance_path=arguments["schema"], + error=error, + ) + return 1 + + if arguments["instances"]: + load, instances = outputter.load, arguments["instances"] + else: + def load(_): + try: + return json.load(stdin) + except JSONDecodeError as error: + outputter.parsing_error( + path="<stdin>", exc_info=sys.exc_info(), + ) + raise _CannotLoadFile() from error + instances = ["<stdin>"] + + resolver = _RefResolver( + base_uri=arguments["base_uri"], + referrer=schema, + ) if arguments["base_uri"] is not None else None + + validator = Validator(schema, resolver=resolver) + exit_code = 0 + for each in instances: + try: + instance = load(each) + except _CannotLoadFile: + exit_code = 1 + else: + exit_code |= _validate_instance( + instance_path=each, + instance=instance, + validator=validator, + outputter=outputter, + ) + + return exit_code diff --git a/.venv/lib/python3.12/site-packages/jsonschema/exceptions.py 
b/.venv/lib/python3.12/site-packages/jsonschema/exceptions.py new file mode 100644 index 00000000..78da49fc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/jsonschema/exceptions.py @@ -0,0 +1,487 @@ +""" +Validation errors, and some surrounding helpers. +""" +from __future__ import annotations + +from collections import defaultdict, deque +from pprint import pformat +from textwrap import dedent, indent +from typing import TYPE_CHECKING, Any, ClassVar +import heapq +import itertools +import warnings + +from attrs import define +from referencing.exceptions import Unresolvable as _Unresolvable + +from jsonschema import _utils + +if TYPE_CHECKING: + from collections.abc import Iterable, Mapping, MutableMapping, Sequence + + from jsonschema import _types + +WEAK_MATCHES: frozenset[str] = frozenset(["anyOf", "oneOf"]) +STRONG_MATCHES: frozenset[str] = frozenset() + +_unset = _utils.Unset() + + +def _pretty(thing: Any, prefix: str): + """ + Format something for an error message as prettily as we currently can. + """ + return indent(pformat(thing, width=72, sort_dicts=False), prefix).lstrip() + + +def __getattr__(name): + if name == "RefResolutionError": + warnings.warn( + _RefResolutionError._DEPRECATION_MESSAGE, + DeprecationWarning, + stacklevel=2, + ) + return _RefResolutionError + raise AttributeError(f"module {__name__} has no attribute {name}") + + +class _Error(Exception): + + _word_for_schema_in_error_message: ClassVar[str] + _word_for_instance_in_error_message: ClassVar[str] + + def __init__( + self, + message: str, + validator: str = _unset, # type: ignore[assignment] + path: Iterable[str | int] = (), + cause: Exception | None = None, + context=(), + validator_value: Any = _unset, + instance: Any = _unset, + schema: Mapping[str, Any] | bool = _unset, # type: ignore[assignment] + schema_path: Iterable[str | int] = (), + parent: _Error | None = None, + type_checker: _types.TypeChecker = _unset, # type: ignore[assignment] + ) -> None: + super().__init__( + message, + validator, + path, + cause, + context, + validator_value, + instance, + schema, + schema_path, + parent, + ) + self.message = message + self.path = self.relative_path = deque(path) + self.schema_path = self.relative_schema_path = deque(schema_path) + self.context = list(context) + self.cause = self.__cause__ = cause + self.validator = validator + self.validator_value = validator_value + self.instance = instance + self.schema = schema + self.parent = parent + self._type_checker = type_checker + + for error in context: + error.parent = self + + def __repr__(self) -> str: + return f"<{self.__class__.__name__}: {self.message!r}>" + + def __str__(self) -> str: + essential_for_verbose = ( + self.validator, self.validator_value, self.instance, self.schema, + ) + if any(m is _unset for m in essential_for_verbose): + return self.message + + schema_path = _utils.format_as_index( + container=self._word_for_schema_in_error_message, + indices=list(self.relative_schema_path)[:-1], + ) + instance_path = _utils.format_as_index( + container=self._word_for_instance_in_error_message, + indices=self.relative_path, + ) + prefix = 16 * " " + + return dedent( + f"""\ + {self.message} + + Failed validating {self.validator!r} in {schema_path}: + {_pretty(self.schema, prefix=prefix)} + + On {instance_path}: + {_pretty(self.instance, prefix=prefix)} + """.rstrip(), + ) + + @classmethod + def create_from(cls, other: _Error): + return cls(**other._contents()) + + @property + def absolute_path(self) -> Sequence[str | int]: + parent = self.parent + 
if parent is None: + return self.relative_path + + path = deque(self.relative_path) + path.extendleft(reversed(parent.absolute_path)) + return path + + @property + def absolute_schema_path(self) -> Sequence[str | int]: + parent = self.parent + if parent is None: + return self.relative_schema_path + + path = deque(self.relative_schema_path) + path.extendleft(reversed(parent.absolute_schema_path)) + return path + + @property + def json_path(self) -> str: + path = "$" + for elem in self.absolute_path: + if isinstance(elem, int): + path += "[" + str(elem) + "]" + else: + path += "." + elem + return path + + def _set( + self, + type_checker: _types.TypeChecker | None = None, + **kwargs: Any, + ) -> None: + if type_checker is not None and self._type_checker is _unset: + self._type_checker = type_checker + + for k, v in kwargs.items(): + if getattr(self, k) is _unset: + setattr(self, k, v) + + def _contents(self): + attrs = ( + "message", "cause", "context", "validator", "validator_value", + "path", "schema_path", "instance", "schema", "parent", + ) + return {attr: getattr(self, attr) for attr in attrs} + + def _matches_type(self) -> bool: + try: + # We ignore this as we want to simply crash if this happens + expected = self.schema["type"] # type: ignore[index] + except (KeyError, TypeError): + return False + + if isinstance(expected, str): + return self._type_checker.is_type(self.instance, expected) + + return any( + self._type_checker.is_type(self.instance, expected_type) + for expected_type in expected + ) + + +class ValidationError(_Error): + """ + An instance was invalid under a provided schema. + """ + + _word_for_schema_in_error_message = "schema" + _word_for_instance_in_error_message = "instance" + + +class SchemaError(_Error): + """ + A schema was invalid under its corresponding metaschema. + """ + + _word_for_schema_in_error_message = "metaschema" + _word_for_instance_in_error_message = "schema" + + +@define(slots=False) +class _RefResolutionError(Exception): + """ + A ref could not be resolved. + """ + + _DEPRECATION_MESSAGE = ( + "jsonschema.exceptions.RefResolutionError is deprecated as of version " + "4.18.0. If you wish to catch potential reference resolution errors, " + "directly catch referencing.exceptions.Unresolvable." + ) + + _cause: Exception + + def __eq__(self, other): + if self.__class__ is not other.__class__: + return NotImplemented # pragma: no cover -- uncovered but deprecated # noqa: E501 + return self._cause == other._cause + + def __str__(self) -> str: + return str(self._cause) + + +class _WrappedReferencingError(_RefResolutionError, _Unresolvable): # pragma: no cover -- partially uncovered but to be removed # noqa: E501 + def __init__(self, cause: _Unresolvable): + object.__setattr__(self, "_wrapped", cause) + + def __eq__(self, other): + if other.__class__ is self.__class__: + return self._wrapped == other._wrapped + elif other.__class__ is self._wrapped.__class__: + return self._wrapped == other + return NotImplemented + + def __getattr__(self, attr): + return getattr(self._wrapped, attr) + + def __hash__(self): + return hash(self._wrapped) + + def __repr__(self): + return f"<WrappedReferencingError {self._wrapped!r}>" + + def __str__(self): + return f"{self._wrapped.__class__.__name__}: {self._wrapped}" + + +class UndefinedTypeCheck(Exception): + """ + A type checker was asked to check a type it did not have registered. 
+ """ + + def __init__(self, type: str) -> None: + self.type = type + + def __str__(self) -> str: + return f"Type {self.type!r} is unknown to this type checker" + + +class UnknownType(Exception): + """ + A validator was asked to validate an instance against an unknown type. + """ + + def __init__(self, type, instance, schema): + self.type = type + self.instance = instance + self.schema = schema + + def __str__(self): + prefix = 16 * " " + + return dedent( + f"""\ + Unknown type {self.type!r} for validator with schema: + {_pretty(self.schema, prefix=prefix)} + + While checking instance: + {_pretty(self.instance, prefix=prefix)} + """.rstrip(), + ) + + +class FormatError(Exception): + """ + Validating a format failed. + """ + + def __init__(self, message, cause=None): + super().__init__(message, cause) + self.message = message + self.cause = self.__cause__ = cause + + def __str__(self): + return self.message + + +class ErrorTree: + """ + ErrorTrees make it easier to check which validations failed. + """ + + _instance = _unset + + def __init__(self, errors: Iterable[ValidationError] = ()): + self.errors: MutableMapping[str, ValidationError] = {} + self._contents: Mapping[str, ErrorTree] = defaultdict(self.__class__) + + for error in errors: + container = self + for element in error.path: + container = container[element] + container.errors[error.validator] = error + + container._instance = error.instance + + def __contains__(self, index: str | int): + """ + Check whether ``instance[index]`` has any errors. + """ + return index in self._contents + + def __getitem__(self, index): + """ + Retrieve the child tree one level down at the given ``index``. + + If the index is not in the instance that this tree corresponds + to and is not known by this tree, whatever error would be raised + by ``instance.__getitem__`` will be propagated (usually this is + some subclass of `LookupError`. + """ + if self._instance is not _unset and index not in self: + self._instance[index] + return self._contents[index] + + def __setitem__(self, index: str | int, value: ErrorTree): + """ + Add an error to the tree at the given ``index``. + + .. deprecated:: v4.20.0 + + Setting items on an `ErrorTree` is deprecated without replacement. + To populate a tree, provide all of its sub-errors when you + construct the tree. + """ + warnings.warn( + "ErrorTree.__setitem__ is deprecated without replacement.", + DeprecationWarning, + stacklevel=2, + ) + self._contents[index] = value # type: ignore[index] + + def __iter__(self): + """ + Iterate (non-recursively) over the indices in the instance with errors. + """ + return iter(self._contents) + + def __len__(self): + """ + Return the `total_errors`. + """ + return self.total_errors + + def __repr__(self): + total = len(self) + errors = "error" if total == 1 else "errors" + return f"<{self.__class__.__name__} ({total} total {errors})>" + + @property + def total_errors(self): + """ + The total number of errors in the entire tree, including children. + """ + child_errors = sum(len(tree) for _, tree in self._contents.items()) + return len(self.errors) + child_errors + + +def by_relevance(weak=WEAK_MATCHES, strong=STRONG_MATCHES): + """ + Create a key function that can be used to sort errors by relevance. + + Arguments: + weak (set): + a collection of validation keywords to consider to be + "weak". If there are two errors at the same level of the + instance and one is in the set of weak validation keywords, + the other error will take priority. 
By default, :kw:`anyOf` + and :kw:`oneOf` are considered weak keywords and will be + superseded by other same-level validation errors. + + strong (set): + a collection of validation keywords to consider to be + "strong" + + """ + + def relevance(error): + validator = error.validator + return ( # prefer errors which are ... + -len(error.path), # 'deeper' and thereby more specific + error.path, # earlier (for sibling errors) + validator not in weak, # for a non-low-priority keyword + validator in strong, # for a high priority keyword + not error._matches_type(), # at least match the instance's type + ) # otherwise we'll treat them the same + + return relevance + + +relevance = by_relevance() +""" +A key function (e.g. to use with `sorted`) which sorts errors by relevance. + +Example: + +.. code:: python + + sorted(validator.iter_errors(12), key=jsonschema.exceptions.relevance) +""" + + +def best_match(errors, key=relevance): + """ + Try to find an error that appears to be the best match among given errors. + + In general, errors that are higher up in the instance (i.e. for which + `ValidationError.path` is shorter) are considered better matches, + since they indicate "more" is wrong with the instance. + + If the resulting match is either :kw:`oneOf` or :kw:`anyOf`, the + *opposite* assumption is made -- i.e. the deepest error is picked, + since these keywords only need to match once, and any other errors + may not be relevant. + + Arguments: + errors (collections.abc.Iterable): + + the errors to select from. Do not provide a mixture of + errors from different validation attempts (i.e. from + different instances or schemas), since it won't produce + sensical output. + + key (collections.abc.Callable): + + the key to use when sorting errors. See `relevance` and + transitively `by_relevance` for more details (the default is + to sort with the defaults of that function). Changing the + default is only useful if you want to change the function + that rates errors but still want the error context descent + done by this function. + + Returns: + the best matching error, or ``None`` if the iterable was empty + + .. note:: + + This function is a heuristic. Its return value may change for a given + set of inputs from version to version if better heuristics are added. + + """ + errors = iter(errors) + best = next(errors, None) + if best is None: + return + best = max(itertools.chain([best], errors), key=key) + + while best.context: + # Calculate the minimum via nsmallest, because we don't recurse if + # all nested errors have the same relevance (i.e. if min == max == all) + smallest = heapq.nsmallest(2, best.context, key=key) + if len(smallest) == 2 and key(smallest[0]) == key(smallest[1]): # noqa: PLR2004 + return best + best = smallest[0] + return best diff --git a/.venv/lib/python3.12/site-packages/jsonschema/protocols.py b/.venv/lib/python3.12/site-packages/jsonschema/protocols.py new file mode 100644 index 00000000..39e56d0f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/jsonschema/protocols.py @@ -0,0 +1,236 @@ +""" +typing.Protocol classes for jsonschema interfaces. 
+""" + +# for reference material on Protocols, see +# https://www.python.org/dev/peps/pep-0544/ + +from __future__ import annotations + +from typing import ( + TYPE_CHECKING, + Any, + ClassVar, + Iterable, + Protocol, + runtime_checkable, +) + +# in order for Sphinx to resolve references accurately from type annotations, +# it needs to see names like `jsonschema.TypeChecker` +# therefore, only import at type-checking time (to avoid circular references), +# but use `jsonschema` for any types which will otherwise not be resolvable +if TYPE_CHECKING: + from collections.abc import Mapping + + import referencing.jsonschema + + from jsonschema import _typing + from jsonschema.exceptions import ValidationError + import jsonschema + import jsonschema.validators + +# For code authors working on the validator protocol, these are the three +# use-cases which should be kept in mind: +# +# 1. As a protocol class, it can be used in type annotations to describe the +# available methods and attributes of a validator +# 2. It is the source of autodoc for the validator documentation +# 3. It is runtime_checkable, meaning that it can be used in isinstance() +# checks. +# +# Since protocols are not base classes, isinstance() checking is limited in +# its capabilities. See docs on runtime_checkable for detail + + +@runtime_checkable +class Validator(Protocol): + """ + The protocol to which all validator classes adhere. + + Arguments: + + schema: + + The schema that the validator object will validate with. + It is assumed to be valid, and providing + an invalid schema can lead to undefined behavior. See + `Validator.check_schema` to validate a schema first. + + registry: + + a schema registry that will be used for looking up JSON references + + resolver: + + a resolver that will be used to resolve :kw:`$ref` + properties (JSON references). If unprovided, one will be created. + + .. deprecated:: v4.18.0 + + `RefResolver <_RefResolver>` has been deprecated in favor of + `referencing`, and with it, this argument. + + format_checker: + + if provided, a checker which will be used to assert about + :kw:`format` properties present in the schema. If unprovided, + *no* format validation is done, and the presence of format + within schemas is strictly informational. Certain formats + require additional packages to be installed in order to assert + against instances. Ensure you've installed `jsonschema` with + its `extra (optional) dependencies <index:extras>` when + invoking ``pip``. + + .. deprecated:: v4.12.0 + + Subclassing validator classes now explicitly warns this is not part of + their public API. + + """ + + #: An object representing the validator's meta schema (the schema that + #: describes valid schemas in the given version). + META_SCHEMA: ClassVar[Mapping] + + #: A mapping of validation keywords (`str`\s) to functions that + #: validate the keyword with that name. For more information see + #: `creating-validators`. + VALIDATORS: ClassVar[Mapping] + + #: A `jsonschema.TypeChecker` that will be used when validating + #: :kw:`type` keywords in JSON schemas. + TYPE_CHECKER: ClassVar[jsonschema.TypeChecker] + + #: A `jsonschema.FormatChecker` that will be used when validating + #: :kw:`format` keywords in JSON schemas. + FORMAT_CHECKER: ClassVar[jsonschema.FormatChecker] + + #: A function which given a schema returns its ID. 
+ ID_OF: _typing.id_of + + #: The schema that will be used to validate instances + schema: Mapping | bool + + def __init__( + self, + schema: Mapping | bool, + registry: referencing.jsonschema.SchemaRegistry, + format_checker: jsonschema.FormatChecker | None = None, + ) -> None: + ... + + @classmethod + def check_schema(cls, schema: Mapping | bool) -> None: + """ + Validate the given schema against the validator's `META_SCHEMA`. + + Raises: + + `jsonschema.exceptions.SchemaError`: + + if the schema is invalid + + """ + + def is_type(self, instance: Any, type: str) -> bool: + """ + Check if the instance is of the given (JSON Schema) type. + + Arguments: + + instance: + + the value to check + + type: + + the name of a known (JSON Schema) type + + Returns: + + whether the instance is of the given type + + Raises: + + `jsonschema.exceptions.UnknownType`: + + if ``type`` is not a known type + + """ + + def is_valid(self, instance: Any) -> bool: + """ + Check if the instance is valid under the current `schema`. + + Returns: + + whether the instance is valid or not + + >>> schema = {"maxItems" : 2} + >>> Draft202012Validator(schema).is_valid([2, 3, 4]) + False + + """ + + def iter_errors(self, instance: Any) -> Iterable[ValidationError]: + r""" + Lazily yield each of the validation errors in the given instance. + + >>> schema = { + ... "type" : "array", + ... "items" : {"enum" : [1, 2, 3]}, + ... "maxItems" : 2, + ... } + >>> v = Draft202012Validator(schema) + >>> for error in sorted(v.iter_errors([2, 3, 4]), key=str): + ... print(error.message) + 4 is not one of [1, 2, 3] + [2, 3, 4] is too long + + .. deprecated:: v4.0.0 + + Calling this function with a second schema argument is deprecated. + Use `Validator.evolve` instead. + """ + + def validate(self, instance: Any) -> None: + """ + Check if the instance is valid under the current `schema`. + + Raises: + + `jsonschema.exceptions.ValidationError`: + + if the instance is invalid + + >>> schema = {"maxItems" : 2} + >>> Draft202012Validator(schema).validate([2, 3, 4]) + Traceback (most recent call last): + ... + ValidationError: [2, 3, 4] is too long + + """ + + def evolve(self, **kwargs) -> Validator: + """ + Create a new validator like this one, but with given changes. + + Preserves all other attributes, so can be used to e.g. create a + validator with a different schema but with the same :kw:`$ref` + resolution behavior. + + >>> validator = Draft202012Validator({}) + >>> validator.evolve(schema={"type": "number"}) + Draft202012Validator(schema={'type': 'number'}, format_checker=None) + + The returned object satisfies the validator protocol, but may not + be of the same concrete class! In particular this occurs + when a :kw:`$ref` occurs to a schema with a different + :kw:`$schema` than this one (i.e. for a different draft). + + >>> validator.evolve( + ... schema={"$schema": Draft7Validator.META_SCHEMA["$id"]} + ... 
) + Draft7Validator(schema=..., format_checker=None) + """ diff --git a/.venv/lib/python3.12/site-packages/jsonschema/tests/__init__.py b/.venv/lib/python3.12/site-packages/jsonschema/tests/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/jsonschema/tests/__init__.py diff --git a/.venv/lib/python3.12/site-packages/jsonschema/tests/_suite.py b/.venv/lib/python3.12/site-packages/jsonschema/tests/_suite.py new file mode 100644 index 00000000..0da6503c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/jsonschema/tests/_suite.py @@ -0,0 +1,276 @@ +""" +Python representations of the JSON Schema Test Suite tests. +""" +from __future__ import annotations + +from contextlib import suppress +from functools import partial +from pathlib import Path +from typing import TYPE_CHECKING, Any +import json +import os +import re +import subprocess +import sys +import unittest + +from attrs import field, frozen +from referencing import Registry +import referencing.jsonschema + +if TYPE_CHECKING: + from collections.abc import Iterable, Mapping, Sequence + + import pyperf + +from jsonschema.validators import _VALIDATORS +import jsonschema + +_DELIMITERS = re.compile(r"[\W\- ]+") + + +def _find_suite(): + root = os.environ.get("JSON_SCHEMA_TEST_SUITE") + if root is not None: + return Path(root) + + root = Path(jsonschema.__file__).parent.parent / "json" + if not root.is_dir(): # pragma: no cover + raise ValueError( + ( + "Can't find the JSON-Schema-Test-Suite directory. " + "Set the 'JSON_SCHEMA_TEST_SUITE' environment " + "variable or run the tests from alongside a checkout " + "of the suite." + ), + ) + return root + + +@frozen +class Suite: + + _root: Path = field(factory=_find_suite) + _remotes: referencing.jsonschema.SchemaRegistry = field(init=False) + + def __attrs_post_init__(self): + jsonschema_suite = self._root.joinpath("bin", "jsonschema_suite") + argv = [sys.executable, str(jsonschema_suite), "remotes"] + remotes = subprocess.check_output(argv).decode("utf-8") + + resources = json.loads(remotes) + + li = "http://localhost:1234/locationIndependentIdentifierPre2019.json" + li4 = "http://localhost:1234/locationIndependentIdentifierDraft4.json" + + registry = Registry().with_resources( + [ + ( + li, + referencing.jsonschema.DRAFT7.create_resource( + contents=resources.pop(li), + ), + ), + ( + li4, + referencing.jsonschema.DRAFT4.create_resource( + contents=resources.pop(li4), + ), + ), + ], + ).with_contents( + resources.items(), + default_specification=referencing.jsonschema.DRAFT202012, + ) + object.__setattr__(self, "_remotes", registry) + + def benchmark(self, runner: pyperf.Runner): # pragma: no cover + for name, Validator in _VALIDATORS.items(): + self.version(name=name).benchmark( + runner=runner, + Validator=Validator, + ) + + def version(self, name) -> Version: + return Version( + name=name, + path=self._root / "tests" / name, + remotes=self._remotes, + ) + + +@frozen +class Version: + + _path: Path + _remotes: referencing.jsonschema.SchemaRegistry + + name: str + + def benchmark(self, **kwargs): # pragma: no cover + for case in self.cases(): + case.benchmark(**kwargs) + + def cases(self) -> Iterable[_Case]: + return self._cases_in(paths=self._path.glob("*.json")) + + def format_cases(self) -> Iterable[_Case]: + return self._cases_in(paths=self._path.glob("optional/format/*.json")) + + def optional_cases_of(self, name: str) -> Iterable[_Case]: + return self._cases_in(paths=[self._path / "optional" / f"{name}.json"]) + + def 
to_unittest_testcase(self, *groups, **kwargs): + name = kwargs.pop("name", "Test" + self.name.title().replace("-", "")) + methods = { + method.__name__: method + for method in ( + test.to_unittest_method(**kwargs) + for group in groups + for case in group + for test in case.tests + ) + } + cls = type(name, (unittest.TestCase,), methods) + + # We're doing crazy things, so if they go wrong, like a function + # behaving differently on some other interpreter, just make them + # not happen. + with suppress(Exception): + cls.__module__ = _someone_save_us_the_module_of_the_caller() + + return cls + + def _cases_in(self, paths: Iterable[Path]) -> Iterable[_Case]: + for path in paths: + for case in json.loads(path.read_text(encoding="utf-8")): + yield _Case.from_dict( + case, + version=self, + subject=path.stem, + remotes=self._remotes, + ) + + +@frozen +class _Case: + + version: Version + + subject: str + description: str + schema: Mapping[str, Any] | bool + tests: list[_Test] + comment: str | None = None + specification: Sequence[dict[str, str]] = () + + @classmethod + def from_dict(cls, data, remotes, **kwargs): + data.update(kwargs) + tests = [ + _Test( + version=data["version"], + subject=data["subject"], + case_description=data["description"], + schema=data["schema"], + remotes=remotes, + **test, + ) for test in data.pop("tests") + ] + return cls(tests=tests, **data) + + def benchmark(self, runner: pyperf.Runner, **kwargs): # pragma: no cover + for test in self.tests: + runner.bench_func( + test.fully_qualified_name, + partial(test.validate_ignoring_errors, **kwargs), + ) + + +@frozen(repr=False) +class _Test: + + version: Version + + subject: str + case_description: str + description: str + + data: Any + schema: Mapping[str, Any] | bool + + valid: bool + + _remotes: referencing.jsonschema.SchemaRegistry + + comment: str | None = None + + def __repr__(self): # pragma: no cover + return f"<Test {self.fully_qualified_name}>" + + @property + def fully_qualified_name(self): # pragma: no cover + return " > ".join( # noqa: FLY002 + [ + self.version.name, + self.subject, + self.case_description, + self.description, + ], + ) + + def to_unittest_method(self, skip=lambda test: None, **kwargs): + if self.valid: + def fn(this): + self.validate(**kwargs) + else: + def fn(this): + with this.assertRaises(jsonschema.ValidationError): + self.validate(**kwargs) + + fn.__name__ = "_".join( + [ + "test", + _DELIMITERS.sub("_", self.subject), + _DELIMITERS.sub("_", self.case_description), + _DELIMITERS.sub("_", self.description), + ], + ) + reason = skip(self) + if reason is None or os.environ.get("JSON_SCHEMA_DEBUG", "0") != "0": + return fn + elif os.environ.get("JSON_SCHEMA_EXPECTED_FAILURES", "0") != "0": # pragma: no cover # noqa: E501 + return unittest.expectedFailure(fn) + else: + return unittest.skip(reason)(fn) + + def validate(self, Validator, **kwargs): + Validator.check_schema(self.schema) + validator = Validator( + schema=self.schema, + registry=self._remotes, + **kwargs, + ) + if os.environ.get("JSON_SCHEMA_DEBUG", "0") != "0": # pragma: no cover + breakpoint() # noqa: T100 + validator.validate(instance=self.data) + + def validate_ignoring_errors(self, Validator): # pragma: no cover + with suppress(jsonschema.ValidationError): + self.validate(Validator=Validator) + + +def _someone_save_us_the_module_of_the_caller(): + """ + The FQON of the module 2nd stack frames up from here. 
+ + This is intended to allow us to dynamically return test case classes that + are indistinguishable from being defined in the module that wants them. + + Otherwise, trial will mis-print the FQON, and copy pasting it won't re-run + the class that really is running. + + Save us all, this is all so so so so so terrible. + """ + + return sys._getframe(2).f_globals["__name__"] diff --git a/.venv/lib/python3.12/site-packages/jsonschema/tests/fuzz_validate.py b/.venv/lib/python3.12/site-packages/jsonschema/tests/fuzz_validate.py new file mode 100644 index 00000000..c12e88bc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/jsonschema/tests/fuzz_validate.py @@ -0,0 +1,50 @@ +""" +Fuzzing setup for OSS-Fuzz. + +See https://github.com/google/oss-fuzz/tree/master/projects/jsonschema for the +other half of the setup here. +""" +import sys + +from hypothesis import given, strategies + +import jsonschema + +PRIM = strategies.one_of( + strategies.booleans(), + strategies.integers(), + strategies.floats(allow_nan=False, allow_infinity=False), + strategies.text(), +) +DICT = strategies.recursive( + base=strategies.one_of( + strategies.booleans(), + strategies.dictionaries(strategies.text(), PRIM), + ), + extend=lambda inner: strategies.dictionaries(strategies.text(), inner), +) + + +@given(obj1=DICT, obj2=DICT) +def test_schemas(obj1, obj2): + try: + jsonschema.validate(instance=obj1, schema=obj2) + except jsonschema.exceptions.ValidationError: + pass + except jsonschema.exceptions.SchemaError: + pass + + +def main(): + atheris.instrument_all() + atheris.Setup( + sys.argv, + test_schemas.hypothesis.fuzz_one_input, + enable_python_coverage=True, + ) + atheris.Fuzz() + + +if __name__ == "__main__": + import atheris + main() diff --git a/.venv/lib/python3.12/site-packages/jsonschema/tests/test_cli.py b/.venv/lib/python3.12/site-packages/jsonschema/tests/test_cli.py new file mode 100644 index 00000000..79d2a158 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/jsonschema/tests/test_cli.py @@ -0,0 +1,907 @@ +from contextlib import redirect_stderr, redirect_stdout +from importlib import metadata +from io import StringIO +from json import JSONDecodeError +from pathlib import Path +from textwrap import dedent +from unittest import TestCase +import json +import os +import subprocess +import sys +import tempfile +import warnings + +from jsonschema import Draft4Validator, Draft202012Validator +from jsonschema.exceptions import ( + SchemaError, + ValidationError, + _RefResolutionError, +) +from jsonschema.validators import _LATEST_VERSION, validate + +with warnings.catch_warnings(): + warnings.simplefilter("ignore") + from jsonschema import cli + + +def fake_validator(*errors): + errors = list(reversed(errors)) + + class FakeValidator: + def __init__(self, *args, **kwargs): + pass + + def iter_errors(self, instance): + if errors: + return errors.pop() + return [] # pragma: no cover + + @classmethod + def check_schema(self, schema): + pass + + return FakeValidator + + +def fake_open(all_contents): + def open(path): + contents = all_contents.get(path) + if contents is None: + raise FileNotFoundError(path) + return StringIO(contents) + return open + + +def _message_for(non_json): + try: + json.loads(non_json) + except JSONDecodeError as error: + return str(error) + else: # pragma: no cover + raise RuntimeError("Tried and failed to capture a JSON dump error.") + + +class TestCLI(TestCase): + def run_cli( + self, argv, files=None, stdin=StringIO(), exit_code=0, **override, + ): + arguments = 
cli.parse_args(argv) + arguments.update(override) + + self.assertFalse(hasattr(cli, "open")) + cli.open = fake_open(files or {}) + try: + stdout, stderr = StringIO(), StringIO() + actual_exit_code = cli.run( + arguments, + stdin=stdin, + stdout=stdout, + stderr=stderr, + ) + finally: + del cli.open + + self.assertEqual( + actual_exit_code, exit_code, msg=dedent( + f""" + Expected an exit code of {exit_code} != {actual_exit_code}. + + stdout: {stdout.getvalue()} + + stderr: {stderr.getvalue()} + """, + ), + ) + return stdout.getvalue(), stderr.getvalue() + + def assertOutputs(self, stdout="", stderr="", **kwargs): + self.assertEqual( + self.run_cli(**kwargs), + (dedent(stdout), dedent(stderr)), + ) + + def test_invalid_instance(self): + error = ValidationError("I am an error!", instance=12) + self.assertOutputs( + files=dict( + some_schema='{"does not": "matter since it is stubbed"}', + some_instance=json.dumps(error.instance), + ), + validator=fake_validator([error]), + + argv=["-i", "some_instance", "some_schema"], + + exit_code=1, + stderr="12: I am an error!\n", + ) + + def test_invalid_instance_pretty_output(self): + error = ValidationError("I am an error!", instance=12) + self.assertOutputs( + files=dict( + some_schema='{"does not": "matter since it is stubbed"}', + some_instance=json.dumps(error.instance), + ), + validator=fake_validator([error]), + + argv=["-i", "some_instance", "--output", "pretty", "some_schema"], + + exit_code=1, + stderr="""\ + ===[ValidationError]===(some_instance)=== + + I am an error! + ----------------------------- + """, + ) + + def test_invalid_instance_explicit_plain_output(self): + error = ValidationError("I am an error!", instance=12) + self.assertOutputs( + files=dict( + some_schema='{"does not": "matter since it is stubbed"}', + some_instance=json.dumps(error.instance), + ), + validator=fake_validator([error]), + + argv=["--output", "plain", "-i", "some_instance", "some_schema"], + + exit_code=1, + stderr="12: I am an error!\n", + ) + + def test_invalid_instance_multiple_errors(self): + instance = 12 + first = ValidationError("First error", instance=instance) + second = ValidationError("Second error", instance=instance) + + self.assertOutputs( + files=dict( + some_schema='{"does not": "matter since it is stubbed"}', + some_instance=json.dumps(instance), + ), + validator=fake_validator([first, second]), + + argv=["-i", "some_instance", "some_schema"], + + exit_code=1, + stderr="""\ + 12: First error + 12: Second error + """, + ) + + def test_invalid_instance_multiple_errors_pretty_output(self): + instance = 12 + first = ValidationError("First error", instance=instance) + second = ValidationError("Second error", instance=instance) + + self.assertOutputs( + files=dict( + some_schema='{"does not": "matter since it is stubbed"}', + some_instance=json.dumps(instance), + ), + validator=fake_validator([first, second]), + + argv=["-i", "some_instance", "--output", "pretty", "some_schema"], + + exit_code=1, + stderr="""\ + ===[ValidationError]===(some_instance)=== + + First error + ----------------------------- + ===[ValidationError]===(some_instance)=== + + Second error + ----------------------------- + """, + ) + + def test_multiple_invalid_instances(self): + first_instance = 12 + first_errors = [ + ValidationError("An error", instance=first_instance), + ValidationError("Another error", instance=first_instance), + ] + second_instance = "foo" + second_errors = [ValidationError("BOOM", instance=second_instance)] + + self.assertOutputs( + files=dict( + 
some_schema='{"does not": "matter since it is stubbed"}', + some_first_instance=json.dumps(first_instance), + some_second_instance=json.dumps(second_instance), + ), + validator=fake_validator(first_errors, second_errors), + + argv=[ + "-i", "some_first_instance", + "-i", "some_second_instance", + "some_schema", + ], + + exit_code=1, + stderr="""\ + 12: An error + 12: Another error + foo: BOOM + """, + ) + + def test_multiple_invalid_instances_pretty_output(self): + first_instance = 12 + first_errors = [ + ValidationError("An error", instance=first_instance), + ValidationError("Another error", instance=first_instance), + ] + second_instance = "foo" + second_errors = [ValidationError("BOOM", instance=second_instance)] + + self.assertOutputs( + files=dict( + some_schema='{"does not": "matter since it is stubbed"}', + some_first_instance=json.dumps(first_instance), + some_second_instance=json.dumps(second_instance), + ), + validator=fake_validator(first_errors, second_errors), + + argv=[ + "--output", "pretty", + "-i", "some_first_instance", + "-i", "some_second_instance", + "some_schema", + ], + + exit_code=1, + stderr="""\ + ===[ValidationError]===(some_first_instance)=== + + An error + ----------------------------- + ===[ValidationError]===(some_first_instance)=== + + Another error + ----------------------------- + ===[ValidationError]===(some_second_instance)=== + + BOOM + ----------------------------- + """, + ) + + def test_custom_error_format(self): + first_instance = 12 + first_errors = [ + ValidationError("An error", instance=first_instance), + ValidationError("Another error", instance=first_instance), + ] + second_instance = "foo" + second_errors = [ValidationError("BOOM", instance=second_instance)] + + self.assertOutputs( + files=dict( + some_schema='{"does not": "matter since it is stubbed"}', + some_first_instance=json.dumps(first_instance), + some_second_instance=json.dumps(second_instance), + ), + validator=fake_validator(first_errors, second_errors), + + argv=[ + "--error-format", ":{error.message}._-_.{error.instance}:", + "-i", "some_first_instance", + "-i", "some_second_instance", + "some_schema", + ], + + exit_code=1, + stderr=":An error._-_.12::Another error._-_.12::BOOM._-_.foo:", + ) + + def test_invalid_schema(self): + self.assertOutputs( + files=dict(some_schema='{"type": 12}'), + argv=["some_schema"], + + exit_code=1, + stderr="""\ + 12: 12 is not valid under any of the given schemas + """, + ) + + def test_invalid_schema_pretty_output(self): + schema = {"type": 12} + + with self.assertRaises(SchemaError) as e: + validate(schema=schema, instance="") + error = str(e.exception) + + self.assertOutputs( + files=dict(some_schema=json.dumps(schema)), + argv=["--output", "pretty", "some_schema"], + + exit_code=1, + stderr=( + "===[SchemaError]===(some_schema)===\n\n" + + str(error) + + "\n-----------------------------\n" + ), + ) + + def test_invalid_schema_multiple_errors(self): + self.assertOutputs( + files=dict(some_schema='{"type": 12, "items": 57}'), + argv=["some_schema"], + + exit_code=1, + stderr="""\ + 57: 57 is not of type 'object', 'boolean' + """, + ) + + def test_invalid_schema_multiple_errors_pretty_output(self): + schema = {"type": 12, "items": 57} + + with self.assertRaises(SchemaError) as e: + validate(schema=schema, instance="") + error = str(e.exception) + + self.assertOutputs( + files=dict(some_schema=json.dumps(schema)), + argv=["--output", "pretty", "some_schema"], + + exit_code=1, + stderr=( + "===[SchemaError]===(some_schema)===\n\n" + + str(error) + 
+ "\n-----------------------------\n" + ), + ) + + def test_invalid_schema_with_invalid_instance(self): + """ + "Validating" an instance that's invalid under an invalid schema + just shows the schema error. + """ + self.assertOutputs( + files=dict( + some_schema='{"type": 12, "minimum": 30}', + some_instance="13", + ), + argv=["-i", "some_instance", "some_schema"], + + exit_code=1, + stderr="""\ + 12: 12 is not valid under any of the given schemas + """, + ) + + def test_invalid_schema_with_invalid_instance_pretty_output(self): + instance, schema = 13, {"type": 12, "minimum": 30} + + with self.assertRaises(SchemaError) as e: + validate(schema=schema, instance=instance) + error = str(e.exception) + + self.assertOutputs( + files=dict( + some_schema=json.dumps(schema), + some_instance=json.dumps(instance), + ), + argv=["--output", "pretty", "-i", "some_instance", "some_schema"], + + exit_code=1, + stderr=( + "===[SchemaError]===(some_schema)===\n\n" + + str(error) + + "\n-----------------------------\n" + ), + ) + + def test_invalid_instance_continues_with_the_rest(self): + self.assertOutputs( + files=dict( + some_schema='{"minimum": 30}', + first_instance="not valid JSON!", + second_instance="12", + ), + argv=[ + "-i", "first_instance", + "-i", "second_instance", + "some_schema", + ], + + exit_code=1, + stderr="""\ + Failed to parse 'first_instance': {} + 12: 12 is less than the minimum of 30 + """.format(_message_for("not valid JSON!")), + ) + + def test_custom_error_format_applies_to_schema_errors(self): + instance, schema = 13, {"type": 12, "minimum": 30} + + with self.assertRaises(SchemaError): + validate(schema=schema, instance=instance) + + self.assertOutputs( + files=dict(some_schema=json.dumps(schema)), + + argv=[ + "--error-format", ":{error.message}._-_.{error.instance}:", + "some_schema", + ], + + exit_code=1, + stderr=":12 is not valid under any of the given schemas._-_.12:", + ) + + def test_instance_is_invalid_JSON(self): + instance = "not valid JSON!" + + self.assertOutputs( + files=dict(some_schema="{}", some_instance=instance), + argv=["-i", "some_instance", "some_schema"], + + exit_code=1, + stderr=f"""\ + Failed to parse 'some_instance': {_message_for(instance)} + """, + ) + + def test_instance_is_invalid_JSON_pretty_output(self): + stdout, stderr = self.run_cli( + files=dict( + some_schema="{}", + some_instance="not valid JSON!", + ), + + argv=["--output", "pretty", "-i", "some_instance", "some_schema"], + + exit_code=1, + ) + self.assertFalse(stdout) + self.assertIn( + "(some_instance)===\n\nTraceback (most recent call last):\n", + stderr, + ) + self.assertNotIn("some_schema", stderr) + + def test_instance_is_invalid_JSON_on_stdin(self): + instance = "not valid JSON!" + + self.assertOutputs( + files=dict(some_schema="{}"), + stdin=StringIO(instance), + + argv=["some_schema"], + + exit_code=1, + stderr=f"""\ + Failed to parse <stdin>: {_message_for(instance)} + """, + ) + + def test_instance_is_invalid_JSON_on_stdin_pretty_output(self): + stdout, stderr = self.run_cli( + files=dict(some_schema="{}"), + stdin=StringIO("not valid JSON!"), + + argv=["--output", "pretty", "some_schema"], + + exit_code=1, + ) + self.assertFalse(stdout) + self.assertIn( + "(<stdin>)===\n\nTraceback (most recent call last):\n", + stderr, + ) + self.assertNotIn("some_schema", stderr) + + def test_schema_is_invalid_JSON(self): + schema = "not valid JSON!" 
+ + self.assertOutputs( + files=dict(some_schema=schema), + + argv=["some_schema"], + + exit_code=1, + stderr=f"""\ + Failed to parse 'some_schema': {_message_for(schema)} + """, + ) + + def test_schema_is_invalid_JSON_pretty_output(self): + stdout, stderr = self.run_cli( + files=dict(some_schema="not valid JSON!"), + + argv=["--output", "pretty", "some_schema"], + + exit_code=1, + ) + self.assertFalse(stdout) + self.assertIn( + "(some_schema)===\n\nTraceback (most recent call last):\n", + stderr, + ) + + def test_schema_and_instance_are_both_invalid_JSON(self): + """ + Only the schema error is reported, as we abort immediately. + """ + schema, instance = "not valid JSON!", "also not valid JSON!" + self.assertOutputs( + files=dict(some_schema=schema, some_instance=instance), + + argv=["some_schema"], + + exit_code=1, + stderr=f"""\ + Failed to parse 'some_schema': {_message_for(schema)} + """, + ) + + def test_schema_and_instance_are_both_invalid_JSON_pretty_output(self): + """ + Only the schema error is reported, as we abort immediately. + """ + stdout, stderr = self.run_cli( + files=dict( + some_schema="not valid JSON!", + some_instance="also not valid JSON!", + ), + + argv=["--output", "pretty", "-i", "some_instance", "some_schema"], + + exit_code=1, + ) + self.assertFalse(stdout) + self.assertIn( + "(some_schema)===\n\nTraceback (most recent call last):\n", + stderr, + ) + self.assertNotIn("some_instance", stderr) + + def test_instance_does_not_exist(self): + self.assertOutputs( + files=dict(some_schema="{}"), + argv=["-i", "nonexisting_instance", "some_schema"], + + exit_code=1, + stderr="""\ + 'nonexisting_instance' does not exist. + """, + ) + + def test_instance_does_not_exist_pretty_output(self): + self.assertOutputs( + files=dict(some_schema="{}"), + argv=[ + "--output", "pretty", + "-i", "nonexisting_instance", + "some_schema", + ], + + exit_code=1, + stderr="""\ + ===[FileNotFoundError]===(nonexisting_instance)=== + + 'nonexisting_instance' does not exist. + ----------------------------- + """, + ) + + def test_schema_does_not_exist(self): + self.assertOutputs( + argv=["nonexisting_schema"], + + exit_code=1, + stderr="'nonexisting_schema' does not exist.\n", + ) + + def test_schema_does_not_exist_pretty_output(self): + self.assertOutputs( + argv=["--output", "pretty", "nonexisting_schema"], + + exit_code=1, + stderr="""\ + ===[FileNotFoundError]===(nonexisting_schema)=== + + 'nonexisting_schema' does not exist. + ----------------------------- + """, + ) + + def test_neither_instance_nor_schema_exist(self): + self.assertOutputs( + argv=["-i", "nonexisting_instance", "nonexisting_schema"], + + exit_code=1, + stderr="'nonexisting_schema' does not exist.\n", + ) + + def test_neither_instance_nor_schema_exist_pretty_output(self): + self.assertOutputs( + argv=[ + "--output", "pretty", + "-i", "nonexisting_instance", + "nonexisting_schema", + ], + + exit_code=1, + stderr="""\ + ===[FileNotFoundError]===(nonexisting_schema)=== + + 'nonexisting_schema' does not exist. 
+ ----------------------------- + """, + ) + + def test_successful_validation(self): + self.assertOutputs( + files=dict(some_schema="{}", some_instance="{}"), + argv=["-i", "some_instance", "some_schema"], + stdout="", + stderr="", + ) + + def test_successful_validation_pretty_output(self): + self.assertOutputs( + files=dict(some_schema="{}", some_instance="{}"), + argv=["--output", "pretty", "-i", "some_instance", "some_schema"], + stdout="===[SUCCESS]===(some_instance)===\n", + stderr="", + ) + + def test_successful_validation_of_stdin(self): + self.assertOutputs( + files=dict(some_schema="{}"), + stdin=StringIO("{}"), + argv=["some_schema"], + stdout="", + stderr="", + ) + + def test_successful_validation_of_stdin_pretty_output(self): + self.assertOutputs( + files=dict(some_schema="{}"), + stdin=StringIO("{}"), + argv=["--output", "pretty", "some_schema"], + stdout="===[SUCCESS]===(<stdin>)===\n", + stderr="", + ) + + def test_successful_validation_of_just_the_schema(self): + self.assertOutputs( + files=dict(some_schema="{}", some_instance="{}"), + argv=["-i", "some_instance", "some_schema"], + stdout="", + stderr="", + ) + + def test_successful_validation_of_just_the_schema_pretty_output(self): + self.assertOutputs( + files=dict(some_schema="{}", some_instance="{}"), + argv=["--output", "pretty", "-i", "some_instance", "some_schema"], + stdout="===[SUCCESS]===(some_instance)===\n", + stderr="", + ) + + def test_successful_validation_via_explicit_base_uri(self): + ref_schema_file = tempfile.NamedTemporaryFile(delete=False) + ref_schema_file.close() + self.addCleanup(os.remove, ref_schema_file.name) + + ref_path = Path(ref_schema_file.name) + ref_path.write_text('{"definitions": {"num": {"type": "integer"}}}') + + schema = f'{{"$ref": "{ref_path.name}#/definitions/num"}}' + + self.assertOutputs( + files=dict(some_schema=schema, some_instance="1"), + argv=[ + "-i", "some_instance", + "--base-uri", ref_path.parent.as_uri() + "/", + "some_schema", + ], + stdout="", + stderr="", + ) + + def test_unsuccessful_validation_via_explicit_base_uri(self): + ref_schema_file = tempfile.NamedTemporaryFile(delete=False) + ref_schema_file.close() + self.addCleanup(os.remove, ref_schema_file.name) + + ref_path = Path(ref_schema_file.name) + ref_path.write_text('{"definitions": {"num": {"type": "integer"}}}') + + schema = f'{{"$ref": "{ref_path.name}#/definitions/num"}}' + + self.assertOutputs( + files=dict(some_schema=schema, some_instance='"1"'), + argv=[ + "-i", "some_instance", + "--base-uri", ref_path.parent.as_uri() + "/", + "some_schema", + ], + exit_code=1, + stdout="", + stderr="1: '1' is not of type 'integer'\n", + ) + + def test_nonexistent_file_with_explicit_base_uri(self): + schema = '{"$ref": "someNonexistentFile.json#definitions/num"}' + instance = "1" + + with self.assertRaises(_RefResolutionError) as e: + self.assertOutputs( + files=dict( + some_schema=schema, + some_instance=instance, + ), + argv=[ + "-i", "some_instance", + "--base-uri", Path.cwd().as_uri(), + "some_schema", + ], + ) + error = str(e.exception) + self.assertIn(f"{os.sep}someNonexistentFile.json'", error) + + def test_invalid_explicit_base_uri(self): + schema = '{"$ref": "foo.json#definitions/num"}' + instance = "1" + + with self.assertRaises(_RefResolutionError) as e: + self.assertOutputs( + files=dict( + some_schema=schema, + some_instance=instance, + ), + argv=[ + "-i", "some_instance", + "--base-uri", "not@UR1", + "some_schema", + ], + ) + error = str(e.exception) + self.assertEqual( + error, "unknown url type: 
'foo.json'", + ) + + def test_it_validates_using_the_latest_validator_when_unspecified(self): + # There isn't a better way now I can think of to ensure that the + # latest version was used, given that the call to validator_for + # is hidden inside the CLI, so guard that that's the case, and + # this test will have to be updated when versions change until + # we can think of a better way to ensure this behavior. + self.assertIs(Draft202012Validator, _LATEST_VERSION) + + self.assertOutputs( + files=dict(some_schema='{"const": "check"}', some_instance='"a"'), + argv=["-i", "some_instance", "some_schema"], + exit_code=1, + stdout="", + stderr="a: 'check' was expected\n", + ) + + def test_it_validates_using_draft7_when_specified(self): + """ + Specifically, `const` validation applies for Draft 7. + """ + schema = """ + { + "$schema": "http://json-schema.org/draft-07/schema#", + "const": "check" + } + """ + instance = '"foo"' + self.assertOutputs( + files=dict(some_schema=schema, some_instance=instance), + argv=["-i", "some_instance", "some_schema"], + exit_code=1, + stdout="", + stderr="foo: 'check' was expected\n", + ) + + def test_it_validates_using_draft4_when_specified(self): + """ + Specifically, `const` validation *does not* apply for Draft 4. + """ + schema = """ + { + "$schema": "http://json-schema.org/draft-04/schema#", + "const": "check" + } + """ + instance = '"foo"' + self.assertOutputs( + files=dict(some_schema=schema, some_instance=instance), + argv=["-i", "some_instance", "some_schema"], + stdout="", + stderr="", + ) + + +class TestParser(TestCase): + + FakeValidator = fake_validator() + + def test_find_validator_by_fully_qualified_object_name(self): + arguments = cli.parse_args( + [ + "--validator", + "jsonschema.tests.test_cli.TestParser.FakeValidator", + "--instance", "mem://some/instance", + "mem://some/schema", + ], + ) + self.assertIs(arguments["validator"], self.FakeValidator) + + def test_find_validator_in_jsonschema(self): + arguments = cli.parse_args( + [ + "--validator", "Draft4Validator", + "--instance", "mem://some/instance", + "mem://some/schema", + ], + ) + self.assertIs(arguments["validator"], Draft4Validator) + + def cli_output_for(self, *argv): + stdout, stderr = StringIO(), StringIO() + with redirect_stdout(stdout), redirect_stderr(stderr): # noqa: SIM117 + with self.assertRaises(SystemExit): + cli.parse_args(argv) + return stdout.getvalue(), stderr.getvalue() + + def test_unknown_output(self): + stdout, stderr = self.cli_output_for( + "--output", "foo", + "mem://some/schema", + ) + self.assertIn("invalid choice: 'foo'", stderr) + self.assertFalse(stdout) + + def test_useless_error_format(self): + stdout, stderr = self.cli_output_for( + "--output", "pretty", + "--error-format", "foo", + "mem://some/schema", + ) + self.assertIn( + "--error-format can only be used with --output plain", + stderr, + ) + self.assertFalse(stdout) + + +class TestCLIIntegration(TestCase): + def test_license(self): + output = subprocess.check_output( + [sys.executable, "-m", "pip", "show", "jsonschema"], + stderr=subprocess.STDOUT, + ) + self.assertIn(b"License: MIT", output) + + def test_version(self): + version = subprocess.check_output( + [sys.executable, "-W", "ignore", "-m", "jsonschema", "--version"], + stderr=subprocess.STDOUT, + ) + version = version.decode("utf-8").strip() + self.assertEqual(version, metadata.version("jsonschema")) + + def test_no_arguments_shows_usage_notes(self): + output = subprocess.check_output( + [sys.executable, "-m", "jsonschema"], + 
stderr=subprocess.STDOUT, + ) + output_for_help = subprocess.check_output( + [sys.executable, "-m", "jsonschema", "--help"], + stderr=subprocess.STDOUT, + ) + self.assertEqual(output, output_for_help) diff --git a/.venv/lib/python3.12/site-packages/jsonschema/tests/test_deprecations.py b/.venv/lib/python3.12/site-packages/jsonschema/tests/test_deprecations.py new file mode 100644 index 00000000..aea922d2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/jsonschema/tests/test_deprecations.py @@ -0,0 +1,432 @@ +from contextlib import contextmanager +from io import BytesIO +from unittest import TestCase, mock +import importlib.metadata +import json +import subprocess +import sys +import urllib.request + +import referencing.exceptions + +from jsonschema import FormatChecker, exceptions, protocols, validators + + +class TestDeprecations(TestCase): + def test_version(self): + """ + As of v4.0.0, __version__ is deprecated in favor of importlib.metadata. + """ + + message = "Accessing jsonschema.__version__ is deprecated" + with self.assertWarnsRegex(DeprecationWarning, message) as w: + from jsonschema import __version__ + + self.assertEqual(__version__, importlib.metadata.version("jsonschema")) + self.assertEqual(w.filename, __file__) + + def test_validators_ErrorTree(self): + """ + As of v4.0.0, importing ErrorTree from jsonschema.validators is + deprecated in favor of doing so from jsonschema.exceptions. + """ + + message = "Importing ErrorTree from jsonschema.validators is " + with self.assertWarnsRegex(DeprecationWarning, message) as w: + from jsonschema.validators import ErrorTree + + self.assertEqual(ErrorTree, exceptions.ErrorTree) + self.assertEqual(w.filename, __file__) + + def test_import_ErrorTree(self): + """ + As of v4.18.0, importing ErrorTree from the package root is + deprecated in favor of doing so from jsonschema.exceptions. + """ + + message = "Importing ErrorTree directly from the jsonschema package " + with self.assertWarnsRegex(DeprecationWarning, message) as w: + from jsonschema import ErrorTree + + self.assertEqual(ErrorTree, exceptions.ErrorTree) + self.assertEqual(w.filename, __file__) + + def test_ErrorTree_setitem(self): + """ + As of v4.20.0, setting items on an ErrorTree is deprecated. + """ + + e = exceptions.ValidationError("some error", path=["foo"]) + tree = exceptions.ErrorTree() + subtree = exceptions.ErrorTree(errors=[e]) + + message = "ErrorTree.__setitem__ is " + with self.assertWarnsRegex(DeprecationWarning, message) as w: + tree["foo"] = subtree + + self.assertEqual(tree["foo"], subtree) + self.assertEqual(w.filename, __file__) + + def test_import_FormatError(self): + """ + As of v4.18.0, importing FormatError from the package root is + deprecated in favor of doing so from jsonschema.exceptions. + """ + + message = "Importing FormatError directly from the jsonschema package " + with self.assertWarnsRegex(DeprecationWarning, message) as w: + from jsonschema import FormatError + + self.assertEqual(FormatError, exceptions.FormatError) + self.assertEqual(w.filename, __file__) + + def test_import_Validator(self): + """ + As of v4.19.0, importing Validator from the package root is + deprecated in favor of doing so from jsonschema.protocols. 
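+        (i.e. `from jsonschema.protocols import Validator` is the supported
+        import, which the assertion below compares against.)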
+ """ + + message = "Importing Validator directly from the jsonschema package " + with self.assertWarnsRegex(DeprecationWarning, message) as w: + from jsonschema import Validator + + self.assertEqual(Validator, protocols.Validator) + self.assertEqual(w.filename, __file__) + + def test_validators_validators(self): + """ + As of v4.0.0, accessing jsonschema.validators.validators is + deprecated. + """ + + message = "Accessing jsonschema.validators.validators is deprecated" + with self.assertWarnsRegex(DeprecationWarning, message) as w: + value = validators.validators + + self.assertEqual(value, validators._VALIDATORS) + self.assertEqual(w.filename, __file__) + + def test_validators_meta_schemas(self): + """ + As of v4.0.0, accessing jsonschema.validators.meta_schemas is + deprecated. + """ + + message = "Accessing jsonschema.validators.meta_schemas is deprecated" + with self.assertWarnsRegex(DeprecationWarning, message) as w: + value = validators.meta_schemas + + self.assertEqual(value, validators._META_SCHEMAS) + self.assertEqual(w.filename, __file__) + + def test_RefResolver_in_scope(self): + """ + As of v4.0.0, RefResolver.in_scope is deprecated. + """ + + resolver = validators._RefResolver.from_schema({}) + message = "jsonschema.RefResolver.in_scope is deprecated " + with self.assertWarnsRegex(DeprecationWarning, message) as w: # noqa: SIM117 + with resolver.in_scope("foo"): + pass + + self.assertEqual(w.filename, __file__) + + def test_Validator_is_valid_two_arguments(self): + """ + As of v4.0.0, calling is_valid with two arguments (to provide a + different schema) is deprecated. + """ + + validator = validators.Draft7Validator({}) + message = "Passing a schema to Validator.is_valid is deprecated " + with self.assertWarnsRegex(DeprecationWarning, message) as w: + result = validator.is_valid("foo", {"type": "number"}) + + self.assertFalse(result) + self.assertEqual(w.filename, __file__) + + def test_Validator_iter_errors_two_arguments(self): + """ + As of v4.0.0, calling iter_errors with two arguments (to provide a + different schema) is deprecated. + """ + + validator = validators.Draft7Validator({}) + message = "Passing a schema to Validator.iter_errors is deprecated " + with self.assertWarnsRegex(DeprecationWarning, message) as w: + error, = validator.iter_errors("foo", {"type": "number"}) + + self.assertEqual(error.validator, "type") + self.assertEqual(w.filename, __file__) + + def test_Validator_resolver(self): + """ + As of v4.18.0, accessing Validator.resolver is deprecated. + """ + + validator = validators.Draft7Validator({}) + message = "Accessing Draft7Validator.resolver is " + with self.assertWarnsRegex(DeprecationWarning, message) as w: + self.assertIsInstance(validator.resolver, validators._RefResolver) + + self.assertEqual(w.filename, __file__) + + def test_RefResolver(self): + """ + As of v4.18.0, RefResolver is fully deprecated. + """ + + message = "jsonschema.RefResolver is deprecated" + with self.assertWarnsRegex(DeprecationWarning, message) as w: + from jsonschema import RefResolver + self.assertEqual(w.filename, __file__) + + with self.assertWarnsRegex(DeprecationWarning, message) as w: + from jsonschema.validators import RefResolver # noqa: F401, F811 + self.assertEqual(w.filename, __file__) + + def test_RefResolutionError(self): + """ + As of v4.18.0, RefResolutionError is deprecated in favor of directly + catching errors from the referencing library. 
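+        (Concretely, `referencing.exceptions.Unresolvable` and its subclasses
+        are what callers should catch instead, as the tests further below do.)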
+ """ + + message = "jsonschema.exceptions.RefResolutionError is deprecated" + with self.assertWarnsRegex(DeprecationWarning, message) as w: + from jsonschema import RefResolutionError + + self.assertEqual(RefResolutionError, exceptions._RefResolutionError) + self.assertEqual(w.filename, __file__) + + with self.assertWarnsRegex(DeprecationWarning, message) as w: + from jsonschema.exceptions import RefResolutionError + + self.assertEqual(RefResolutionError, exceptions._RefResolutionError) + self.assertEqual(w.filename, __file__) + + def test_catching_Unresolvable_directly(self): + """ + This behavior is the intended behavior (i.e. it's not deprecated), but + given we do "tricksy" things in the iterim to wrap exceptions in a + multiple inheritance subclass, we need to be extra sure it works and + stays working. + """ + validator = validators.Draft202012Validator({"$ref": "urn:nothing"}) + + with self.assertRaises(referencing.exceptions.Unresolvable) as e: + validator.validate(12) + + expected = referencing.exceptions.Unresolvable(ref="urn:nothing") + self.assertEqual( + (e.exception, str(e.exception)), + (expected, "Unresolvable: urn:nothing"), + ) + + def test_catching_Unresolvable_via_RefResolutionError(self): + """ + Until RefResolutionError is removed, it is still possible to catch + exceptions from reference resolution using it, even though they may + have been raised by referencing. + """ + with self.assertWarns(DeprecationWarning): + from jsonschema import RefResolutionError + + validator = validators.Draft202012Validator({"$ref": "urn:nothing"}) + + with self.assertRaises(referencing.exceptions.Unresolvable) as u: + validator.validate(12) + + with self.assertRaises(RefResolutionError) as e: + validator.validate(12) + + self.assertEqual( + (e.exception, str(e.exception)), + (u.exception, "Unresolvable: urn:nothing"), + ) + + def test_WrappedReferencingError_hashability(self): + """ + Ensure the wrapped referencing errors are hashable when possible. + """ + with self.assertWarns(DeprecationWarning): + from jsonschema import RefResolutionError + + validator = validators.Draft202012Validator({"$ref": "urn:nothing"}) + + with self.assertRaises(referencing.exceptions.Unresolvable) as u: + validator.validate(12) + + with self.assertRaises(RefResolutionError) as e: + validator.validate(12) + + self.assertIn(e.exception, {u.exception}) + self.assertIn(u.exception, {e.exception}) + + def test_Validator_subclassing(self): + """ + As of v4.12.0, subclassing a validator class produces an explicit + deprecation warning. + + This was never intended to be public API (and some comments over the + years in issues said so, but obviously that's not a great way to make + sure it's followed). + + A future version will explicitly raise an error. + """ + + message = "Subclassing validator classes is " + with self.assertWarnsRegex(DeprecationWarning, message) as w: + class Subclass(validators.Draft202012Validator): + pass + + self.assertEqual(w.filename, __file__) + + with self.assertWarnsRegex(DeprecationWarning, message) as w: + class AnotherSubclass(validators.create(meta_schema={})): + pass + + def test_FormatChecker_cls_checks(self): + """ + As of v4.14.0, FormatChecker.cls_checks is deprecated without + replacement. 
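+        (Registering checks via `FormatChecker.checks` on a specific instance
+        remains supported.)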
+ """ + + self.addCleanup(FormatChecker.checkers.pop, "boom", None) + + message = "FormatChecker.cls_checks " + with self.assertWarnsRegex(DeprecationWarning, message) as w: + FormatChecker.cls_checks("boom") + + self.assertEqual(w.filename, __file__) + + def test_draftN_format_checker(self): + """ + As of v4.16.0, accessing jsonschema.draftn_format_checker is deprecated + in favor of Validator.FORMAT_CHECKER. + """ + + message = "Accessing jsonschema.draft202012_format_checker is " + with self.assertWarnsRegex(DeprecationWarning, message) as w: + from jsonschema import draft202012_format_checker + + self.assertIs( + draft202012_format_checker, + validators.Draft202012Validator.FORMAT_CHECKER, + ) + self.assertEqual(w.filename, __file__) + + message = "Accessing jsonschema.draft201909_format_checker is " + with self.assertWarnsRegex(DeprecationWarning, message) as w: + from jsonschema import draft201909_format_checker + + self.assertIs( + draft201909_format_checker, + validators.Draft201909Validator.FORMAT_CHECKER, + ) + self.assertEqual(w.filename, __file__) + + message = "Accessing jsonschema.draft7_format_checker is " + with self.assertWarnsRegex(DeprecationWarning, message) as w: + from jsonschema import draft7_format_checker + + self.assertIs( + draft7_format_checker, + validators.Draft7Validator.FORMAT_CHECKER, + ) + self.assertEqual(w.filename, __file__) + + message = "Accessing jsonschema.draft6_format_checker is " + with self.assertWarnsRegex(DeprecationWarning, message) as w: + from jsonschema import draft6_format_checker + + self.assertIs( + draft6_format_checker, + validators.Draft6Validator.FORMAT_CHECKER, + ) + self.assertEqual(w.filename, __file__) + + message = "Accessing jsonschema.draft4_format_checker is " + with self.assertWarnsRegex(DeprecationWarning, message) as w: + from jsonschema import draft4_format_checker + + self.assertIs( + draft4_format_checker, + validators.Draft4Validator.FORMAT_CHECKER, + ) + self.assertEqual(w.filename, __file__) + + message = "Accessing jsonschema.draft3_format_checker is " + with self.assertWarnsRegex(DeprecationWarning, message) as w: + from jsonschema import draft3_format_checker + + self.assertIs( + draft3_format_checker, + validators.Draft3Validator.FORMAT_CHECKER, + ) + self.assertEqual(w.filename, __file__) + + with self.assertRaises(ImportError): + from jsonschema import draft1234_format_checker # noqa: F401 + + def test_import_cli(self): + """ + As of v4.17.0, importing jsonschema.cli is deprecated. + """ + + message = "The jsonschema CLI is deprecated and will be removed " + with self.assertWarnsRegex(DeprecationWarning, message) as w: + import jsonschema.cli + importlib.reload(jsonschema.cli) + + self.assertEqual(w.filename, importlib.__file__) + + def test_cli(self): + """ + As of v4.17.0, the jsonschema CLI is deprecated. + """ + + process = subprocess.run( + [sys.executable, "-m", "jsonschema"], + capture_output=True, + check=True, + ) + self.assertIn(b"The jsonschema CLI is deprecated ", process.stderr) + + def test_automatic_remote_retrieval(self): + """ + Automatic retrieval of remote references is deprecated as of v4.18.0. 
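+        (Callers are expected to fetch remote schemas themselves rather than
+        rely on this urllib-based fallback.)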
+ """ + ref = "http://bar#/$defs/baz" + schema = {"$defs": {"baz": {"type": "integer"}}} + + if "requests" in sys.modules: # pragma: no cover + self.addCleanup( + sys.modules.__setitem__, "requests", sys.modules["requests"], + ) + sys.modules["requests"] = None + + @contextmanager + def fake_urlopen(request): + self.assertIsInstance(request, urllib.request.Request) + self.assertEqual(request.full_url, "http://bar") + + # Ha ha urllib.request.Request "normalizes" header names and + # Request.get_header does not also normalize them... + (header, value), = request.header_items() + self.assertEqual(header.lower(), "user-agent") + self.assertEqual( + value, "python-jsonschema (deprecated $ref resolution)", + ) + yield BytesIO(json.dumps(schema).encode("utf8")) + + validator = validators.Draft202012Validator({"$ref": ref}) + + message = "Automatically retrieving remote references " + patch = mock.patch.object(urllib.request, "urlopen", new=fake_urlopen) + + with patch, self.assertWarnsRegex(DeprecationWarning, message): + self.assertEqual( + (validator.is_valid({}), validator.is_valid(37)), + (False, True), + ) diff --git a/.venv/lib/python3.12/site-packages/jsonschema/tests/test_exceptions.py b/.venv/lib/python3.12/site-packages/jsonschema/tests/test_exceptions.py new file mode 100644 index 00000000..69114e18 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/jsonschema/tests/test_exceptions.py @@ -0,0 +1,702 @@ +from unittest import TestCase +import textwrap + +from jsonschema import exceptions +from jsonschema.validators import _LATEST_VERSION + + +class TestBestMatch(TestCase): + def best_match_of(self, instance, schema): + errors = list(_LATEST_VERSION(schema).iter_errors(instance)) + msg = f"No errors found for {instance} under {schema!r}!" + self.assertTrue(errors, msg=msg) + + best = exceptions.best_match(iter(errors)) + reversed_best = exceptions.best_match(reversed(errors)) + + self.assertEqual( + best._contents(), + reversed_best._contents(), + f"No consistent best match!\nGot: {best}\n\nThen: {reversed_best}", + ) + return best + + def test_shallower_errors_are_better_matches(self): + schema = { + "properties": { + "foo": { + "minProperties": 2, + "properties": {"bar": {"type": "object"}}, + }, + }, + } + best = self.best_match_of(instance={"foo": {"bar": []}}, schema=schema) + self.assertEqual(best.validator, "minProperties") + + def test_oneOf_and_anyOf_are_weak_matches(self): + """ + A property you *must* match is probably better than one you have to + match a part of. + """ + + schema = { + "minProperties": 2, + "anyOf": [{"type": "string"}, {"type": "number"}], + "oneOf": [{"type": "string"}, {"type": "number"}], + } + best = self.best_match_of(instance={}, schema=schema) + self.assertEqual(best.validator, "minProperties") + + def test_if_the_most_relevant_error_is_anyOf_it_is_traversed(self): + """ + If the most relevant error is an anyOf, then we traverse its context + and select the otherwise *least* relevant error, since in this case + that means the most specific, deep, error inside the instance. + + I.e. since only one of the schemas must match, we look for the most + relevant one. 
+ """ + + schema = { + "properties": { + "foo": { + "anyOf": [ + {"type": "string"}, + {"properties": {"bar": {"type": "array"}}}, + ], + }, + }, + } + best = self.best_match_of(instance={"foo": {"bar": 12}}, schema=schema) + self.assertEqual(best.validator_value, "array") + + def test_no_anyOf_traversal_for_equally_relevant_errors(self): + """ + We don't traverse into an anyOf (as above) if all of its context errors + seem to be equally "wrong" against the instance. + """ + + schema = { + "anyOf": [ + {"type": "string"}, + {"type": "integer"}, + {"type": "object"}, + ], + } + best = self.best_match_of(instance=[], schema=schema) + self.assertEqual(best.validator, "anyOf") + + def test_anyOf_traversal_for_single_equally_relevant_error(self): + """ + We *do* traverse anyOf with a single nested error, even though it is + vacuously equally relevant to itself. + """ + + schema = { + "anyOf": [ + {"type": "string"}, + ], + } + best = self.best_match_of(instance=[], schema=schema) + self.assertEqual(best.validator, "type") + + def test_anyOf_traversal_for_single_sibling_errors(self): + """ + We *do* traverse anyOf with a single subschema that fails multiple + times (e.g. on multiple items). + """ + + schema = { + "anyOf": [ + {"items": {"const": 37}}, + ], + } + best = self.best_match_of(instance=[12, 12], schema=schema) + self.assertEqual(best.validator, "const") + + def test_anyOf_traversal_for_non_type_matching_sibling_errors(self): + """ + We *do* traverse anyOf with multiple subschemas when one does not type + match. + """ + + schema = { + "anyOf": [ + {"type": "object"}, + {"items": {"const": 37}}, + ], + } + best = self.best_match_of(instance=[12, 12], schema=schema) + self.assertEqual(best.validator, "const") + + def test_if_the_most_relevant_error_is_oneOf_it_is_traversed(self): + """ + If the most relevant error is an oneOf, then we traverse its context + and select the otherwise *least* relevant error, since in this case + that means the most specific, deep, error inside the instance. + + I.e. since only one of the schemas must match, we look for the most + relevant one. + """ + + schema = { + "properties": { + "foo": { + "oneOf": [ + {"type": "string"}, + {"properties": {"bar": {"type": "array"}}}, + ], + }, + }, + } + best = self.best_match_of(instance={"foo": {"bar": 12}}, schema=schema) + self.assertEqual(best.validator_value, "array") + + def test_no_oneOf_traversal_for_equally_relevant_errors(self): + """ + We don't traverse into an oneOf (as above) if all of its context errors + seem to be equally "wrong" against the instance. + """ + + schema = { + "oneOf": [ + {"type": "string"}, + {"type": "integer"}, + {"type": "object"}, + ], + } + best = self.best_match_of(instance=[], schema=schema) + self.assertEqual(best.validator, "oneOf") + + def test_oneOf_traversal_for_single_equally_relevant_error(self): + """ + We *do* traverse oneOf with a single nested error, even though it is + vacuously equally relevant to itself. + """ + + schema = { + "oneOf": [ + {"type": "string"}, + ], + } + best = self.best_match_of(instance=[], schema=schema) + self.assertEqual(best.validator, "type") + + def test_oneOf_traversal_for_single_sibling_errors(self): + """ + We *do* traverse oneOf with a single subschema that fails multiple + times (e.g. on multiple items). 
+ """ + + schema = { + "oneOf": [ + {"items": {"const": 37}}, + ], + } + best = self.best_match_of(instance=[12, 12], schema=schema) + self.assertEqual(best.validator, "const") + + def test_oneOf_traversal_for_non_type_matching_sibling_errors(self): + """ + We *do* traverse oneOf with multiple subschemas when one does not type + match. + """ + + schema = { + "oneOf": [ + {"type": "object"}, + {"items": {"const": 37}}, + ], + } + best = self.best_match_of(instance=[12, 12], schema=schema) + self.assertEqual(best.validator, "const") + + def test_if_the_most_relevant_error_is_allOf_it_is_traversed(self): + """ + Now, if the error is allOf, we traverse but select the *most* relevant + error from the context, because all schemas here must match anyways. + """ + + schema = { + "properties": { + "foo": { + "allOf": [ + {"type": "string"}, + {"properties": {"bar": {"type": "array"}}}, + ], + }, + }, + } + best = self.best_match_of(instance={"foo": {"bar": 12}}, schema=schema) + self.assertEqual(best.validator_value, "string") + + def test_nested_context_for_oneOf(self): + """ + We traverse into nested contexts (a oneOf containing an error in a + nested oneOf here). + """ + + schema = { + "properties": { + "foo": { + "oneOf": [ + {"type": "string"}, + { + "oneOf": [ + {"type": "string"}, + { + "properties": { + "bar": {"type": "array"}, + }, + }, + ], + }, + ], + }, + }, + } + best = self.best_match_of(instance={"foo": {"bar": 12}}, schema=schema) + self.assertEqual(best.validator_value, "array") + + def test_it_prioritizes_matching_types(self): + schema = { + "properties": { + "foo": { + "anyOf": [ + {"type": "array", "minItems": 2}, + {"type": "string", "minLength": 10}, + ], + }, + }, + } + best = self.best_match_of(instance={"foo": "bar"}, schema=schema) + self.assertEqual(best.validator, "minLength") + + reordered = { + "properties": { + "foo": { + "anyOf": [ + {"type": "string", "minLength": 10}, + {"type": "array", "minItems": 2}, + ], + }, + }, + } + best = self.best_match_of(instance={"foo": "bar"}, schema=reordered) + self.assertEqual(best.validator, "minLength") + + def test_it_prioritizes_matching_union_types(self): + schema = { + "properties": { + "foo": { + "anyOf": [ + {"type": ["array", "object"], "minItems": 2}, + {"type": ["integer", "string"], "minLength": 10}, + ], + }, + }, + } + best = self.best_match_of(instance={"foo": "bar"}, schema=schema) + self.assertEqual(best.validator, "minLength") + + reordered = { + "properties": { + "foo": { + "anyOf": [ + {"type": "string", "minLength": 10}, + {"type": "array", "minItems": 2}, + ], + }, + }, + } + best = self.best_match_of(instance={"foo": "bar"}, schema=reordered) + self.assertEqual(best.validator, "minLength") + + def test_boolean_schemas(self): + schema = {"properties": {"foo": False}} + best = self.best_match_of(instance={"foo": "bar"}, schema=schema) + self.assertIsNone(best.validator) + + def test_one_error(self): + validator = _LATEST_VERSION({"minProperties": 2}) + error, = validator.iter_errors({}) + self.assertEqual( + exceptions.best_match(validator.iter_errors({})).validator, + "minProperties", + ) + + def test_no_errors(self): + validator = _LATEST_VERSION({}) + self.assertIsNone(exceptions.best_match(validator.iter_errors({}))) + + +class TestByRelevance(TestCase): + def test_short_paths_are_better_matches(self): + shallow = exceptions.ValidationError("Oh no!", path=["baz"]) + deep = exceptions.ValidationError("Oh yes!", path=["foo", "bar"]) + match = max([shallow, deep], key=exceptions.relevance) + 
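+        # relevance is the default key used by best_match; the shallower
+        # error should win regardless of the order the errors are passed in.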
self.assertIs(match, shallow) + + match = max([deep, shallow], key=exceptions.relevance) + self.assertIs(match, shallow) + + def test_global_errors_are_even_better_matches(self): + shallow = exceptions.ValidationError("Oh no!", path=[]) + deep = exceptions.ValidationError("Oh yes!", path=["foo"]) + + errors = sorted([shallow, deep], key=exceptions.relevance) + self.assertEqual( + [list(error.path) for error in errors], + [["foo"], []], + ) + + errors = sorted([deep, shallow], key=exceptions.relevance) + self.assertEqual( + [list(error.path) for error in errors], + [["foo"], []], + ) + + def test_weak_keywords_are_lower_priority(self): + weak = exceptions.ValidationError("Oh no!", path=[], validator="a") + normal = exceptions.ValidationError("Oh yes!", path=[], validator="b") + + best_match = exceptions.by_relevance(weak="a") + + match = max([weak, normal], key=best_match) + self.assertIs(match, normal) + + match = max([normal, weak], key=best_match) + self.assertIs(match, normal) + + def test_strong_keywords_are_higher_priority(self): + weak = exceptions.ValidationError("Oh no!", path=[], validator="a") + normal = exceptions.ValidationError("Oh yes!", path=[], validator="b") + strong = exceptions.ValidationError("Oh fine!", path=[], validator="c") + + best_match = exceptions.by_relevance(weak="a", strong="c") + + match = max([weak, normal, strong], key=best_match) + self.assertIs(match, strong) + + match = max([strong, normal, weak], key=best_match) + self.assertIs(match, strong) + + +class TestErrorTree(TestCase): + def test_it_knows_how_many_total_errors_it_contains(self): + # FIXME: #442 + errors = [ + exceptions.ValidationError("Something", validator=i) + for i in range(8) + ] + tree = exceptions.ErrorTree(errors) + self.assertEqual(tree.total_errors, 8) + + def test_it_contains_an_item_if_the_item_had_an_error(self): + errors = [exceptions.ValidationError("a message", path=["bar"])] + tree = exceptions.ErrorTree(errors) + self.assertIn("bar", tree) + + def test_it_does_not_contain_an_item_if_the_item_had_no_error(self): + errors = [exceptions.ValidationError("a message", path=["bar"])] + tree = exceptions.ErrorTree(errors) + self.assertNotIn("foo", tree) + + def test_keywords_that_failed_appear_in_errors_dict(self): + error = exceptions.ValidationError("a message", validator="foo") + tree = exceptions.ErrorTree([error]) + self.assertEqual(tree.errors, {"foo": error}) + + def test_it_creates_a_child_tree_for_each_nested_path(self): + errors = [ + exceptions.ValidationError("a bar message", path=["bar"]), + exceptions.ValidationError("a bar -> 0 message", path=["bar", 0]), + ] + tree = exceptions.ErrorTree(errors) + self.assertIn(0, tree["bar"]) + self.assertNotIn(1, tree["bar"]) + + def test_children_have_their_errors_dicts_built(self): + e1, e2 = ( + exceptions.ValidationError("1", validator="foo", path=["bar", 0]), + exceptions.ValidationError("2", validator="quux", path=["bar", 0]), + ) + tree = exceptions.ErrorTree([e1, e2]) + self.assertEqual(tree["bar"][0].errors, {"foo": e1, "quux": e2}) + + def test_multiple_errors_with_instance(self): + e1, e2 = ( + exceptions.ValidationError( + "1", + validator="foo", + path=["bar", "bar2"], + instance="i1"), + exceptions.ValidationError( + "2", + validator="quux", + path=["foobar", 2], + instance="i2"), + ) + exceptions.ErrorTree([e1, e2]) + + def test_it_does_not_contain_subtrees_that_are_not_in_the_instance(self): + error = exceptions.ValidationError("123", validator="foo", instance=[]) + tree = exceptions.ErrorTree([error]) + + with 
self.assertRaises(IndexError): + tree[0] + + def test_if_its_in_the_tree_anyhow_it_does_not_raise_an_error(self): + """ + If a keyword refers to a path that isn't in the instance, the + tree still properly returns a subtree for that path. + """ + + error = exceptions.ValidationError( + "a message", validator="foo", instance={}, path=["foo"], + ) + tree = exceptions.ErrorTree([error]) + self.assertIsInstance(tree["foo"], exceptions.ErrorTree) + + def test_iter(self): + e1, e2 = ( + exceptions.ValidationError( + "1", + validator="foo", + path=["bar", "bar2"], + instance="i1"), + exceptions.ValidationError( + "2", + validator="quux", + path=["foobar", 2], + instance="i2"), + ) + tree = exceptions.ErrorTree([e1, e2]) + self.assertEqual(set(tree), {"bar", "foobar"}) + + def test_repr_single(self): + error = exceptions.ValidationError( + "1", + validator="foo", + path=["bar", "bar2"], + instance="i1", + ) + tree = exceptions.ErrorTree([error]) + self.assertEqual(repr(tree), "<ErrorTree (1 total error)>") + + def test_repr_multiple(self): + e1, e2 = ( + exceptions.ValidationError( + "1", + validator="foo", + path=["bar", "bar2"], + instance="i1"), + exceptions.ValidationError( + "2", + validator="quux", + path=["foobar", 2], + instance="i2"), + ) + tree = exceptions.ErrorTree([e1, e2]) + self.assertEqual(repr(tree), "<ErrorTree (2 total errors)>") + + def test_repr_empty(self): + tree = exceptions.ErrorTree([]) + self.assertEqual(repr(tree), "<ErrorTree (0 total errors)>") + + +class TestErrorInitReprStr(TestCase): + def make_error(self, **kwargs): + defaults = dict( + message="hello", + validator="type", + validator_value="string", + instance=5, + schema={"type": "string"}, + ) + defaults.update(kwargs) + return exceptions.ValidationError(**defaults) + + def assertShows(self, expected, **kwargs): + expected = textwrap.dedent(expected).rstrip("\n") + + error = self.make_error(**kwargs) + message_line, _, rest = str(error).partition("\n") + self.assertEqual(message_line, error.message) + self.assertEqual(rest, expected) + + def test_it_calls_super_and_sets_args(self): + error = self.make_error() + self.assertGreater(len(error.args), 1) + + def test_repr(self): + self.assertEqual( + repr(exceptions.ValidationError(message="Hello!")), + "<ValidationError: 'Hello!'>", + ) + + def test_unset_error(self): + error = exceptions.ValidationError("message") + self.assertEqual(str(error), "message") + + kwargs = { + "validator": "type", + "validator_value": "string", + "instance": 5, + "schema": {"type": "string"}, + } + # Just the message should show if any of the attributes are unset + for attr in kwargs: + k = dict(kwargs) + del k[attr] + error = exceptions.ValidationError("message", **k) + self.assertEqual(str(error), "message") + + def test_empty_paths(self): + self.assertShows( + """ + Failed validating 'type' in schema: + {'type': 'string'} + + On instance: + 5 + """, + path=[], + schema_path=[], + ) + + def test_one_item_paths(self): + self.assertShows( + """ + Failed validating 'type' in schema: + {'type': 'string'} + + On instance[0]: + 5 + """, + path=[0], + schema_path=["items"], + ) + + def test_multiple_item_paths(self): + self.assertShows( + """ + Failed validating 'type' in schema['items'][0]: + {'type': 'string'} + + On instance[0]['a']: + 5 + """, + path=[0, "a"], + schema_path=["items", 0, 1], + ) + + def test_uses_pprint(self): + self.assertShows( + """ + Failed validating 'maxLength' in schema: + {0: 0, + 1: 1, + 2: 2, + 3: 3, + 4: 4, + 5: 5, + 6: 6, + 7: 7, + 8: 8, + 9: 9, + 10: 10, + 
11: 11, + 12: 12, + 13: 13, + 14: 14, + 15: 15, + 16: 16, + 17: 17, + 18: 18, + 19: 19} + + On instance: + [0, + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + 9, + 10, + 11, + 12, + 13, + 14, + 15, + 16, + 17, + 18, + 19, + 20, + 21, + 22, + 23, + 24] + """, + instance=list(range(25)), + schema=dict(zip(range(20), range(20))), + validator="maxLength", + ) + + def test_does_not_reorder_dicts(self): + self.assertShows( + """ + Failed validating 'type' in schema: + {'do': 3, 'not': 7, 'sort': 37, 'me': 73} + + On instance: + {'here': 73, 'too': 37, 'no': 7, 'sorting': 3} + """, + schema={ + "do": 3, + "not": 7, + "sort": 37, + "me": 73, + }, + instance={ + "here": 73, + "too": 37, + "no": 7, + "sorting": 3, + }, + ) + + def test_str_works_with_instances_having_overriden_eq_operator(self): + """ + Check for #164 which rendered exceptions unusable when a + `ValidationError` involved instances with an `__eq__` method + that returned truthy values. + """ + + class DontEQMeBro: + def __eq__(this, other): # pragma: no cover + self.fail("Don't!") + + def __ne__(this, other): # pragma: no cover + self.fail("Don't!") + + instance = DontEQMeBro() + error = exceptions.ValidationError( + "a message", + validator="foo", + instance=instance, + validator_value="some", + schema="schema", + ) + self.assertIn(repr(instance), str(error)) + + +class TestHashable(TestCase): + def test_hashable(self): + {exceptions.ValidationError("")} + {exceptions.SchemaError("")} diff --git a/.venv/lib/python3.12/site-packages/jsonschema/tests/test_format.py b/.venv/lib/python3.12/site-packages/jsonschema/tests/test_format.py new file mode 100644 index 00000000..d829f984 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/jsonschema/tests/test_format.py @@ -0,0 +1,91 @@ +""" +Tests for the parts of jsonschema related to the :kw:`format` keyword. 
+""" + +from unittest import TestCase + +from jsonschema import FormatChecker, ValidationError +from jsonschema.exceptions import FormatError +from jsonschema.validators import Draft4Validator + +BOOM = ValueError("Boom!") +BANG = ZeroDivisionError("Bang!") + + +def boom(thing): + if thing == "bang": + raise BANG + raise BOOM + + +class TestFormatChecker(TestCase): + def test_it_can_validate_no_formats(self): + checker = FormatChecker(formats=()) + self.assertFalse(checker.checkers) + + def test_it_raises_a_key_error_for_unknown_formats(self): + with self.assertRaises(KeyError): + FormatChecker(formats=["o noes"]) + + def test_it_can_register_cls_checkers(self): + original = dict(FormatChecker.checkers) + self.addCleanup(FormatChecker.checkers.pop, "boom") + with self.assertWarns(DeprecationWarning): + FormatChecker.cls_checks("boom")(boom) + self.assertEqual( + FormatChecker.checkers, + dict(original, boom=(boom, ())), + ) + + def test_it_can_register_checkers(self): + checker = FormatChecker() + checker.checks("boom")(boom) + self.assertEqual( + checker.checkers, + dict(FormatChecker.checkers, boom=(boom, ())), + ) + + def test_it_catches_registered_errors(self): + checker = FormatChecker() + checker.checks("boom", raises=type(BOOM))(boom) + + with self.assertRaises(FormatError) as cm: + checker.check(instance=12, format="boom") + + self.assertIs(cm.exception.cause, BOOM) + self.assertIs(cm.exception.__cause__, BOOM) + self.assertEqual(str(cm.exception), "12 is not a 'boom'") + + # Unregistered errors should not be caught + with self.assertRaises(type(BANG)): + checker.check(instance="bang", format="boom") + + def test_format_error_causes_become_validation_error_causes(self): + checker = FormatChecker() + checker.checks("boom", raises=ValueError)(boom) + validator = Draft4Validator({"format": "boom"}, format_checker=checker) + + with self.assertRaises(ValidationError) as cm: + validator.validate("BOOM") + + self.assertIs(cm.exception.cause, BOOM) + self.assertIs(cm.exception.__cause__, BOOM) + + def test_format_checkers_come_with_defaults(self): + # This is bad :/ but relied upon. + # The docs for quite awhile recommended people do things like + # validate(..., format_checker=FormatChecker()) + # We should change that, but we can't without deprecation... + checker = FormatChecker() + with self.assertRaises(FormatError): + checker.check(instance="not-an-ipv4", format="ipv4") + + def test_repr(self): + checker = FormatChecker(formats=()) + checker.checks("foo")(lambda thing: True) # pragma: no cover + checker.checks("bar")(lambda thing: True) # pragma: no cover + checker.checks("baz")(lambda thing: True) # pragma: no cover + self.assertEqual( + repr(checker), + "<FormatChecker checkers=['bar', 'baz', 'foo']>", + ) diff --git a/.venv/lib/python3.12/site-packages/jsonschema/tests/test_jsonschema_test_suite.py b/.venv/lib/python3.12/site-packages/jsonschema/tests/test_jsonschema_test_suite.py new file mode 100644 index 00000000..282c1369 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/jsonschema/tests/test_jsonschema_test_suite.py @@ -0,0 +1,269 @@ +""" +Test runner for the JSON Schema official test suite + +Tests comprehensive correctness of each draft's validator. + +See https://github.com/json-schema-org/JSON-Schema-Test-Suite for details. 
+""" + +import sys + +from jsonschema.tests._suite import Suite +import jsonschema + +SUITE = Suite() +DRAFT3 = SUITE.version(name="draft3") +DRAFT4 = SUITE.version(name="draft4") +DRAFT6 = SUITE.version(name="draft6") +DRAFT7 = SUITE.version(name="draft7") +DRAFT201909 = SUITE.version(name="draft2019-09") +DRAFT202012 = SUITE.version(name="draft2020-12") + + +def skip(message, **kwargs): + def skipper(test): + if all(value == getattr(test, attr) for attr, value in kwargs.items()): + return message + return skipper + + +def missing_format(Validator): + def missing_format(test): # pragma: no cover + schema = test.schema + if ( + schema is True + or schema is False + or "format" not in schema + or schema["format"] in Validator.FORMAT_CHECKER.checkers + or test.valid + ): + return + + return f"Format checker {schema['format']!r} not found." + return missing_format + + +def complex_email_validation(test): + if test.subject != "email": + return + + message = "Complex email validation is (intentionally) unsupported." + return skip( + message=message, + description="an invalid domain", + )(test) or skip( + message=message, + description="an invalid IPv4-address-literal", + )(test) or skip( + message=message, + description="dot after local part is not valid", + )(test) or skip( + message=message, + description="dot before local part is not valid", + )(test) or skip( + message=message, + description="two subsequent dots inside local part are not valid", + )(test) + + +if sys.version_info < (3, 9): # pragma: no cover + message = "Rejecting leading zeros is 3.9+" + allowed_leading_zeros = skip( + message=message, + subject="ipv4", + description="invalid leading zeroes, as they are treated as octals", + ) +else: + def allowed_leading_zeros(test): # pragma: no cover + return + + +def leap_second(test): + message = "Leap seconds are unsupported." 
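+    # Chain skip() checks with `or`: each one returns the skip message for a
+    # matching test and None otherwise, so the first match wins.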
+ return skip( + message=message, + subject="time", + description="a valid time string with leap second", + )(test) or skip( + message=message, + subject="time", + description="a valid time string with leap second, Zulu", + )(test) or skip( + message=message, + subject="time", + description="a valid time string with leap second with offset", + )(test) or skip( + message=message, + subject="time", + description="valid leap second, positive time-offset", + )(test) or skip( + message=message, + subject="time", + description="valid leap second, negative time-offset", + )(test) or skip( + message=message, + subject="time", + description="valid leap second, large positive time-offset", + )(test) or skip( + message=message, + subject="time", + description="valid leap second, large negative time-offset", + )(test) or skip( + message=message, + subject="time", + description="valid leap second, zero time-offset", + )(test) or skip( + message=message, + subject="date-time", + description="a valid date-time with a leap second, UTC", + )(test) or skip( + message=message, + subject="date-time", + description="a valid date-time with a leap second, with minus offset", + )(test) + + +TestDraft3 = DRAFT3.to_unittest_testcase( + DRAFT3.cases(), + DRAFT3.format_cases(), + DRAFT3.optional_cases_of(name="bignum"), + DRAFT3.optional_cases_of(name="non-bmp-regex"), + DRAFT3.optional_cases_of(name="zeroTerminatedFloats"), + Validator=jsonschema.Draft3Validator, + format_checker=jsonschema.Draft3Validator.FORMAT_CHECKER, + skip=lambda test: ( + missing_format(jsonschema.Draft3Validator)(test) + or complex_email_validation(test) + ), +) + + +TestDraft4 = DRAFT4.to_unittest_testcase( + DRAFT4.cases(), + DRAFT4.format_cases(), + DRAFT4.optional_cases_of(name="bignum"), + DRAFT4.optional_cases_of(name="float-overflow"), + DRAFT4.optional_cases_of(name="id"), + DRAFT4.optional_cases_of(name="non-bmp-regex"), + DRAFT4.optional_cases_of(name="zeroTerminatedFloats"), + Validator=jsonschema.Draft4Validator, + format_checker=jsonschema.Draft4Validator.FORMAT_CHECKER, + skip=lambda test: ( + allowed_leading_zeros(test) + or leap_second(test) + or missing_format(jsonschema.Draft4Validator)(test) + or complex_email_validation(test) + ), +) + + +TestDraft6 = DRAFT6.to_unittest_testcase( + DRAFT6.cases(), + DRAFT6.format_cases(), + DRAFT6.optional_cases_of(name="bignum"), + DRAFT6.optional_cases_of(name="float-overflow"), + DRAFT6.optional_cases_of(name="id"), + DRAFT6.optional_cases_of(name="non-bmp-regex"), + Validator=jsonschema.Draft6Validator, + format_checker=jsonschema.Draft6Validator.FORMAT_CHECKER, + skip=lambda test: ( + allowed_leading_zeros(test) + or leap_second(test) + or missing_format(jsonschema.Draft6Validator)(test) + or complex_email_validation(test) + ), +) + + +TestDraft7 = DRAFT7.to_unittest_testcase( + DRAFT7.cases(), + DRAFT7.format_cases(), + DRAFT7.optional_cases_of(name="bignum"), + DRAFT7.optional_cases_of(name="cross-draft"), + DRAFT7.optional_cases_of(name="float-overflow"), + DRAFT6.optional_cases_of(name="id"), + DRAFT7.optional_cases_of(name="non-bmp-regex"), + DRAFT7.optional_cases_of(name="unknownKeyword"), + Validator=jsonschema.Draft7Validator, + format_checker=jsonschema.Draft7Validator.FORMAT_CHECKER, + skip=lambda test: ( + allowed_leading_zeros(test) + or leap_second(test) + or missing_format(jsonschema.Draft7Validator)(test) + or complex_email_validation(test) + ), +) + + +TestDraft201909 = DRAFT201909.to_unittest_testcase( + DRAFT201909.cases(), + 
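+    # Optional cases come from the suite's optional/ directory and only run
+    # when opted into by name below.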
DRAFT201909.optional_cases_of(name="anchor"), + DRAFT201909.optional_cases_of(name="bignum"), + DRAFT201909.optional_cases_of(name="cross-draft"), + DRAFT201909.optional_cases_of(name="float-overflow"), + DRAFT201909.optional_cases_of(name="id"), + DRAFT201909.optional_cases_of(name="no-schema"), + DRAFT201909.optional_cases_of(name="non-bmp-regex"), + DRAFT201909.optional_cases_of(name="refOfUnknownKeyword"), + DRAFT201909.optional_cases_of(name="unknownKeyword"), + Validator=jsonschema.Draft201909Validator, + skip=skip( + message="Vocabulary support is still in-progress.", + subject="vocabulary", + description=( + "no validation: invalid number, but it still validates" + ), + ), +) + + +TestDraft201909Format = DRAFT201909.to_unittest_testcase( + DRAFT201909.format_cases(), + name="TestDraft201909Format", + Validator=jsonschema.Draft201909Validator, + format_checker=jsonschema.Draft201909Validator.FORMAT_CHECKER, + skip=lambda test: ( + complex_email_validation(test) + or allowed_leading_zeros(test) + or leap_second(test) + or missing_format(jsonschema.Draft201909Validator)(test) + or complex_email_validation(test) + ), +) + + +TestDraft202012 = DRAFT202012.to_unittest_testcase( + DRAFT202012.cases(), + DRAFT201909.optional_cases_of(name="anchor"), + DRAFT202012.optional_cases_of(name="bignum"), + DRAFT202012.optional_cases_of(name="cross-draft"), + DRAFT202012.optional_cases_of(name="float-overflow"), + DRAFT202012.optional_cases_of(name="id"), + DRAFT202012.optional_cases_of(name="no-schema"), + DRAFT202012.optional_cases_of(name="non-bmp-regex"), + DRAFT202012.optional_cases_of(name="refOfUnknownKeyword"), + DRAFT202012.optional_cases_of(name="unknownKeyword"), + Validator=jsonschema.Draft202012Validator, + skip=skip( + message="Vocabulary support is still in-progress.", + subject="vocabulary", + description=( + "no validation: invalid number, but it still validates" + ), + ), +) + + +TestDraft202012Format = DRAFT202012.to_unittest_testcase( + DRAFT202012.format_cases(), + name="TestDraft202012Format", + Validator=jsonschema.Draft202012Validator, + format_checker=jsonschema.Draft202012Validator.FORMAT_CHECKER, + skip=lambda test: ( + complex_email_validation(test) + or allowed_leading_zeros(test) + or leap_second(test) + or missing_format(jsonschema.Draft202012Validator)(test) + or complex_email_validation(test) + ), +) diff --git a/.venv/lib/python3.12/site-packages/jsonschema/tests/test_types.py b/.venv/lib/python3.12/site-packages/jsonschema/tests/test_types.py new file mode 100644 index 00000000..bd97b180 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/jsonschema/tests/test_types.py @@ -0,0 +1,221 @@ +""" +Tests for the `TypeChecker`-based type interface. + +The actual correctness of the type checking is handled in +`test_jsonschema_test_suite`; these tests check that TypeChecker +functions correctly at a more granular level. 
+""" +from collections import namedtuple +from unittest import TestCase + +from jsonschema import ValidationError, _keywords +from jsonschema._types import TypeChecker +from jsonschema.exceptions import UndefinedTypeCheck, UnknownType +from jsonschema.validators import Draft202012Validator, extend + + +def equals_2(checker, instance): + return instance == 2 + + +def is_namedtuple(instance): + return isinstance(instance, tuple) and getattr(instance, "_fields", None) + + +def is_object_or_named_tuple(checker, instance): + if Draft202012Validator.TYPE_CHECKER.is_type(instance, "object"): + return True + return is_namedtuple(instance) + + +class TestTypeChecker(TestCase): + def test_is_type(self): + checker = TypeChecker({"two": equals_2}) + self.assertEqual( + ( + checker.is_type(instance=2, type="two"), + checker.is_type(instance="bar", type="two"), + ), + (True, False), + ) + + def test_is_unknown_type(self): + with self.assertRaises(UndefinedTypeCheck) as e: + TypeChecker().is_type(4, "foobar") + self.assertIn( + "'foobar' is unknown to this type checker", + str(e.exception), + ) + self.assertTrue( + e.exception.__suppress_context__, + msg="Expected the internal KeyError to be hidden.", + ) + + def test_checks_can_be_added_at_init(self): + checker = TypeChecker({"two": equals_2}) + self.assertEqual(checker, TypeChecker().redefine("two", equals_2)) + + def test_redefine_existing_type(self): + self.assertEqual( + TypeChecker().redefine("two", object()).redefine("two", equals_2), + TypeChecker().redefine("two", equals_2), + ) + + def test_remove(self): + self.assertEqual( + TypeChecker({"two": equals_2}).remove("two"), + TypeChecker(), + ) + + def test_remove_unknown_type(self): + with self.assertRaises(UndefinedTypeCheck) as context: + TypeChecker().remove("foobar") + self.assertIn("foobar", str(context.exception)) + + def test_redefine_many(self): + self.assertEqual( + TypeChecker().redefine_many({"foo": int, "bar": str}), + TypeChecker().redefine("foo", int).redefine("bar", str), + ) + + def test_remove_multiple(self): + self.assertEqual( + TypeChecker({"foo": int, "bar": str}).remove("foo", "bar"), + TypeChecker(), + ) + + def test_type_check_can_raise_key_error(self): + """ + Make sure no one writes: + + try: + self._type_checkers[type](...) + except KeyError: + + ignoring the fact that the function itself can raise that. 
+ """ + + error = KeyError("Stuff") + + def raises_keyerror(checker, instance): + raise error + + with self.assertRaises(KeyError) as context: + TypeChecker({"foo": raises_keyerror}).is_type(4, "foo") + + self.assertIs(context.exception, error) + + def test_repr(self): + checker = TypeChecker({"foo": is_namedtuple, "bar": is_namedtuple}) + self.assertEqual(repr(checker), "<TypeChecker types={'bar', 'foo'}>") + + +class TestCustomTypes(TestCase): + def test_simple_type_can_be_extended(self): + def int_or_str_int(checker, instance): + if not isinstance(instance, (int, str)): + return False + try: + int(instance) + except ValueError: + return False + return True + + CustomValidator = extend( + Draft202012Validator, + type_checker=Draft202012Validator.TYPE_CHECKER.redefine( + "integer", int_or_str_int, + ), + ) + validator = CustomValidator({"type": "integer"}) + + validator.validate(4) + validator.validate("4") + + with self.assertRaises(ValidationError): + validator.validate(4.4) + + with self.assertRaises(ValidationError): + validator.validate("foo") + + def test_object_can_be_extended(self): + schema = {"type": "object"} + + Point = namedtuple("Point", ["x", "y"]) + + type_checker = Draft202012Validator.TYPE_CHECKER.redefine( + "object", is_object_or_named_tuple, + ) + + CustomValidator = extend( + Draft202012Validator, + type_checker=type_checker, + ) + validator = CustomValidator(schema) + + validator.validate(Point(x=4, y=5)) + + def test_object_extensions_require_custom_validators(self): + schema = {"type": "object", "required": ["x"]} + + type_checker = Draft202012Validator.TYPE_CHECKER.redefine( + "object", is_object_or_named_tuple, + ) + + CustomValidator = extend( + Draft202012Validator, + type_checker=type_checker, + ) + validator = CustomValidator(schema) + + Point = namedtuple("Point", ["x", "y"]) + # Cannot handle required + with self.assertRaises(ValidationError): + validator.validate(Point(x=4, y=5)) + + def test_object_extensions_can_handle_custom_validators(self): + schema = { + "type": "object", + "required": ["x"], + "properties": {"x": {"type": "integer"}}, + } + + type_checker = Draft202012Validator.TYPE_CHECKER.redefine( + "object", is_object_or_named_tuple, + ) + + def coerce_named_tuple(fn): + def coerced(validator, value, instance, schema): + if is_namedtuple(instance): + instance = instance._asdict() + return fn(validator, value, instance, schema) + return coerced + + required = coerce_named_tuple(_keywords.required) + properties = coerce_named_tuple(_keywords.properties) + + CustomValidator = extend( + Draft202012Validator, + type_checker=type_checker, + validators={"required": required, "properties": properties}, + ) + + validator = CustomValidator(schema) + + Point = namedtuple("Point", ["x", "y"]) + # Can now process required and properties + validator.validate(Point(x=4, y=5)) + + with self.assertRaises(ValidationError): + validator.validate(Point(x="not an integer", y=5)) + + # As well as still handle objects. 
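+        # (plain dicts pass through the wrapped keywords unchanged)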
+ validator.validate({"x": 4, "y": 5}) + + with self.assertRaises(ValidationError): + validator.validate({"x": "not an integer", "y": 5}) + + def test_unknown_type(self): + with self.assertRaises(UnknownType) as e: + Draft202012Validator({}).is_type(12, "some unknown type") + self.assertIn("'some unknown type'", str(e.exception)) diff --git a/.venv/lib/python3.12/site-packages/jsonschema/tests/test_utils.py b/.venv/lib/python3.12/site-packages/jsonschema/tests/test_utils.py new file mode 100644 index 00000000..d9764b0f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/jsonschema/tests/test_utils.py @@ -0,0 +1,138 @@ +from math import nan +from unittest import TestCase + +from jsonschema._utils import equal + + +class TestEqual(TestCase): + def test_none(self): + self.assertTrue(equal(None, None)) + + def test_nan(self): + self.assertTrue(equal(nan, nan)) + + +class TestDictEqual(TestCase): + def test_equal_dictionaries(self): + dict_1 = {"a": "b", "c": "d"} + dict_2 = {"c": "d", "a": "b"} + self.assertTrue(equal(dict_1, dict_2)) + + def test_equal_dictionaries_with_nan(self): + dict_1 = {"a": nan, "c": "d"} + dict_2 = {"c": "d", "a": nan} + self.assertTrue(equal(dict_1, dict_2)) + + def test_missing_key(self): + dict_1 = {"a": "b", "c": "d"} + dict_2 = {"c": "d", "x": "b"} + self.assertFalse(equal(dict_1, dict_2)) + + def test_additional_key(self): + dict_1 = {"a": "b", "c": "d"} + dict_2 = {"c": "d", "a": "b", "x": "x"} + self.assertFalse(equal(dict_1, dict_2)) + + def test_missing_value(self): + dict_1 = {"a": "b", "c": "d"} + dict_2 = {"c": "d", "a": "x"} + self.assertFalse(equal(dict_1, dict_2)) + + def test_empty_dictionaries(self): + dict_1 = {} + dict_2 = {} + self.assertTrue(equal(dict_1, dict_2)) + + def test_one_none(self): + dict_1 = None + dict_2 = {"a": "b", "c": "d"} + self.assertFalse(equal(dict_1, dict_2)) + + def test_same_item(self): + dict_1 = {"a": "b", "c": "d"} + self.assertTrue(equal(dict_1, dict_1)) + + def test_nested_equal(self): + dict_1 = {"a": {"a": "b", "c": "d"}, "c": "d"} + dict_2 = {"c": "d", "a": {"a": "b", "c": "d"}} + self.assertTrue(equal(dict_1, dict_2)) + + def test_nested_dict_unequal(self): + dict_1 = {"a": {"a": "b", "c": "d"}, "c": "d"} + dict_2 = {"c": "d", "a": {"a": "b", "c": "x"}} + self.assertFalse(equal(dict_1, dict_2)) + + def test_mixed_nested_equal(self): + dict_1 = {"a": ["a", "b", "c", "d"], "c": "d"} + dict_2 = {"c": "d", "a": ["a", "b", "c", "d"]} + self.assertTrue(equal(dict_1, dict_2)) + + def test_nested_list_unequal(self): + dict_1 = {"a": ["a", "b", "c", "d"], "c": "d"} + dict_2 = {"c": "d", "a": ["b", "c", "d", "a"]} + self.assertFalse(equal(dict_1, dict_2)) + + +class TestListEqual(TestCase): + def test_equal_lists(self): + list_1 = ["a", "b", "c"] + list_2 = ["a", "b", "c"] + self.assertTrue(equal(list_1, list_2)) + + def test_equal_lists_with_nan(self): + list_1 = ["a", nan, "c"] + list_2 = ["a", nan, "c"] + self.assertTrue(equal(list_1, list_2)) + + def test_unsorted_lists(self): + list_1 = ["a", "b", "c"] + list_2 = ["b", "b", "a"] + self.assertFalse(equal(list_1, list_2)) + + def test_first_list_larger(self): + list_1 = ["a", "b", "c"] + list_2 = ["a", "b"] + self.assertFalse(equal(list_1, list_2)) + + def test_second_list_larger(self): + list_1 = ["a", "b"] + list_2 = ["a", "b", "c"] + self.assertFalse(equal(list_1, list_2)) + + def test_list_with_none_unequal(self): + list_1 = ["a", "b", None] + list_2 = ["a", "b", "c"] + self.assertFalse(equal(list_1, list_2)) + + list_1 = ["a", "b", None] + list_2 = [None, "b", 
"c"] + self.assertFalse(equal(list_1, list_2)) + + def test_list_with_none_equal(self): + list_1 = ["a", None, "c"] + list_2 = ["a", None, "c"] + self.assertTrue(equal(list_1, list_2)) + + def test_empty_list(self): + list_1 = [] + list_2 = [] + self.assertTrue(equal(list_1, list_2)) + + def test_one_none(self): + list_1 = None + list_2 = [] + self.assertFalse(equal(list_1, list_2)) + + def test_same_list(self): + list_1 = ["a", "b", "c"] + self.assertTrue(equal(list_1, list_1)) + + def test_equal_nested_lists(self): + list_1 = ["a", ["b", "c"], "d"] + list_2 = ["a", ["b", "c"], "d"] + self.assertTrue(equal(list_1, list_2)) + + def test_unequal_nested_lists(self): + list_1 = ["a", ["b", "c"], "d"] + list_2 = ["a", [], "c"] + self.assertFalse(equal(list_1, list_2)) diff --git a/.venv/lib/python3.12/site-packages/jsonschema/tests/test_validators.py b/.venv/lib/python3.12/site-packages/jsonschema/tests/test_validators.py new file mode 100644 index 00000000..28cc4027 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/jsonschema/tests/test_validators.py @@ -0,0 +1,2575 @@ +from __future__ import annotations + +from collections import deque, namedtuple +from contextlib import contextmanager +from decimal import Decimal +from io import BytesIO +from typing import Any +from unittest import TestCase, mock +from urllib.request import pathname2url +import json +import os +import sys +import tempfile +import warnings + +from attrs import define, field +from referencing.jsonschema import DRAFT202012 +import referencing.exceptions + +from jsonschema import ( + FormatChecker, + TypeChecker, + exceptions, + protocols, + validators, +) + + +def fail(validator, errors, instance, schema): + for each in errors: + each.setdefault("message", "You told me to fail!") + yield exceptions.ValidationError(**each) + + +class TestCreateAndExtend(TestCase): + def setUp(self): + self.addCleanup( + self.assertEqual, + validators._META_SCHEMAS, + dict(validators._META_SCHEMAS), + ) + self.addCleanup( + self.assertEqual, + validators._VALIDATORS, + dict(validators._VALIDATORS), + ) + + self.meta_schema = {"$id": "some://meta/schema"} + self.validators = {"fail": fail} + self.type_checker = TypeChecker() + self.Validator = validators.create( + meta_schema=self.meta_schema, + validators=self.validators, + type_checker=self.type_checker, + ) + + def test_attrs(self): + self.assertEqual( + ( + self.Validator.VALIDATORS, + self.Validator.META_SCHEMA, + self.Validator.TYPE_CHECKER, + ), ( + self.validators, + self.meta_schema, + self.type_checker, + ), + ) + + def test_init(self): + schema = {"fail": []} + self.assertEqual(self.Validator(schema).schema, schema) + + def test_iter_errors_successful(self): + schema = {"fail": []} + validator = self.Validator(schema) + + errors = list(validator.iter_errors("hello")) + self.assertEqual(errors, []) + + def test_iter_errors_one_error(self): + schema = {"fail": [{"message": "Whoops!"}]} + validator = self.Validator(schema) + + expected_error = exceptions.ValidationError( + "Whoops!", + instance="goodbye", + schema=schema, + validator="fail", + validator_value=[{"message": "Whoops!"}], + schema_path=deque(["fail"]), + ) + + errors = list(validator.iter_errors("goodbye")) + self.assertEqual(len(errors), 1) + self.assertEqual(errors[0]._contents(), expected_error._contents()) + + def test_iter_errors_multiple_errors(self): + schema = { + "fail": [ + {"message": "First"}, + {"message": "Second!", "validator": "asdf"}, + {"message": "Third"}, + ], + } + validator = self.Validator(schema) 
+ + errors = list(validator.iter_errors("goodbye")) + self.assertEqual(len(errors), 3) + + def test_if_a_version_is_provided_it_is_registered(self): + Validator = validators.create( + meta_schema={"$id": "something"}, + version="my version", + ) + self.addCleanup(validators._META_SCHEMAS.pop, "something") + self.addCleanup(validators._VALIDATORS.pop, "my version") + self.assertEqual(Validator.__name__, "MyVersionValidator") + self.assertEqual(Validator.__qualname__, "MyVersionValidator") + + def test_repr(self): + Validator = validators.create( + meta_schema={"$id": "something"}, + version="my version", + ) + self.addCleanup(validators._META_SCHEMAS.pop, "something") + self.addCleanup(validators._VALIDATORS.pop, "my version") + self.assertEqual( + repr(Validator({})), + "MyVersionValidator(schema={}, format_checker=None)", + ) + + def test_long_repr(self): + Validator = validators.create( + meta_schema={"$id": "something"}, + version="my version", + ) + self.addCleanup(validators._META_SCHEMAS.pop, "something") + self.addCleanup(validators._VALIDATORS.pop, "my version") + self.assertEqual( + repr(Validator({"a": list(range(1000))})), ( + "MyVersionValidator(schema={'a': [0, 1, 2, 3, 4, 5, ...]}, " + "format_checker=None)" + ), + ) + + def test_repr_no_version(self): + Validator = validators.create(meta_schema={}) + self.assertEqual( + repr(Validator({})), + "Validator(schema={}, format_checker=None)", + ) + + def test_dashes_are_stripped_from_validator_names(self): + Validator = validators.create( + meta_schema={"$id": "something"}, + version="foo-bar", + ) + self.addCleanup(validators._META_SCHEMAS.pop, "something") + self.addCleanup(validators._VALIDATORS.pop, "foo-bar") + self.assertEqual(Validator.__qualname__, "FooBarValidator") + + def test_if_a_version_is_not_provided_it_is_not_registered(self): + original = dict(validators._META_SCHEMAS) + validators.create(meta_schema={"id": "id"}) + self.assertEqual(validators._META_SCHEMAS, original) + + def test_validates_registers_meta_schema_id(self): + meta_schema_key = "meta schema id" + my_meta_schema = {"id": meta_schema_key} + + validators.create( + meta_schema=my_meta_schema, + version="my version", + id_of=lambda s: s.get("id", ""), + ) + self.addCleanup(validators._META_SCHEMAS.pop, meta_schema_key) + self.addCleanup(validators._VALIDATORS.pop, "my version") + + self.assertIn(meta_schema_key, validators._META_SCHEMAS) + + def test_validates_registers_meta_schema_draft6_id(self): + meta_schema_key = "meta schema $id" + my_meta_schema = {"$id": meta_schema_key} + + validators.create( + meta_schema=my_meta_schema, + version="my version", + ) + self.addCleanup(validators._META_SCHEMAS.pop, meta_schema_key) + self.addCleanup(validators._VALIDATORS.pop, "my version") + + self.assertIn(meta_schema_key, validators._META_SCHEMAS) + + def test_create_default_types(self): + Validator = validators.create(meta_schema={}, validators=()) + self.assertTrue( + all( + Validator({}).is_type(instance=instance, type=type) + for type, instance in [ + ("array", []), + ("boolean", True), + ("integer", 12), + ("null", None), + ("number", 12.0), + ("object", {}), + ("string", "foo"), + ] + ), + ) + + def test_check_schema_with_different_metaschema(self): + """ + One can create a validator class whose metaschema uses a different + dialect than itself. 
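The class-creation tests above drive validators.create through a synthetic "fail" keyword. As a rough, hedged sketch of that same public API, the snippet below builds a one-keyword validator class; the "maxWords" keyword name and the meta-schema $id are invented for illustration and are not part of the vendored test suite.

from jsonschema import exceptions, validators

def max_words(validator, limit, instance, schema):
    # Keyword callables yield one ValidationError per failure.
    if validator.is_type(instance, "string") and len(instance.split()) > limit:
        yield exceptions.ValidationError(
            f"{instance!r} has more than {limit} words",
        )

MaxWordsValidator = validators.create(
    meta_schema={"$id": "urn:example:max-words"},  # illustrative only
    validators={"maxWords": max_words},
)

assert MaxWordsValidator({"maxWords": 2}).is_valid("two words")
assert not MaxWordsValidator({"maxWords": 2}).is_valid("one too many")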
+ """ + + NoEmptySchemasValidator = validators.create( + meta_schema={ + "$schema": validators.Draft202012Validator.META_SCHEMA["$id"], + "not": {"const": {}}, + }, + ) + NoEmptySchemasValidator.check_schema({"foo": "bar"}) + + with self.assertRaises(exceptions.SchemaError): + NoEmptySchemasValidator.check_schema({}) + + NoEmptySchemasValidator({"foo": "bar"}).validate("foo") + + def test_check_schema_with_different_metaschema_defaults_to_self(self): + """ + A validator whose metaschema doesn't declare $schema defaults to its + own validation behavior, not the latest "normal" specification. + """ + + NoEmptySchemasValidator = validators.create( + meta_schema={"fail": [{"message": "Meta schema whoops!"}]}, + validators={"fail": fail}, + ) + with self.assertRaises(exceptions.SchemaError): + NoEmptySchemasValidator.check_schema({}) + + def test_extend(self): + original = dict(self.Validator.VALIDATORS) + new = object() + + Extended = validators.extend( + self.Validator, + validators={"new": new}, + ) + self.assertEqual( + ( + Extended.VALIDATORS, + Extended.META_SCHEMA, + Extended.TYPE_CHECKER, + self.Validator.VALIDATORS, + ), ( + dict(original, new=new), + self.Validator.META_SCHEMA, + self.Validator.TYPE_CHECKER, + original, + ), + ) + + def test_extend_idof(self): + """ + Extending a validator preserves its notion of schema IDs. + """ + def id_of(schema): + return schema.get("__test__", self.Validator.ID_OF(schema)) + correct_id = "the://correct/id/" + meta_schema = { + "$id": "the://wrong/id/", + "__test__": correct_id, + } + Original = validators.create( + meta_schema=meta_schema, + validators=self.validators, + type_checker=self.type_checker, + id_of=id_of, + ) + self.assertEqual(Original.ID_OF(Original.META_SCHEMA), correct_id) + + Derived = validators.extend(Original) + self.assertEqual(Derived.ID_OF(Derived.META_SCHEMA), correct_id) + + def test_extend_applicable_validators(self): + """ + Extending a validator preserves its notion of applicable validators. 
+ """ + + schema = { + "$defs": {"test": {"type": "number"}}, + "$ref": "#/$defs/test", + "maximum": 1, + } + + draft4 = validators.Draft4Validator(schema) + self.assertTrue(draft4.is_valid(37)) # as $ref ignores siblings + + Derived = validators.extend(validators.Draft4Validator) + self.assertTrue(Derived(schema).is_valid(37)) + + +class TestValidationErrorMessages(TestCase): + def message_for(self, instance, schema, *args, **kwargs): + cls = kwargs.pop("cls", validators._LATEST_VERSION) + cls.check_schema(schema) + validator = cls(schema, *args, **kwargs) + errors = list(validator.iter_errors(instance)) + self.assertTrue(errors, msg=f"No errors were raised for {instance!r}") + self.assertEqual( + len(errors), + 1, + msg=f"Expected exactly one error, found {errors!r}", + ) + return errors[0].message + + def test_single_type_failure(self): + message = self.message_for(instance=1, schema={"type": "string"}) + self.assertEqual(message, "1 is not of type 'string'") + + def test_single_type_list_failure(self): + message = self.message_for(instance=1, schema={"type": ["string"]}) + self.assertEqual(message, "1 is not of type 'string'") + + def test_multiple_type_failure(self): + types = "string", "object" + message = self.message_for(instance=1, schema={"type": list(types)}) + self.assertEqual(message, "1 is not of type 'string', 'object'") + + def test_object_with_named_type_failure(self): + schema = {"type": [{"name": "Foo", "minimum": 3}]} + message = self.message_for( + instance=1, + schema=schema, + cls=validators.Draft3Validator, + ) + self.assertEqual(message, "1 is not of type 'Foo'") + + def test_minimum(self): + message = self.message_for(instance=1, schema={"minimum": 2}) + self.assertEqual(message, "1 is less than the minimum of 2") + + def test_maximum(self): + message = self.message_for(instance=1, schema={"maximum": 0}) + self.assertEqual(message, "1 is greater than the maximum of 0") + + def test_dependencies_single_element(self): + depend, on = "bar", "foo" + schema = {"dependencies": {depend: on}} + message = self.message_for( + instance={"bar": 2}, + schema=schema, + cls=validators.Draft3Validator, + ) + self.assertEqual(message, "'foo' is a dependency of 'bar'") + + def test_object_without_title_type_failure_draft3(self): + type = {"type": [{"minimum": 3}]} + message = self.message_for( + instance=1, + schema={"type": [type]}, + cls=validators.Draft3Validator, + ) + self.assertEqual( + message, + "1 is not of type {'type': [{'minimum': 3}]}", + ) + + def test_dependencies_list_draft3(self): + depend, on = "bar", "foo" + schema = {"dependencies": {depend: [on]}} + message = self.message_for( + instance={"bar": 2}, + schema=schema, + cls=validators.Draft3Validator, + ) + self.assertEqual(message, "'foo' is a dependency of 'bar'") + + def test_dependencies_list_draft7(self): + depend, on = "bar", "foo" + schema = {"dependencies": {depend: [on]}} + message = self.message_for( + instance={"bar": 2}, + schema=schema, + cls=validators.Draft7Validator, + ) + self.assertEqual(message, "'foo' is a dependency of 'bar'") + + def test_additionalItems_single_failure(self): + message = self.message_for( + instance=[2], + schema={"items": [], "additionalItems": False}, + cls=validators.Draft3Validator, + ) + self.assertIn("(2 was unexpected)", message) + + def test_additionalItems_multiple_failures(self): + message = self.message_for( + instance=[1, 2, 3], + schema={"items": [], "additionalItems": False}, + cls=validators.Draft3Validator, + ) + self.assertIn("(1, 2, 3 were unexpected)", 
message) + + def test_additionalProperties_single_failure(self): + additional = "foo" + schema = {"additionalProperties": False} + message = self.message_for(instance={additional: 2}, schema=schema) + self.assertIn("('foo' was unexpected)", message) + + def test_additionalProperties_multiple_failures(self): + schema = {"additionalProperties": False} + message = self.message_for( + instance=dict.fromkeys(["foo", "bar"]), + schema=schema, + ) + + self.assertIn(repr("foo"), message) + self.assertIn(repr("bar"), message) + self.assertIn("were unexpected)", message) + + def test_const(self): + schema = {"const": 12} + message = self.message_for( + instance={"foo": "bar"}, + schema=schema, + ) + self.assertIn("12 was expected", message) + + def test_contains_draft_6(self): + schema = {"contains": {"const": 12}} + message = self.message_for( + instance=[2, {}, []], + schema=schema, + cls=validators.Draft6Validator, + ) + self.assertEqual( + message, + "None of [2, {}, []] are valid under the given schema", + ) + + def test_invalid_format_default_message(self): + checker = FormatChecker(formats=()) + checker.checks("thing")(lambda value: False) + + schema = {"format": "thing"} + message = self.message_for( + instance="bla", + schema=schema, + format_checker=checker, + ) + + self.assertIn(repr("bla"), message) + self.assertIn(repr("thing"), message) + self.assertIn("is not a", message) + + def test_additionalProperties_false_patternProperties(self): + schema = {"type": "object", + "additionalProperties": False, + "patternProperties": { + "^abc$": {"type": "string"}, + "^def$": {"type": "string"}, + }} + message = self.message_for( + instance={"zebra": 123}, + schema=schema, + cls=validators.Draft4Validator, + ) + self.assertEqual( + message, + "{} does not match any of the regexes: {}, {}".format( + repr("zebra"), repr("^abc$"), repr("^def$"), + ), + ) + message = self.message_for( + instance={"zebra": 123, "fish": 456}, + schema=schema, + cls=validators.Draft4Validator, + ) + self.assertEqual( + message, + "{}, {} do not match any of the regexes: {}, {}".format( + repr("fish"), repr("zebra"), repr("^abc$"), repr("^def$"), + ), + ) + + def test_False_schema(self): + message = self.message_for( + instance="something", + schema=False, + ) + self.assertEqual(message, "False schema does not allow 'something'") + + def test_multipleOf(self): + message = self.message_for( + instance=3, + schema={"multipleOf": 2}, + ) + self.assertEqual(message, "3 is not a multiple of 2") + + def test_minItems(self): + message = self.message_for(instance=[], schema={"minItems": 2}) + self.assertEqual(message, "[] is too short") + + def test_maxItems(self): + message = self.message_for(instance=[1, 2, 3], schema={"maxItems": 2}) + self.assertEqual(message, "[1, 2, 3] is too long") + + def test_minItems_1(self): + message = self.message_for(instance=[], schema={"minItems": 1}) + self.assertEqual(message, "[] should be non-empty") + + def test_maxItems_0(self): + message = self.message_for(instance=[1, 2, 3], schema={"maxItems": 0}) + self.assertEqual(message, "[1, 2, 3] is expected to be empty") + + def test_minLength(self): + message = self.message_for( + instance="", + schema={"minLength": 2}, + ) + self.assertEqual(message, "'' is too short") + + def test_maxLength(self): + message = self.message_for( + instance="abc", + schema={"maxLength": 2}, + ) + self.assertEqual(message, "'abc' is too long") + + def test_minLength_1(self): + message = self.message_for(instance="", schema={"minLength": 1}) + 
self.assertEqual(message, "'' should be non-empty") + + def test_maxLength_0(self): + message = self.message_for(instance="abc", schema={"maxLength": 0}) + self.assertEqual(message, "'abc' is expected to be empty") + + def test_minProperties(self): + message = self.message_for(instance={}, schema={"minProperties": 2}) + self.assertEqual(message, "{} does not have enough properties") + + def test_maxProperties(self): + message = self.message_for( + instance={"a": {}, "b": {}, "c": {}}, + schema={"maxProperties": 2}, + ) + self.assertEqual( + message, + "{'a': {}, 'b': {}, 'c': {}} has too many properties", + ) + + def test_minProperties_1(self): + message = self.message_for(instance={}, schema={"minProperties": 1}) + self.assertEqual(message, "{} should be non-empty") + + def test_maxProperties_0(self): + message = self.message_for( + instance={1: 2}, + schema={"maxProperties": 0}, + ) + self.assertEqual(message, "{1: 2} is expected to be empty") + + def test_prefixItems_with_items(self): + message = self.message_for( + instance=[1, 2, "foo"], + schema={"items": False, "prefixItems": [{}, {}]}, + ) + self.assertEqual( + message, + "Expected at most 2 items but found 1 extra: 'foo'", + ) + + def test_prefixItems_with_multiple_extra_items(self): + message = self.message_for( + instance=[1, 2, "foo", 5], + schema={"items": False, "prefixItems": [{}, {}]}, + ) + self.assertEqual( + message, + "Expected at most 2 items but found 2 extra: ['foo', 5]", + ) + + def test_pattern(self): + message = self.message_for( + instance="bbb", + schema={"pattern": "^a*$"}, + ) + self.assertEqual(message, "'bbb' does not match '^a*$'") + + def test_does_not_contain(self): + message = self.message_for( + instance=[], + schema={"contains": {"type": "string"}}, + ) + self.assertEqual( + message, + "[] does not contain items matching the given schema", + ) + + def test_contains_too_few(self): + message = self.message_for( + instance=["foo", 1], + schema={"contains": {"type": "string"}, "minContains": 2}, + ) + self.assertEqual( + message, + "Too few items match the given schema " + "(expected at least 2 but only 1 matched)", + ) + + def test_contains_too_few_both_constrained(self): + message = self.message_for( + instance=["foo", 1], + schema={ + "contains": {"type": "string"}, + "minContains": 2, + "maxContains": 4, + }, + ) + self.assertEqual( + message, + "Too few items match the given schema (expected at least 2 but " + "only 1 matched)", + ) + + def test_contains_too_many(self): + message = self.message_for( + instance=["foo", "bar", "baz"], + schema={"contains": {"type": "string"}, "maxContains": 2}, + ) + self.assertEqual( + message, + "Too many items match the given schema (expected at most 2)", + ) + + def test_contains_too_many_both_constrained(self): + message = self.message_for( + instance=["foo"] * 5, + schema={ + "contains": {"type": "string"}, + "minContains": 2, + "maxContains": 4, + }, + ) + self.assertEqual( + message, + "Too many items match the given schema (expected at most 4)", + ) + + def test_exclusiveMinimum(self): + message = self.message_for( + instance=3, + schema={"exclusiveMinimum": 5}, + ) + self.assertEqual( + message, + "3 is less than or equal to the minimum of 5", + ) + + def test_exclusiveMaximum(self): + message = self.message_for(instance=3, schema={"exclusiveMaximum": 2}) + self.assertEqual( + message, + "3 is greater than or equal to the maximum of 2", + ) + + def test_required(self): + message = self.message_for(instance={}, schema={"required": ["foo"]}) + 
self.assertEqual(message, "'foo' is a required property") + + def test_dependentRequired(self): + message = self.message_for( + instance={"foo": {}}, + schema={"dependentRequired": {"foo": ["bar"]}}, + ) + self.assertEqual(message, "'bar' is a dependency of 'foo'") + + def test_oneOf_matches_none(self): + message = self.message_for(instance={}, schema={"oneOf": [False]}) + self.assertEqual( + message, + "{} is not valid under any of the given schemas", + ) + + def test_oneOf_matches_too_many(self): + message = self.message_for(instance={}, schema={"oneOf": [True, True]}) + self.assertEqual(message, "{} is valid under each of True, True") + + def test_unevaluated_items(self): + schema = {"type": "array", "unevaluatedItems": False} + message = self.message_for(instance=["foo", "bar"], schema=schema) + self.assertIn( + message, + "Unevaluated items are not allowed ('foo', 'bar' were unexpected)", + ) + + def test_unevaluated_items_on_invalid_type(self): + schema = {"type": "array", "unevaluatedItems": False} + message = self.message_for(instance="foo", schema=schema) + self.assertEqual(message, "'foo' is not of type 'array'") + + def test_unevaluated_properties_invalid_against_subschema(self): + schema = { + "properties": {"foo": {"type": "string"}}, + "unevaluatedProperties": {"const": 12}, + } + message = self.message_for( + instance={ + "foo": "foo", + "bar": "bar", + "baz": 12, + }, + schema=schema, + ) + self.assertEqual( + message, + "Unevaluated properties are not valid under the given schema " + "('bar' was unevaluated and invalid)", + ) + + def test_unevaluated_properties_disallowed(self): + schema = {"type": "object", "unevaluatedProperties": False} + message = self.message_for( + instance={ + "foo": "foo", + "bar": "bar", + }, + schema=schema, + ) + self.assertEqual( + message, + "Unevaluated properties are not allowed " + "('bar', 'foo' were unexpected)", + ) + + def test_unevaluated_properties_on_invalid_type(self): + schema = {"type": "object", "unevaluatedProperties": False} + message = self.message_for(instance="foo", schema=schema) + self.assertEqual(message, "'foo' is not of type 'object'") + + def test_single_item(self): + schema = {"prefixItems": [{}], "items": False} + message = self.message_for( + instance=["foo", "bar", "baz"], + schema=schema, + ) + self.assertEqual( + message, + "Expected at most 1 item but found 2 extra: ['bar', 'baz']", + ) + + def test_heterogeneous_additionalItems_with_Items(self): + schema = {"items": [{}], "additionalItems": False} + message = self.message_for( + instance=["foo", "bar", 37], + schema=schema, + cls=validators.Draft7Validator, + ) + self.assertEqual( + message, + "Additional items are not allowed ('bar', 37 were unexpected)", + ) + + def test_heterogeneous_items_prefixItems(self): + schema = {"prefixItems": [{}], "items": False} + message = self.message_for( + instance=["foo", "bar", 37], + schema=schema, + ) + self.assertEqual( + message, + "Expected at most 1 item but found 2 extra: ['bar', 37]", + ) + + def test_heterogeneous_unevaluatedItems_prefixItems(self): + schema = {"prefixItems": [{}], "unevaluatedItems": False} + message = self.message_for( + instance=["foo", "bar", 37], + schema=schema, + ) + self.assertEqual( + message, + "Unevaluated items are not allowed ('bar', 37 were unexpected)", + ) + + def test_heterogeneous_properties_additionalProperties(self): + """ + Not valid deserialized JSON, but this should not blow up. 
+ """ + schema = {"properties": {"foo": {}}, "additionalProperties": False} + message = self.message_for( + instance={"foo": {}, "a": "baz", 37: 12}, + schema=schema, + ) + self.assertEqual( + message, + "Additional properties are not allowed (37, 'a' were unexpected)", + ) + + def test_heterogeneous_properties_unevaluatedProperties(self): + """ + Not valid deserialized JSON, but this should not blow up. + """ + schema = {"properties": {"foo": {}}, "unevaluatedProperties": False} + message = self.message_for( + instance={"foo": {}, "a": "baz", 37: 12}, + schema=schema, + ) + self.assertEqual( + message, + "Unevaluated properties are not allowed (37, 'a' were unexpected)", + ) + + +class TestValidationErrorDetails(TestCase): + # TODO: These really need unit tests for each individual keyword, rather + # than just these higher level tests. + def test_anyOf(self): + instance = 5 + schema = { + "anyOf": [ + {"minimum": 20}, + {"type": "string"}, + ], + } + + validator = validators.Draft4Validator(schema) + errors = list(validator.iter_errors(instance)) + self.assertEqual(len(errors), 1) + e = errors[0] + + self.assertEqual(e.validator, "anyOf") + self.assertEqual(e.validator_value, schema["anyOf"]) + self.assertEqual(e.instance, instance) + self.assertEqual(e.schema, schema) + self.assertIsNone(e.parent) + + self.assertEqual(e.path, deque([])) + self.assertEqual(e.relative_path, deque([])) + self.assertEqual(e.absolute_path, deque([])) + self.assertEqual(e.json_path, "$") + + self.assertEqual(e.schema_path, deque(["anyOf"])) + self.assertEqual(e.relative_schema_path, deque(["anyOf"])) + self.assertEqual(e.absolute_schema_path, deque(["anyOf"])) + + self.assertEqual(len(e.context), 2) + + e1, e2 = sorted_errors(e.context) + + self.assertEqual(e1.validator, "minimum") + self.assertEqual(e1.validator_value, schema["anyOf"][0]["minimum"]) + self.assertEqual(e1.instance, instance) + self.assertEqual(e1.schema, schema["anyOf"][0]) + self.assertIs(e1.parent, e) + + self.assertEqual(e1.path, deque([])) + self.assertEqual(e1.absolute_path, deque([])) + self.assertEqual(e1.relative_path, deque([])) + self.assertEqual(e1.json_path, "$") + + self.assertEqual(e1.schema_path, deque([0, "minimum"])) + self.assertEqual(e1.relative_schema_path, deque([0, "minimum"])) + self.assertEqual( + e1.absolute_schema_path, deque(["anyOf", 0, "minimum"]), + ) + + self.assertFalse(e1.context) + + self.assertEqual(e2.validator, "type") + self.assertEqual(e2.validator_value, schema["anyOf"][1]["type"]) + self.assertEqual(e2.instance, instance) + self.assertEqual(e2.schema, schema["anyOf"][1]) + self.assertIs(e2.parent, e) + + self.assertEqual(e2.path, deque([])) + self.assertEqual(e2.relative_path, deque([])) + self.assertEqual(e2.absolute_path, deque([])) + self.assertEqual(e2.json_path, "$") + + self.assertEqual(e2.schema_path, deque([1, "type"])) + self.assertEqual(e2.relative_schema_path, deque([1, "type"])) + self.assertEqual(e2.absolute_schema_path, deque(["anyOf", 1, "type"])) + + self.assertEqual(len(e2.context), 0) + + def test_type(self): + instance = {"foo": 1} + schema = { + "type": [ + {"type": "integer"}, + { + "type": "object", + "properties": {"foo": {"enum": [2]}}, + }, + ], + } + + validator = validators.Draft3Validator(schema) + errors = list(validator.iter_errors(instance)) + self.assertEqual(len(errors), 1) + e = errors[0] + + self.assertEqual(e.validator, "type") + self.assertEqual(e.validator_value, schema["type"]) + self.assertEqual(e.instance, instance) + self.assertEqual(e.schema, schema) + 
self.assertIsNone(e.parent) + + self.assertEqual(e.path, deque([])) + self.assertEqual(e.relative_path, deque([])) + self.assertEqual(e.absolute_path, deque([])) + self.assertEqual(e.json_path, "$") + + self.assertEqual(e.schema_path, deque(["type"])) + self.assertEqual(e.relative_schema_path, deque(["type"])) + self.assertEqual(e.absolute_schema_path, deque(["type"])) + + self.assertEqual(len(e.context), 2) + + e1, e2 = sorted_errors(e.context) + + self.assertEqual(e1.validator, "type") + self.assertEqual(e1.validator_value, schema["type"][0]["type"]) + self.assertEqual(e1.instance, instance) + self.assertEqual(e1.schema, schema["type"][0]) + self.assertIs(e1.parent, e) + + self.assertEqual(e1.path, deque([])) + self.assertEqual(e1.relative_path, deque([])) + self.assertEqual(e1.absolute_path, deque([])) + self.assertEqual(e1.json_path, "$") + + self.assertEqual(e1.schema_path, deque([0, "type"])) + self.assertEqual(e1.relative_schema_path, deque([0, "type"])) + self.assertEqual(e1.absolute_schema_path, deque(["type", 0, "type"])) + + self.assertFalse(e1.context) + + self.assertEqual(e2.validator, "enum") + self.assertEqual(e2.validator_value, [2]) + self.assertEqual(e2.instance, 1) + self.assertEqual(e2.schema, {"enum": [2]}) + self.assertIs(e2.parent, e) + + self.assertEqual(e2.path, deque(["foo"])) + self.assertEqual(e2.relative_path, deque(["foo"])) + self.assertEqual(e2.absolute_path, deque(["foo"])) + self.assertEqual(e2.json_path, "$.foo") + + self.assertEqual( + e2.schema_path, deque([1, "properties", "foo", "enum"]), + ) + self.assertEqual( + e2.relative_schema_path, deque([1, "properties", "foo", "enum"]), + ) + self.assertEqual( + e2.absolute_schema_path, + deque(["type", 1, "properties", "foo", "enum"]), + ) + + self.assertFalse(e2.context) + + def test_single_nesting(self): + instance = {"foo": 2, "bar": [1], "baz": 15, "quux": "spam"} + schema = { + "properties": { + "foo": {"type": "string"}, + "bar": {"minItems": 2}, + "baz": {"maximum": 10, "enum": [2, 4, 6, 8]}, + }, + } + + validator = validators.Draft3Validator(schema) + errors = validator.iter_errors(instance) + e1, e2, e3, e4 = sorted_errors(errors) + + self.assertEqual(e1.path, deque(["bar"])) + self.assertEqual(e2.path, deque(["baz"])) + self.assertEqual(e3.path, deque(["baz"])) + self.assertEqual(e4.path, deque(["foo"])) + + self.assertEqual(e1.relative_path, deque(["bar"])) + self.assertEqual(e2.relative_path, deque(["baz"])) + self.assertEqual(e3.relative_path, deque(["baz"])) + self.assertEqual(e4.relative_path, deque(["foo"])) + + self.assertEqual(e1.absolute_path, deque(["bar"])) + self.assertEqual(e2.absolute_path, deque(["baz"])) + self.assertEqual(e3.absolute_path, deque(["baz"])) + self.assertEqual(e4.absolute_path, deque(["foo"])) + + self.assertEqual(e1.json_path, "$.bar") + self.assertEqual(e2.json_path, "$.baz") + self.assertEqual(e3.json_path, "$.baz") + self.assertEqual(e4.json_path, "$.foo") + + self.assertEqual(e1.validator, "minItems") + self.assertEqual(e2.validator, "enum") + self.assertEqual(e3.validator, "maximum") + self.assertEqual(e4.validator, "type") + + def test_multiple_nesting(self): + instance = [1, {"foo": 2, "bar": {"baz": [1]}}, "quux"] + schema = { + "type": "string", + "items": { + "type": ["string", "object"], + "properties": { + "foo": {"enum": [1, 3]}, + "bar": { + "type": "array", + "properties": { + "bar": {"required": True}, + "baz": {"minItems": 2}, + }, + }, + }, + }, + } + + validator = validators.Draft3Validator(schema) + errors = validator.iter_errors(instance) + e1, 
e2, e3, e4, e5, e6 = sorted_errors(errors) + + self.assertEqual(e1.path, deque([])) + self.assertEqual(e2.path, deque([0])) + self.assertEqual(e3.path, deque([1, "bar"])) + self.assertEqual(e4.path, deque([1, "bar", "bar"])) + self.assertEqual(e5.path, deque([1, "bar", "baz"])) + self.assertEqual(e6.path, deque([1, "foo"])) + + self.assertEqual(e1.json_path, "$") + self.assertEqual(e2.json_path, "$[0]") + self.assertEqual(e3.json_path, "$[1].bar") + self.assertEqual(e4.json_path, "$[1].bar.bar") + self.assertEqual(e5.json_path, "$[1].bar.baz") + self.assertEqual(e6.json_path, "$[1].foo") + + self.assertEqual(e1.schema_path, deque(["type"])) + self.assertEqual(e2.schema_path, deque(["items", "type"])) + self.assertEqual( + list(e3.schema_path), ["items", "properties", "bar", "type"], + ) + self.assertEqual( + list(e4.schema_path), + ["items", "properties", "bar", "properties", "bar", "required"], + ) + self.assertEqual( + list(e5.schema_path), + ["items", "properties", "bar", "properties", "baz", "minItems"], + ) + self.assertEqual( + list(e6.schema_path), ["items", "properties", "foo", "enum"], + ) + + self.assertEqual(e1.validator, "type") + self.assertEqual(e2.validator, "type") + self.assertEqual(e3.validator, "type") + self.assertEqual(e4.validator, "required") + self.assertEqual(e5.validator, "minItems") + self.assertEqual(e6.validator, "enum") + + def test_recursive(self): + schema = { + "definitions": { + "node": { + "anyOf": [{ + "type": "object", + "required": ["name", "children"], + "properties": { + "name": { + "type": "string", + }, + "children": { + "type": "object", + "patternProperties": { + "^.*$": { + "$ref": "#/definitions/node", + }, + }, + }, + }, + }], + }, + }, + "type": "object", + "required": ["root"], + "properties": {"root": {"$ref": "#/definitions/node"}}, + } + + instance = { + "root": { + "name": "root", + "children": { + "a": { + "name": "a", + "children": { + "ab": { + "name": "ab", + # missing "children" + }, + }, + }, + }, + }, + } + validator = validators.Draft4Validator(schema) + + e, = validator.iter_errors(instance) + self.assertEqual(e.absolute_path, deque(["root"])) + self.assertEqual( + e.absolute_schema_path, deque(["properties", "root", "anyOf"]), + ) + self.assertEqual(e.json_path, "$.root") + + e1, = e.context + self.assertEqual(e1.absolute_path, deque(["root", "children", "a"])) + self.assertEqual( + e1.absolute_schema_path, deque( + [ + "properties", + "root", + "anyOf", + 0, + "properties", + "children", + "patternProperties", + "^.*$", + "anyOf", + ], + ), + ) + self.assertEqual(e1.json_path, "$.root.children.a") + + e2, = e1.context + self.assertEqual( + e2.absolute_path, deque( + ["root", "children", "a", "children", "ab"], + ), + ) + self.assertEqual( + e2.absolute_schema_path, deque( + [ + "properties", + "root", + "anyOf", + 0, + "properties", + "children", + "patternProperties", + "^.*$", + "anyOf", + 0, + "properties", + "children", + "patternProperties", + "^.*$", + "anyOf", + ], + ), + ) + self.assertEqual(e2.json_path, "$.root.children.a.children.ab") + + def test_additionalProperties(self): + instance = {"bar": "bar", "foo": 2} + schema = {"additionalProperties": {"type": "integer", "minimum": 5}} + + validator = validators.Draft3Validator(schema) + errors = validator.iter_errors(instance) + e1, e2 = sorted_errors(errors) + + self.assertEqual(e1.path, deque(["bar"])) + self.assertEqual(e2.path, deque(["foo"])) + + self.assertEqual(e1.json_path, "$.bar") + self.assertEqual(e2.json_path, "$.foo") + + self.assertEqual(e1.validator, 
"type") + self.assertEqual(e2.validator, "minimum") + + def test_patternProperties(self): + instance = {"bar": 1, "foo": 2} + schema = { + "patternProperties": { + "bar": {"type": "string"}, + "foo": {"minimum": 5}, + }, + } + + validator = validators.Draft3Validator(schema) + errors = validator.iter_errors(instance) + e1, e2 = sorted_errors(errors) + + self.assertEqual(e1.path, deque(["bar"])) + self.assertEqual(e2.path, deque(["foo"])) + + self.assertEqual(e1.json_path, "$.bar") + self.assertEqual(e2.json_path, "$.foo") + + self.assertEqual(e1.validator, "type") + self.assertEqual(e2.validator, "minimum") + + def test_additionalItems(self): + instance = ["foo", 1] + schema = { + "items": [], + "additionalItems": {"type": "integer", "minimum": 5}, + } + + validator = validators.Draft3Validator(schema) + errors = validator.iter_errors(instance) + e1, e2 = sorted_errors(errors) + + self.assertEqual(e1.path, deque([0])) + self.assertEqual(e2.path, deque([1])) + + self.assertEqual(e1.json_path, "$[0]") + self.assertEqual(e2.json_path, "$[1]") + + self.assertEqual(e1.validator, "type") + self.assertEqual(e2.validator, "minimum") + + def test_additionalItems_with_items(self): + instance = ["foo", "bar", 1] + schema = { + "items": [{}], + "additionalItems": {"type": "integer", "minimum": 5}, + } + + validator = validators.Draft3Validator(schema) + errors = validator.iter_errors(instance) + e1, e2 = sorted_errors(errors) + + self.assertEqual(e1.path, deque([1])) + self.assertEqual(e2.path, deque([2])) + + self.assertEqual(e1.json_path, "$[1]") + self.assertEqual(e2.json_path, "$[2]") + + self.assertEqual(e1.validator, "type") + self.assertEqual(e2.validator, "minimum") + + def test_propertyNames(self): + instance = {"foo": 12} + schema = {"propertyNames": {"not": {"const": "foo"}}} + + validator = validators.Draft7Validator(schema) + error, = validator.iter_errors(instance) + + self.assertEqual(error.validator, "not") + self.assertEqual( + error.message, + "'foo' should not be valid under {'const': 'foo'}", + ) + self.assertEqual(error.path, deque([])) + self.assertEqual(error.json_path, "$") + self.assertEqual(error.schema_path, deque(["propertyNames", "not"])) + + def test_if_then(self): + schema = { + "if": {"const": 12}, + "then": {"const": 13}, + } + + validator = validators.Draft7Validator(schema) + error, = validator.iter_errors(12) + + self.assertEqual(error.validator, "const") + self.assertEqual(error.message, "13 was expected") + self.assertEqual(error.path, deque([])) + self.assertEqual(error.json_path, "$") + self.assertEqual(error.schema_path, deque(["then", "const"])) + + def test_if_else(self): + schema = { + "if": {"const": 12}, + "else": {"const": 13}, + } + + validator = validators.Draft7Validator(schema) + error, = validator.iter_errors(15) + + self.assertEqual(error.validator, "const") + self.assertEqual(error.message, "13 was expected") + self.assertEqual(error.path, deque([])) + self.assertEqual(error.json_path, "$") + self.assertEqual(error.schema_path, deque(["else", "const"])) + + def test_boolean_schema_False(self): + validator = validators.Draft7Validator(False) + error, = validator.iter_errors(12) + + self.assertEqual( + ( + error.message, + error.validator, + error.validator_value, + error.instance, + error.schema, + error.schema_path, + error.json_path, + ), + ( + "False schema does not allow 12", + None, + None, + 12, + False, + deque([]), + "$", + ), + ) + + def test_ref(self): + ref, schema = "someRef", {"additionalProperties": {"type": "integer"}} + validator = 
validators.Draft7Validator( + {"$ref": ref}, + resolver=validators._RefResolver("", {}, store={ref: schema}), + ) + error, = validator.iter_errors({"foo": "notAnInteger"}) + + self.assertEqual( + ( + error.message, + error.validator, + error.validator_value, + error.instance, + error.absolute_path, + error.schema, + error.schema_path, + error.json_path, + ), + ( + "'notAnInteger' is not of type 'integer'", + "type", + "integer", + "notAnInteger", + deque(["foo"]), + {"type": "integer"}, + deque(["additionalProperties", "type"]), + "$.foo", + ), + ) + + def test_prefixItems(self): + schema = {"prefixItems": [{"type": "string"}, {}, {}, {"maximum": 3}]} + validator = validators.Draft202012Validator(schema) + type_error, min_error = validator.iter_errors([1, 2, "foo", 5]) + self.assertEqual( + ( + type_error.message, + type_error.validator, + type_error.validator_value, + type_error.instance, + type_error.absolute_path, + type_error.schema, + type_error.schema_path, + type_error.json_path, + ), + ( + "1 is not of type 'string'", + "type", + "string", + 1, + deque([0]), + {"type": "string"}, + deque(["prefixItems", 0, "type"]), + "$[0]", + ), + ) + self.assertEqual( + ( + min_error.message, + min_error.validator, + min_error.validator_value, + min_error.instance, + min_error.absolute_path, + min_error.schema, + min_error.schema_path, + min_error.json_path, + ), + ( + "5 is greater than the maximum of 3", + "maximum", + 3, + 5, + deque([3]), + {"maximum": 3}, + deque(["prefixItems", 3, "maximum"]), + "$[3]", + ), + ) + + def test_prefixItems_with_items(self): + schema = { + "items": {"type": "string"}, + "prefixItems": [{}], + } + validator = validators.Draft202012Validator(schema) + e1, e2 = validator.iter_errors(["foo", 2, "bar", 4, "baz"]) + self.assertEqual( + ( + e1.message, + e1.validator, + e1.validator_value, + e1.instance, + e1.absolute_path, + e1.schema, + e1.schema_path, + e1.json_path, + ), + ( + "2 is not of type 'string'", + "type", + "string", + 2, + deque([1]), + {"type": "string"}, + deque(["items", "type"]), + "$[1]", + ), + ) + self.assertEqual( + ( + e2.message, + e2.validator, + e2.validator_value, + e2.instance, + e2.absolute_path, + e2.schema, + e2.schema_path, + e2.json_path, + ), + ( + "4 is not of type 'string'", + "type", + "string", + 4, + deque([3]), + {"type": "string"}, + deque(["items", "type"]), + "$[3]", + ), + ) + + def test_contains_too_many(self): + """ + `contains` + `maxContains` produces only one error, even if there are + many more incorrectly matching elements. 
+ """ + schema = {"contains": {"type": "string"}, "maxContains": 2} + validator = validators.Draft202012Validator(schema) + error, = validator.iter_errors(["foo", 2, "bar", 4, "baz", "quux"]) + self.assertEqual( + ( + error.message, + error.validator, + error.validator_value, + error.instance, + error.absolute_path, + error.schema, + error.schema_path, + error.json_path, + ), + ( + "Too many items match the given schema (expected at most 2)", + "maxContains", + 2, + ["foo", 2, "bar", 4, "baz", "quux"], + deque([]), + {"contains": {"type": "string"}, "maxContains": 2}, + deque(["contains"]), + "$", + ), + ) + + def test_contains_too_few(self): + schema = {"contains": {"type": "string"}, "minContains": 2} + validator = validators.Draft202012Validator(schema) + error, = validator.iter_errors(["foo", 2, 4]) + self.assertEqual( + ( + error.message, + error.validator, + error.validator_value, + error.instance, + error.absolute_path, + error.schema, + error.schema_path, + error.json_path, + ), + ( + ( + "Too few items match the given schema " + "(expected at least 2 but only 1 matched)" + ), + "minContains", + 2, + ["foo", 2, 4], + deque([]), + {"contains": {"type": "string"}, "minContains": 2}, + deque(["contains"]), + "$", + ), + ) + + def test_contains_none(self): + schema = {"contains": {"type": "string"}, "minContains": 2} + validator = validators.Draft202012Validator(schema) + error, = validator.iter_errors([2, 4]) + self.assertEqual( + ( + error.message, + error.validator, + error.validator_value, + error.instance, + error.absolute_path, + error.schema, + error.schema_path, + error.json_path, + ), + ( + "[2, 4] does not contain items matching the given schema", + "contains", + {"type": "string"}, + [2, 4], + deque([]), + {"contains": {"type": "string"}, "minContains": 2}, + deque(["contains"]), + "$", + ), + ) + + def test_ref_sibling(self): + schema = { + "$defs": {"foo": {"required": ["bar"]}}, + "properties": { + "aprop": { + "$ref": "#/$defs/foo", + "required": ["baz"], + }, + }, + } + + validator = validators.Draft202012Validator(schema) + e1, e2 = validator.iter_errors({"aprop": {}}) + self.assertEqual( + ( + e1.message, + e1.validator, + e1.validator_value, + e1.instance, + e1.absolute_path, + e1.schema, + e1.schema_path, + e1.relative_schema_path, + e1.json_path, + ), + ( + "'bar' is a required property", + "required", + ["bar"], + {}, + deque(["aprop"]), + {"required": ["bar"]}, + deque(["properties", "aprop", "required"]), + deque(["properties", "aprop", "required"]), + "$.aprop", + ), + ) + self.assertEqual( + ( + e2.message, + e2.validator, + e2.validator_value, + e2.instance, + e2.absolute_path, + e2.schema, + e2.schema_path, + e2.relative_schema_path, + e2.json_path, + ), + ( + "'baz' is a required property", + "required", + ["baz"], + {}, + deque(["aprop"]), + {"$ref": "#/$defs/foo", "required": ["baz"]}, + deque(["properties", "aprop", "required"]), + deque(["properties", "aprop", "required"]), + "$.aprop", + ), + ) + + +class MetaSchemaTestsMixin: + # TODO: These all belong upstream + def test_invalid_properties(self): + with self.assertRaises(exceptions.SchemaError): + self.Validator.check_schema({"properties": 12}) + + def test_minItems_invalid_string(self): + with self.assertRaises(exceptions.SchemaError): + # needs to be an integer + self.Validator.check_schema({"minItems": "1"}) + + def test_enum_allows_empty_arrays(self): + """ + Technically, all the spec says is they SHOULD have elements, not MUST. + + (As of Draft 6. Previous drafts do say MUST). + + See #529. 
+ """ + if self.Validator in { + validators.Draft3Validator, + validators.Draft4Validator, + }: + with self.assertRaises(exceptions.SchemaError): + self.Validator.check_schema({"enum": []}) + else: + self.Validator.check_schema({"enum": []}) + + def test_enum_allows_non_unique_items(self): + """ + Technically, all the spec says is they SHOULD be unique, not MUST. + + (As of Draft 6. Previous drafts do say MUST). + + See #529. + """ + if self.Validator in { + validators.Draft3Validator, + validators.Draft4Validator, + }: + with self.assertRaises(exceptions.SchemaError): + self.Validator.check_schema({"enum": [12, 12]}) + else: + self.Validator.check_schema({"enum": [12, 12]}) + + def test_schema_with_invalid_regex(self): + with self.assertRaises(exceptions.SchemaError): + self.Validator.check_schema({"pattern": "*notaregex"}) + + def test_schema_with_invalid_regex_with_disabled_format_validation(self): + self.Validator.check_schema( + {"pattern": "*notaregex"}, + format_checker=None, + ) + + +class ValidatorTestMixin(MetaSchemaTestsMixin): + def test_it_implements_the_validator_protocol(self): + self.assertIsInstance(self.Validator({}), protocols.Validator) + + def test_valid_instances_are_valid(self): + schema, instance = self.valid + self.assertTrue(self.Validator(schema).is_valid(instance)) + + def test_invalid_instances_are_not_valid(self): + schema, instance = self.invalid + self.assertFalse(self.Validator(schema).is_valid(instance)) + + def test_non_existent_properties_are_ignored(self): + self.Validator({object(): object()}).validate(instance=object()) + + def test_evolve(self): + schema, format_checker = {"type": "integer"}, FormatChecker() + original = self.Validator( + schema, + format_checker=format_checker, + ) + new = original.evolve( + schema={"type": "string"}, + format_checker=self.Validator.FORMAT_CHECKER, + ) + + expected = self.Validator( + {"type": "string"}, + format_checker=self.Validator.FORMAT_CHECKER, + _resolver=new._resolver, + ) + + self.assertEqual(new, expected) + self.assertNotEqual(new, original) + + def test_evolve_with_subclass(self): + """ + Subclassing validators isn't supported public API, but some users have + done it, because we don't actually error entirely when it's done :/ + + We need to deprecate doing so first to help as many of these users + ensure they can move to supported APIs, but this test ensures that in + the interim, we haven't broken those users. 
+ """ + + with self.assertWarns(DeprecationWarning): + @define + class OhNo(self.Validator): + foo = field(factory=lambda: [1, 2, 3]) + _bar = field(default=37) + + validator = OhNo({}, bar=12) + self.assertEqual(validator.foo, [1, 2, 3]) + + new = validator.evolve(schema={"type": "integer"}) + self.assertEqual(new.foo, [1, 2, 3]) + self.assertEqual(new._bar, 12) + + def test_is_type_is_true_for_valid_type(self): + self.assertTrue(self.Validator({}).is_type("foo", "string")) + + def test_is_type_is_false_for_invalid_type(self): + self.assertFalse(self.Validator({}).is_type("foo", "array")) + + def test_is_type_evades_bool_inheriting_from_int(self): + self.assertFalse(self.Validator({}).is_type(True, "integer")) + self.assertFalse(self.Validator({}).is_type(True, "number")) + + def test_it_can_validate_with_decimals(self): + schema = {"items": {"type": "number"}} + Validator = validators.extend( + self.Validator, + type_checker=self.Validator.TYPE_CHECKER.redefine( + "number", + lambda checker, thing: isinstance( + thing, (int, float, Decimal), + ) and not isinstance(thing, bool), + ), + ) + + validator = Validator(schema) + validator.validate([1, 1.1, Decimal(1) / Decimal(8)]) + + invalid = ["foo", {}, [], True, None] + self.assertEqual( + [error.instance for error in validator.iter_errors(invalid)], + invalid, + ) + + def test_it_returns_true_for_formats_it_does_not_know_about(self): + validator = self.Validator( + {"format": "carrot"}, format_checker=FormatChecker(), + ) + validator.validate("bugs") + + def test_it_does_not_validate_formats_by_default(self): + validator = self.Validator({}) + self.assertIsNone(validator.format_checker) + + def test_it_validates_formats_if_a_checker_is_provided(self): + checker = FormatChecker() + bad = ValueError("Bad!") + + @checker.checks("foo", raises=ValueError) + def check(value): + if value == "good": + return True + elif value == "bad": + raise bad + else: # pragma: no cover + self.fail(f"What is {value}? [Baby Don't Hurt Me]") + + validator = self.Validator( + {"format": "foo"}, format_checker=checker, + ) + + validator.validate("good") + with self.assertRaises(exceptions.ValidationError) as cm: + validator.validate("bad") + + # Make sure original cause is attached + self.assertIs(cm.exception.cause, bad) + + def test_non_string_custom_type(self): + non_string_type = object() + schema = {"type": [non_string_type]} + Crazy = validators.extend( + self.Validator, + type_checker=self.Validator.TYPE_CHECKER.redefine( + non_string_type, + lambda checker, thing: isinstance(thing, int), + ), + ) + Crazy(schema).validate(15) + + def test_it_properly_formats_tuples_in_errors(self): + """ + A tuple instance properly formats validation errors for uniqueItems. 
+ + See #224 + """ + TupleValidator = validators.extend( + self.Validator, + type_checker=self.Validator.TYPE_CHECKER.redefine( + "array", + lambda checker, thing: isinstance(thing, tuple), + ), + ) + with self.assertRaises(exceptions.ValidationError) as e: + TupleValidator({"uniqueItems": True}).validate((1, 1)) + self.assertIn("(1, 1) has non-unique elements", str(e.exception)) + + def test_check_redefined_sequence(self): + """ + Allow array to validate against another defined sequence type + """ + schema = {"type": "array", "uniqueItems": True} + MyMapping = namedtuple("MyMapping", "a, b") + Validator = validators.extend( + self.Validator, + type_checker=self.Validator.TYPE_CHECKER.redefine_many( + { + "array": lambda checker, thing: isinstance( + thing, (list, deque), + ), + "object": lambda checker, thing: isinstance( + thing, (dict, MyMapping), + ), + }, + ), + ) + validator = Validator(schema) + + valid_instances = [ + deque(["a", None, "1", "", True]), + deque([[False], [0]]), + [deque([False]), deque([0])], + [[deque([False])], [deque([0])]], + [[[[[deque([False])]]]], [[[[deque([0])]]]]], + [deque([deque([False])]), deque([deque([0])])], + [MyMapping("a", 0), MyMapping("a", False)], + [ + MyMapping("a", [deque([0])]), + MyMapping("a", [deque([False])]), + ], + [ + MyMapping("a", [MyMapping("a", deque([0]))]), + MyMapping("a", [MyMapping("a", deque([False]))]), + ], + [deque(deque(deque([False]))), deque(deque(deque([0])))], + ] + + for instance in valid_instances: + validator.validate(instance) + + invalid_instances = [ + deque(["a", "b", "a"]), + deque([[False], [False]]), + [deque([False]), deque([False])], + [[deque([False])], [deque([False])]], + [[[[[deque([False])]]]], [[[[deque([False])]]]]], + [deque([deque([False])]), deque([deque([False])])], + [MyMapping("a", False), MyMapping("a", False)], + [ + MyMapping("a", [deque([False])]), + MyMapping("a", [deque([False])]), + ], + [ + MyMapping("a", [MyMapping("a", deque([False]))]), + MyMapping("a", [MyMapping("a", deque([False]))]), + ], + [deque(deque(deque([False]))), deque(deque(deque([False])))], + ] + + for instance in invalid_instances: + with self.assertRaises(exceptions.ValidationError): + validator.validate(instance) + + def test_it_creates_a_ref_resolver_if_not_provided(self): + with self.assertWarns(DeprecationWarning): + resolver = self.Validator({}).resolver + self.assertIsInstance(resolver, validators._RefResolver) + + def test_it_upconverts_from_deprecated_RefResolvers(self): + ref, schema = "someCoolRef", {"type": "integer"} + resolver = validators._RefResolver("", {}, store={ref: schema}) + validator = self.Validator({"$ref": ref}, resolver=resolver) + + with self.assertRaises(exceptions.ValidationError): + validator.validate(None) + + def test_it_upconverts_from_yet_older_deprecated_legacy_RefResolvers(self): + """ + Legacy RefResolvers support only the context manager form of + resolution. + """ + + class LegacyRefResolver: + @contextmanager + def resolving(this, ref): + self.assertEqual(ref, "the ref") + yield {"type": "integer"} + + resolver = LegacyRefResolver() + schema = {"$ref": "the ref"} + + with self.assertRaises(exceptions.ValidationError): + self.Validator(schema, resolver=resolver).validate(None) + + +class AntiDraft6LeakMixin: + """ + Make sure functionality from draft 6 doesn't leak backwards in time. 
+ """ + + def test_True_is_not_a_schema(self): + with self.assertRaises(exceptions.SchemaError) as e: + self.Validator.check_schema(True) + self.assertIn("True is not of type", str(e.exception)) + + def test_False_is_not_a_schema(self): + with self.assertRaises(exceptions.SchemaError) as e: + self.Validator.check_schema(False) + self.assertIn("False is not of type", str(e.exception)) + + def test_True_is_not_a_schema_even_if_you_forget_to_check(self): + with self.assertRaises(Exception) as e: + self.Validator(True).validate(12) + self.assertNotIsInstance(e.exception, exceptions.ValidationError) + + def test_False_is_not_a_schema_even_if_you_forget_to_check(self): + with self.assertRaises(Exception) as e: + self.Validator(False).validate(12) + self.assertNotIsInstance(e.exception, exceptions.ValidationError) + + +class TestDraft3Validator(AntiDraft6LeakMixin, ValidatorTestMixin, TestCase): + Validator = validators.Draft3Validator + valid: tuple[dict, dict] = ({}, {}) + invalid = {"type": "integer"}, "foo" + + def test_any_type_is_valid_for_type_any(self): + validator = self.Validator({"type": "any"}) + validator.validate(object()) + + def test_any_type_is_redefinable(self): + """ + Sigh, because why not. + """ + Crazy = validators.extend( + self.Validator, + type_checker=self.Validator.TYPE_CHECKER.redefine( + "any", lambda checker, thing: isinstance(thing, int), + ), + ) + validator = Crazy({"type": "any"}) + validator.validate(12) + with self.assertRaises(exceptions.ValidationError): + validator.validate("foo") + + def test_is_type_is_true_for_any_type(self): + self.assertTrue(self.Validator({"type": "any"}).is_valid(object())) + + def test_is_type_does_not_evade_bool_if_it_is_being_tested(self): + self.assertTrue(self.Validator({}).is_type(True, "boolean")) + self.assertTrue(self.Validator({"type": "any"}).is_valid(True)) + + +class TestDraft4Validator(AntiDraft6LeakMixin, ValidatorTestMixin, TestCase): + Validator = validators.Draft4Validator + valid: tuple[dict, dict] = ({}, {}) + invalid = {"type": "integer"}, "foo" + + +class TestDraft6Validator(ValidatorTestMixin, TestCase): + Validator = validators.Draft6Validator + valid: tuple[dict, dict] = ({}, {}) + invalid = {"type": "integer"}, "foo" + + +class TestDraft7Validator(ValidatorTestMixin, TestCase): + Validator = validators.Draft7Validator + valid: tuple[dict, dict] = ({}, {}) + invalid = {"type": "integer"}, "foo" + + +class TestDraft201909Validator(ValidatorTestMixin, TestCase): + Validator = validators.Draft201909Validator + valid: tuple[dict, dict] = ({}, {}) + invalid = {"type": "integer"}, "foo" + + +class TestDraft202012Validator(ValidatorTestMixin, TestCase): + Validator = validators.Draft202012Validator + valid: tuple[dict, dict] = ({}, {}) + invalid = {"type": "integer"}, "foo" + + +class TestLatestValidator(TestCase): + """ + These really apply to multiple versions but are easiest to test on one. 
+ """ + + def test_ref_resolvers_may_have_boolean_schemas_stored(self): + ref = "someCoolRef" + schema = {"$ref": ref} + resolver = validators._RefResolver("", {}, store={ref: False}) + validator = validators._LATEST_VERSION(schema, resolver=resolver) + + with self.assertRaises(exceptions.ValidationError): + validator.validate(None) + + +class TestValidatorFor(TestCase): + def test_draft_3(self): + schema = {"$schema": "http://json-schema.org/draft-03/schema"} + self.assertIs( + validators.validator_for(schema), + validators.Draft3Validator, + ) + + schema = {"$schema": "http://json-schema.org/draft-03/schema#"} + self.assertIs( + validators.validator_for(schema), + validators.Draft3Validator, + ) + + def test_draft_4(self): + schema = {"$schema": "http://json-schema.org/draft-04/schema"} + self.assertIs( + validators.validator_for(schema), + validators.Draft4Validator, + ) + + schema = {"$schema": "http://json-schema.org/draft-04/schema#"} + self.assertIs( + validators.validator_for(schema), + validators.Draft4Validator, + ) + + def test_draft_6(self): + schema = {"$schema": "http://json-schema.org/draft-06/schema"} + self.assertIs( + validators.validator_for(schema), + validators.Draft6Validator, + ) + + schema = {"$schema": "http://json-schema.org/draft-06/schema#"} + self.assertIs( + validators.validator_for(schema), + validators.Draft6Validator, + ) + + def test_draft_7(self): + schema = {"$schema": "http://json-schema.org/draft-07/schema"} + self.assertIs( + validators.validator_for(schema), + validators.Draft7Validator, + ) + + schema = {"$schema": "http://json-schema.org/draft-07/schema#"} + self.assertIs( + validators.validator_for(schema), + validators.Draft7Validator, + ) + + def test_draft_201909(self): + schema = {"$schema": "https://json-schema.org/draft/2019-09/schema"} + self.assertIs( + validators.validator_for(schema), + validators.Draft201909Validator, + ) + + schema = {"$schema": "https://json-schema.org/draft/2019-09/schema#"} + self.assertIs( + validators.validator_for(schema), + validators.Draft201909Validator, + ) + + def test_draft_202012(self): + schema = {"$schema": "https://json-schema.org/draft/2020-12/schema"} + self.assertIs( + validators.validator_for(schema), + validators.Draft202012Validator, + ) + + schema = {"$schema": "https://json-schema.org/draft/2020-12/schema#"} + self.assertIs( + validators.validator_for(schema), + validators.Draft202012Validator, + ) + + def test_True(self): + self.assertIs( + validators.validator_for(True), + validators._LATEST_VERSION, + ) + + def test_False(self): + self.assertIs( + validators.validator_for(False), + validators._LATEST_VERSION, + ) + + def test_custom_validator(self): + Validator = validators.create( + meta_schema={"id": "meta schema id"}, + version="12", + id_of=lambda s: s.get("id", ""), + ) + schema = {"$schema": "meta schema id"} + self.assertIs( + validators.validator_for(schema), + Validator, + ) + + def test_custom_validator_draft6(self): + Validator = validators.create( + meta_schema={"$id": "meta schema $id"}, + version="13", + ) + schema = {"$schema": "meta schema $id"} + self.assertIs( + validators.validator_for(schema), + Validator, + ) + + def test_validator_for_jsonschema_default(self): + self.assertIs(validators.validator_for({}), validators._LATEST_VERSION) + + def test_validator_for_custom_default(self): + self.assertIs(validators.validator_for({}, default=None), None) + + def test_warns_if_meta_schema_specified_was_not_found(self): + with self.assertWarns(DeprecationWarning) as cm: + 
validators.validator_for(schema={"$schema": "unknownSchema"}) + + self.assertEqual(cm.filename, __file__) + self.assertEqual( + str(cm.warning), + "The metaschema specified by $schema was not found. " + "Using the latest draft to validate, but this will raise " + "an error in the future.", + ) + + def test_does_not_warn_if_meta_schema_is_unspecified(self): + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + validators.validator_for(schema={}, default={}) + self.assertFalse(w) + + def test_validator_for_custom_default_with_schema(self): + schema, default = {"$schema": "mailto:foo@example.com"}, object() + self.assertIs(validators.validator_for(schema, default), default) + + +class TestValidate(TestCase): + def assertUses(self, schema, Validator): + result = [] + with mock.patch.object(Validator, "check_schema", result.append): + validators.validate({}, schema) + self.assertEqual(result, [schema]) + + def test_draft3_validator_is_chosen(self): + self.assertUses( + schema={"$schema": "http://json-schema.org/draft-03/schema#"}, + Validator=validators.Draft3Validator, + ) + # Make sure it works without the empty fragment + self.assertUses( + schema={"$schema": "http://json-schema.org/draft-03/schema"}, + Validator=validators.Draft3Validator, + ) + + def test_draft4_validator_is_chosen(self): + self.assertUses( + schema={"$schema": "http://json-schema.org/draft-04/schema#"}, + Validator=validators.Draft4Validator, + ) + # Make sure it works without the empty fragment + self.assertUses( + schema={"$schema": "http://json-schema.org/draft-04/schema"}, + Validator=validators.Draft4Validator, + ) + + def test_draft6_validator_is_chosen(self): + self.assertUses( + schema={"$schema": "http://json-schema.org/draft-06/schema#"}, + Validator=validators.Draft6Validator, + ) + # Make sure it works without the empty fragment + self.assertUses( + schema={"$schema": "http://json-schema.org/draft-06/schema"}, + Validator=validators.Draft6Validator, + ) + + def test_draft7_validator_is_chosen(self): + self.assertUses( + schema={"$schema": "http://json-schema.org/draft-07/schema#"}, + Validator=validators.Draft7Validator, + ) + # Make sure it works without the empty fragment + self.assertUses( + schema={"$schema": "http://json-schema.org/draft-07/schema"}, + Validator=validators.Draft7Validator, + ) + + def test_draft202012_validator_is_chosen(self): + self.assertUses( + schema={ + "$schema": "https://json-schema.org/draft/2020-12/schema#", + }, + Validator=validators.Draft202012Validator, + ) + # Make sure it works without the empty fragment + self.assertUses( + schema={ + "$schema": "https://json-schema.org/draft/2020-12/schema", + }, + Validator=validators.Draft202012Validator, + ) + + def test_draft202012_validator_is_the_default(self): + self.assertUses(schema={}, Validator=validators.Draft202012Validator) + + def test_validation_error_message(self): + with self.assertRaises(exceptions.ValidationError) as e: + validators.validate(12, {"type": "string"}) + self.assertRegex( + str(e.exception), + "(?s)Failed validating '.*' in schema.*On instance", + ) + + def test_schema_error_message(self): + with self.assertRaises(exceptions.SchemaError) as e: + validators.validate(12, {"type": 12}) + self.assertRegex( + str(e.exception), + "(?s)Failed validating '.*' in metaschema.*On schema", + ) + + def test_it_uses_best_match(self): + schema = { + "oneOf": [ + {"type": "number", "minimum": 20}, + {"type": "array"}, + ], + } + with self.assertRaises(exceptions.ValidationError) as e: + 
validators.validate(12, schema) + self.assertIn("12 is less than the minimum of 20", str(e.exception)) + + +class TestThreading(TestCase): + """ + Threading-related functionality tests. + + jsonschema doesn't promise thread safety, and its validation behavior + across multiple threads may change at any time, but that means it isn't + safe to share *validators* across threads, not that anytime one has + multiple threads that jsonschema won't work (it certainly is intended to). + + These tests ensure that this minimal level of functionality continues to + work. + """ + + def test_validation_across_a_second_thread(self): + failed = [] + + def validate(): + try: + validators.validate(instance=37, schema=True) + except: # pragma: no cover # noqa: E722 + failed.append(sys.exc_info()) + + validate() # just verify it succeeds + + from threading import Thread + thread = Thread(target=validate) + thread.start() + thread.join() + self.assertEqual((thread.is_alive(), failed), (False, [])) + + +class TestReferencing(TestCase): + def test_registry_with_retrieve(self): + def retrieve(uri): + return DRAFT202012.create_resource({"type": "integer"}) + + registry = referencing.Registry(retrieve=retrieve) + schema = {"$ref": "https://example.com/"} + validator = validators.Draft202012Validator(schema, registry=registry) + + self.assertEqual( + (validator.is_valid(12), validator.is_valid("foo")), + (True, False), + ) + + def test_custom_registries_do_not_autoretrieve_remote_resources(self): + registry = referencing.Registry() + schema = {"$ref": "https://example.com/"} + validator = validators.Draft202012Validator(schema, registry=registry) + + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + with self.assertRaises(referencing.exceptions.Unresolvable): + validator.validate(12) + self.assertFalse(w) + + +class TestRefResolver(TestCase): + + base_uri = "" + stored_uri = "foo://stored" + stored_schema = {"stored": "schema"} + + def setUp(self): + self.referrer = {} + self.store = {self.stored_uri: self.stored_schema} + self.resolver = validators._RefResolver( + self.base_uri, self.referrer, self.store, + ) + + def test_it_does_not_retrieve_schema_urls_from_the_network(self): + ref = validators.Draft3Validator.META_SCHEMA["id"] + with mock.patch.object(self.resolver, "resolve_remote") as patched: # noqa: SIM117 + with self.resolver.resolving(ref) as resolved: + pass + self.assertEqual(resolved, validators.Draft3Validator.META_SCHEMA) + self.assertFalse(patched.called) + + def test_it_resolves_local_refs(self): + ref = "#/properties/foo" + self.referrer["properties"] = {"foo": object()} + with self.resolver.resolving(ref) as resolved: + self.assertEqual(resolved, self.referrer["properties"]["foo"]) + + def test_it_resolves_local_refs_with_id(self): + schema = {"id": "http://bar/schema#", "a": {"foo": "bar"}} + resolver = validators._RefResolver.from_schema( + schema, + id_of=lambda schema: schema.get("id", ""), + ) + with resolver.resolving("#/a") as resolved: + self.assertEqual(resolved, schema["a"]) + with resolver.resolving("http://bar/schema#/a") as resolved: + self.assertEqual(resolved, schema["a"]) + + def test_it_retrieves_stored_refs(self): + with self.resolver.resolving(self.stored_uri) as resolved: + self.assertIs(resolved, self.stored_schema) + + self.resolver.store["cached_ref"] = {"foo": 12} + with self.resolver.resolving("cached_ref#/foo") as resolved: + self.assertEqual(resolved, 12) + + def test_it_retrieves_unstored_refs_via_requests(self): + ref = 
"http://bar#baz" + schema = {"baz": 12} + + if "requests" in sys.modules: # pragma: no cover + self.addCleanup( + sys.modules.__setitem__, "requests", sys.modules["requests"], + ) + sys.modules["requests"] = ReallyFakeRequests({"http://bar": schema}) + + with self.resolver.resolving(ref) as resolved: + self.assertEqual(resolved, 12) + + def test_it_retrieves_unstored_refs_via_urlopen(self): + ref = "http://bar#baz" + schema = {"baz": 12} + + if "requests" in sys.modules: # pragma: no cover + self.addCleanup( + sys.modules.__setitem__, "requests", sys.modules["requests"], + ) + sys.modules["requests"] = None + + @contextmanager + def fake_urlopen(url): + self.assertEqual(url, "http://bar") + yield BytesIO(json.dumps(schema).encode("utf8")) + + self.addCleanup(setattr, validators, "urlopen", validators.urlopen) + validators.urlopen = fake_urlopen + + with self.resolver.resolving(ref) as resolved: + pass + self.assertEqual(resolved, 12) + + def test_it_retrieves_local_refs_via_urlopen(self): + with tempfile.NamedTemporaryFile(delete=False, mode="wt") as tempf: + self.addCleanup(os.remove, tempf.name) + json.dump({"foo": "bar"}, tempf) + + ref = f"file://{pathname2url(tempf.name)}#foo" + with self.resolver.resolving(ref) as resolved: + self.assertEqual(resolved, "bar") + + def test_it_can_construct_a_base_uri_from_a_schema(self): + schema = {"id": "foo"} + resolver = validators._RefResolver.from_schema( + schema, + id_of=lambda schema: schema.get("id", ""), + ) + self.assertEqual(resolver.base_uri, "foo") + self.assertEqual(resolver.resolution_scope, "foo") + with resolver.resolving("") as resolved: + self.assertEqual(resolved, schema) + with resolver.resolving("#") as resolved: + self.assertEqual(resolved, schema) + with resolver.resolving("foo") as resolved: + self.assertEqual(resolved, schema) + with resolver.resolving("foo#") as resolved: + self.assertEqual(resolved, schema) + + def test_it_can_construct_a_base_uri_from_a_schema_without_id(self): + schema = {} + resolver = validators._RefResolver.from_schema(schema) + self.assertEqual(resolver.base_uri, "") + self.assertEqual(resolver.resolution_scope, "") + with resolver.resolving("") as resolved: + self.assertEqual(resolved, schema) + with resolver.resolving("#") as resolved: + self.assertEqual(resolved, schema) + + def test_custom_uri_scheme_handlers(self): + def handler(url): + self.assertEqual(url, ref) + return schema + + schema = {"foo": "bar"} + ref = "foo://bar" + resolver = validators._RefResolver("", {}, handlers={"foo": handler}) + with resolver.resolving(ref) as resolved: + self.assertEqual(resolved, schema) + + def test_cache_remote_on(self): + response = [object()] + + def handler(url): + try: + return response.pop() + except IndexError: # pragma: no cover + self.fail("Response must not have been cached!") + + ref = "foo://bar" + resolver = validators._RefResolver( + "", {}, cache_remote=True, handlers={"foo": handler}, + ) + with resolver.resolving(ref): + pass + with resolver.resolving(ref): + pass + + def test_cache_remote_off(self): + response = [object()] + + def handler(url): + try: + return response.pop() + except IndexError: # pragma: no cover + self.fail("Handler called twice!") + + ref = "foo://bar" + resolver = validators._RefResolver( + "", {}, cache_remote=False, handlers={"foo": handler}, + ) + with resolver.resolving(ref): + pass + + def test_if_you_give_it_junk_you_get_a_resolution_error(self): + error = ValueError("Oh no! 
What's this?") + + def handler(url): + raise error + + ref = "foo://bar" + resolver = validators._RefResolver("", {}, handlers={"foo": handler}) + with self.assertRaises(exceptions._RefResolutionError) as err: # noqa: SIM117 + with resolver.resolving(ref): + self.fail("Shouldn't get this far!") # pragma: no cover + self.assertEqual(err.exception, exceptions._RefResolutionError(error)) + + def test_helpful_error_message_on_failed_pop_scope(self): + resolver = validators._RefResolver("", {}) + resolver.pop_scope() + with self.assertRaises(exceptions._RefResolutionError) as exc: + resolver.pop_scope() + self.assertIn("Failed to pop the scope", str(exc.exception)) + + def test_pointer_within_schema_with_different_id(self): + """ + See #1085. + """ + schema = validators.Draft7Validator.META_SCHEMA + one = validators._RefResolver("", schema) + validator = validators.Draft7Validator(schema, resolver=one) + self.assertFalse(validator.is_valid({"maxLength": "foo"})) + + another = { + "allOf": [{"$ref": validators.Draft7Validator.META_SCHEMA["$id"]}], + } + two = validators._RefResolver("", another) + validator = validators.Draft7Validator(another, resolver=two) + self.assertFalse(validator.is_valid({"maxLength": "foo"})) + + def test_newly_created_validator_with_ref_resolver(self): + """ + See https://github.com/python-jsonschema/jsonschema/issues/1061#issuecomment-1624266555. + """ + + def handle(uri): + self.assertEqual(uri, "http://example.com/foo") + return {"type": "integer"} + + resolver = validators._RefResolver("", {}, handlers={"http": handle}) + Validator = validators.create( + meta_schema={}, + validators=validators.Draft4Validator.VALIDATORS, + ) + schema = {"$id": "http://example.com/bar", "$ref": "foo"} + validator = Validator(schema, resolver=resolver) + self.assertEqual( + (validator.is_valid({}), validator.is_valid(37)), + (False, True), + ) + + def test_refresolver_with_pointer_in_schema_with_no_id(self): + """ + See https://github.com/python-jsonschema/jsonschema/issues/1124#issuecomment-1632574249. + """ + + schema = { + "properties": {"x": {"$ref": "#/definitions/x"}}, + "definitions": {"x": {"type": "integer"}}, + } + + validator = validators.Draft202012Validator( + schema, + resolver=validators._RefResolver("", schema), + ) + self.assertEqual( + (validator.is_valid({"x": "y"}), validator.is_valid({"x": 37})), + (False, True), + ) + + + +def sorted_errors(errors): + def key(error): + return ( + [str(e) for e in error.path], + [str(e) for e in error.schema_path], + ) + return sorted(errors, key=key) + + +@define +class ReallyFakeRequests: + + _responses: dict[str, Any] + + def get(self, url): + response = self._responses.get(url) + if url is None: # pragma: no cover + raise ValueError("Unknown URL: " + repr(url)) + return _ReallyFakeJSONResponse(json.dumps(response)) + + +@define +class _ReallyFakeJSONResponse: + + _response: str + + def json(self): + return json.loads(self._response) diff --git a/.venv/lib/python3.12/site-packages/jsonschema/validators.py b/.venv/lib/python3.12/site-packages/jsonschema/validators.py new file mode 100644 index 00000000..85c39160 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/jsonschema/validators.py @@ -0,0 +1,1410 @@ +""" +Creation and extension of validators, with implementations for existing drafts. 
+""" +from __future__ import annotations + +from collections import deque +from collections.abc import Iterable, Mapping, Sequence +from functools import lru_cache +from operator import methodcaller +from typing import TYPE_CHECKING +from urllib.parse import unquote, urldefrag, urljoin, urlsplit +from urllib.request import urlopen +from warnings import warn +import contextlib +import json +import reprlib +import warnings + +from attrs import define, field, fields +from jsonschema_specifications import REGISTRY as SPECIFICATIONS +from rpds import HashTrieMap +import referencing.exceptions +import referencing.jsonschema + +from jsonschema import ( + _format, + _keywords, + _legacy_keywords, + _types, + _typing, + _utils, + exceptions, +) + +if TYPE_CHECKING: + from jsonschema.protocols import Validator + +_UNSET = _utils.Unset() + +_VALIDATORS: dict[str, Validator] = {} +_META_SCHEMAS = _utils.URIDict() + + +def __getattr__(name): + if name == "ErrorTree": + warnings.warn( + "Importing ErrorTree from jsonschema.validators is deprecated. " + "Instead import it from jsonschema.exceptions.", + DeprecationWarning, + stacklevel=2, + ) + from jsonschema.exceptions import ErrorTree + return ErrorTree + elif name == "validators": + warnings.warn( + "Accessing jsonschema.validators.validators is deprecated. " + "Use jsonschema.validators.validator_for with a given schema.", + DeprecationWarning, + stacklevel=2, + ) + return _VALIDATORS + elif name == "meta_schemas": + warnings.warn( + "Accessing jsonschema.validators.meta_schemas is deprecated. " + "Use jsonschema.validators.validator_for with a given schema.", + DeprecationWarning, + stacklevel=2, + ) + return _META_SCHEMAS + elif name == "RefResolver": + warnings.warn( + _RefResolver._DEPRECATION_MESSAGE, + DeprecationWarning, + stacklevel=2, + ) + return _RefResolver + raise AttributeError(f"module {__name__} has no attribute {name}") + + +def validates(version): + """ + Register the decorated validator for a ``version`` of the specification. + + Registered validators and their meta schemas will be considered when + parsing :kw:`$schema` keywords' URIs. + + Arguments: + + version (str): + + An identifier to use as the version's name + + Returns: + + collections.abc.Callable: + + a class decorator to decorate the validator with the version + + """ + + def _validates(cls): + _VALIDATORS[version] = cls + meta_schema_id = cls.ID_OF(cls.META_SCHEMA) + _META_SCHEMAS[meta_schema_id] = cls + return cls + return _validates + + +def _warn_for_remote_retrieve(uri: str): + from urllib.request import Request, urlopen + headers = {"User-Agent": "python-jsonschema (deprecated $ref resolution)"} + request = Request(uri, headers=headers) # noqa: S310 + with urlopen(request) as response: # noqa: S310 + warnings.warn( + "Automatically retrieving remote references can be a security " + "vulnerability and is discouraged by the JSON Schema " + "specifications. Relying on this behavior is deprecated " + "and will shortly become an error. 
If you are sure you want to " + "remotely retrieve your reference and that it is safe to do so, " + "you can find instructions for doing so via referencing.Registry " + "in the referencing documentation " + "(https://referencing.readthedocs.org).", + DeprecationWarning, + stacklevel=9, # Ha ha ha ha magic numbers :/ + ) + return referencing.Resource.from_contents( + json.load(response), + default_specification=referencing.jsonschema.DRAFT202012, + ) + + +_REMOTE_WARNING_REGISTRY = SPECIFICATIONS.combine( + referencing.Registry(retrieve=_warn_for_remote_retrieve), # type: ignore[call-arg] +) + + +def create( + meta_schema: referencing.jsonschema.ObjectSchema, + validators: ( + Mapping[str, _typing.SchemaKeywordValidator] + | Iterable[tuple[str, _typing.SchemaKeywordValidator]] + ) = (), + version: str | None = None, + type_checker: _types.TypeChecker = _types.draft202012_type_checker, + format_checker: _format.FormatChecker = _format.draft202012_format_checker, + id_of: _typing.id_of = referencing.jsonschema.DRAFT202012.id_of, + applicable_validators: _typing.ApplicableValidators = methodcaller( + "items", + ), +): + """ + Create a new validator class. + + Arguments: + + meta_schema: + + the meta schema for the new validator class + + validators: + + a mapping from names to callables, where each callable will + validate the schema property with the given name. + + Each callable should take 4 arguments: + + 1. a validator instance, + 2. the value of the property being validated within the + instance + 3. the instance + 4. the schema + + version: + + an identifier for the version that this validator class will + validate. If provided, the returned validator class will + have its ``__name__`` set to include the version, and also + will have `jsonschema.validators.validates` automatically + called for the given version. + + type_checker: + + a type checker, used when applying the :kw:`type` keyword. + + If unprovided, a `jsonschema.TypeChecker` will be created + with a set of default types typical of JSON Schema drafts. + + format_checker: + + a format checker, used when applying the :kw:`format` keyword. + + If unprovided, a `jsonschema.FormatChecker` will be created + with a set of default formats typical of JSON Schema drafts. + + id_of: + + A function that given a schema, returns its ID. + + applicable_validators: + + A function that, given a schema, returns the list of + applicable schema keywords and associated values + which will be used to validate the instance. + This is mostly used to support pre-draft 7 versions of JSON Schema + which specified behavior around ignoring keywords if they were + siblings of a ``$ref`` keyword. If you're not attempting to + implement similar behavior, you can typically ignore this argument + and leave it at its default. 
+ + Returns: + + a new `jsonschema.protocols.Validator` class + + """ + # preemptively don't shadow the `Validator.format_checker` local + format_checker_arg = format_checker + + specification = referencing.jsonschema.specification_with( + dialect_id=id_of(meta_schema) or "urn:unknown-dialect", + default=referencing.Specification.OPAQUE, + ) + + @define + class Validator: + + VALIDATORS = dict(validators) # noqa: RUF012 + META_SCHEMA = dict(meta_schema) # noqa: RUF012 + TYPE_CHECKER = type_checker + FORMAT_CHECKER = format_checker_arg + ID_OF = staticmethod(id_of) + + _APPLICABLE_VALIDATORS = applicable_validators + _validators = field(init=False, repr=False, eq=False) + + schema: referencing.jsonschema.Schema = field(repr=reprlib.repr) + _ref_resolver = field(default=None, repr=False, alias="resolver") + format_checker: _format.FormatChecker | None = field(default=None) + # TODO: include new meta-schemas added at runtime + _registry: referencing.jsonschema.SchemaRegistry = field( + default=_REMOTE_WARNING_REGISTRY, + kw_only=True, + repr=False, + ) + _resolver = field( + alias="_resolver", + default=None, + kw_only=True, + repr=False, + ) + + def __init_subclass__(cls): + warnings.warn( + ( + "Subclassing validator classes is not intended to " + "be part of their public API. A future version " + "will make doing so an error, as the behavior of " + "subclasses isn't guaranteed to stay the same " + "between releases of jsonschema. Instead, prefer " + "composition of validators, wrapping them in an object " + "owned entirely by the downstream library." + ), + DeprecationWarning, + stacklevel=2, + ) + + def evolve(self, **changes): + cls = self.__class__ + schema = changes.setdefault("schema", self.schema) + NewValidator = validator_for(schema, default=cls) + + for field in fields(cls): # noqa: F402 + if not field.init: + continue + attr_name = field.name + init_name = field.alias + if init_name not in changes: + changes[init_name] = getattr(self, attr_name) + + return NewValidator(**changes) + + cls.evolve = evolve + + def __attrs_post_init__(self): + if self._resolver is None: + registry = self._registry + if registry is not _REMOTE_WARNING_REGISTRY: + registry = SPECIFICATIONS.combine(registry) + resource = specification.create_resource(self.schema) + self._resolver = registry.resolver_with_root(resource) + + if self.schema is True or self.schema is False: + self._validators = [] + else: + self._validators = [ + (self.VALIDATORS[k], k, v) + for k, v in applicable_validators(self.schema) + if k in self.VALIDATORS + ] + + # REMOVEME: Legacy ref resolution state management. + push_scope = getattr(self._ref_resolver, "push_scope", None) + if push_scope is not None: + id = id_of(self.schema) + if id is not None: + push_scope(id) + + @classmethod + def check_schema(cls, schema, format_checker=_UNSET): + Validator = validator_for(cls.META_SCHEMA, default=cls) + if format_checker is _UNSET: + format_checker = Validator.FORMAT_CHECKER + validator = Validator( + schema=cls.META_SCHEMA, + format_checker=format_checker, + ) + for error in validator.iter_errors(schema): + raise exceptions.SchemaError.create_from(error) + + @property + def resolver(self): + warnings.warn( + ( + f"Accessing {self.__class__.__name__}.resolver is " + "deprecated as of v4.18.0, in favor of the " + "https://github.com/python-jsonschema/referencing " + "library, which provides more compliant referencing " + "behavior as well as more flexible APIs for " + "customization." 
+ ), + DeprecationWarning, + stacklevel=2, + ) + if self._ref_resolver is None: + self._ref_resolver = _RefResolver.from_schema( + self.schema, + id_of=id_of, + ) + return self._ref_resolver + + def evolve(self, **changes): + schema = changes.setdefault("schema", self.schema) + NewValidator = validator_for(schema, default=self.__class__) + + for (attr_name, init_name) in evolve_fields: + if init_name not in changes: + changes[init_name] = getattr(self, attr_name) + + return NewValidator(**changes) + + def iter_errors(self, instance, _schema=None): + if _schema is not None: + warnings.warn( + ( + "Passing a schema to Validator.iter_errors " + "is deprecated and will be removed in a future " + "release. Call validator.evolve(schema=new_schema)." + "iter_errors(...) instead." + ), + DeprecationWarning, + stacklevel=2, + ) + validators = [ + (self.VALIDATORS[k], k, v) + for k, v in applicable_validators(_schema) + if k in self.VALIDATORS + ] + else: + _schema, validators = self.schema, self._validators + + if _schema is True: + return + elif _schema is False: + yield exceptions.ValidationError( + f"False schema does not allow {instance!r}", + validator=None, + validator_value=None, + instance=instance, + schema=_schema, + ) + return + + for validator, k, v in validators: + errors = validator(self, v, instance, _schema) or () + for error in errors: + # set details if not already set by the called fn + error._set( + validator=k, + validator_value=v, + instance=instance, + schema=_schema, + type_checker=self.TYPE_CHECKER, + ) + if k not in {"if", "$ref"}: + error.schema_path.appendleft(k) + yield error + + def descend( + self, + instance, + schema, + path=None, + schema_path=None, + resolver=None, + ): + if schema is True: + return + elif schema is False: + yield exceptions.ValidationError( + f"False schema does not allow {instance!r}", + validator=None, + validator_value=None, + instance=instance, + schema=schema, + ) + return + + if self._ref_resolver is not None: + evolved = self.evolve(schema=schema) + else: + if resolver is None: + resolver = self._resolver.in_subresource( + specification.create_resource(schema), + ) + evolved = self.evolve(schema=schema, _resolver=resolver) + + for k, v in applicable_validators(schema): + validator = evolved.VALIDATORS.get(k) + if validator is None: + continue + + errors = validator(evolved, v, instance, schema) or () + for error in errors: + # set details if not already set by the called fn + error._set( + validator=k, + validator_value=v, + instance=instance, + schema=schema, + type_checker=evolved.TYPE_CHECKER, + ) + if k not in {"if", "$ref"}: + error.schema_path.appendleft(k) + if path is not None: + error.path.appendleft(path) + if schema_path is not None: + error.schema_path.appendleft(schema_path) + yield error + + def validate(self, *args, **kwargs): + for error in self.iter_errors(*args, **kwargs): + raise error + + def is_type(self, instance, type): + try: + return self.TYPE_CHECKER.is_type(instance, type) + except exceptions.UndefinedTypeCheck: + exc = exceptions.UnknownType(type, instance, self.schema) + raise exc from None + + def _validate_reference(self, ref, instance): + if self._ref_resolver is None: + try: + resolved = self._resolver.lookup(ref) + except referencing.exceptions.Unresolvable as err: + raise exceptions._WrappedReferencingError(err) from err + + return self.descend( + instance, + resolved.contents, + resolver=resolved.resolver, + ) + else: + resolve = getattr(self._ref_resolver, "resolve", None) + if resolve is None: + with 
self._ref_resolver.resolving(ref) as resolved: + return self.descend(instance, resolved) + else: + scope, resolved = resolve(ref) + self._ref_resolver.push_scope(scope) + + try: + return list(self.descend(instance, resolved)) + finally: + self._ref_resolver.pop_scope() + + def is_valid(self, instance, _schema=None): + if _schema is not None: + warnings.warn( + ( + "Passing a schema to Validator.is_valid is deprecated " + "and will be removed in a future release. Call " + "validator.evolve(schema=new_schema).is_valid(...) " + "instead." + ), + DeprecationWarning, + stacklevel=2, + ) + self = self.evolve(schema=_schema) + + error = next(self.iter_errors(instance), None) + return error is None + + evolve_fields = [ + (field.name, field.alias) + for field in fields(Validator) + if field.init + ] + + if version is not None: + safe = version.title().replace(" ", "").replace("-", "") + Validator.__name__ = Validator.__qualname__ = f"{safe}Validator" + Validator = validates(version)(Validator) # type: ignore[misc] + + return Validator + + +def extend( + validator, + validators=(), + version=None, + type_checker=None, + format_checker=None, +): + """ + Create a new validator class by extending an existing one. + + Arguments: + + validator (jsonschema.protocols.Validator): + + an existing validator class + + validators (collections.abc.Mapping): + + a mapping of new validator callables to extend with, whose + structure is as in `create`. + + .. note:: + + Any validator callables with the same name as an + existing one will (silently) replace the old validator + callable entirely, effectively overriding any validation + done in the "parent" validator class. + + If you wish to instead extend the behavior of a parent's + validator callable, delegate and call it directly in + the new validator function by retrieving it using + ``OldValidator.VALIDATORS["validation_keyword_name"]``. + + version (str): + + a version for the new validator class + + type_checker (jsonschema.TypeChecker): + + a type checker, used when applying the :kw:`type` keyword. + + If unprovided, the type checker of the extended + `jsonschema.protocols.Validator` will be carried along. + + format_checker (jsonschema.FormatChecker): + + a format checker, used when applying the :kw:`format` keyword. + + If unprovided, the format checker of the extended + `jsonschema.protocols.Validator` will be carried along. + + Returns: + + a new `jsonschema.protocols.Validator` class extending the one + provided + + .. note:: Meta Schemas + + The new validator class will have its parent's meta schema. + + If you wish to change or extend the meta schema in the new + validator class, modify ``META_SCHEMA`` directly on the returned + class. Note that no implicit copying is done, so a copy should + likely be made before modifying it, in order to not affect the + old validator. 
+ + """ + all_validators = dict(validator.VALIDATORS) + all_validators.update(validators) + + if type_checker is None: + type_checker = validator.TYPE_CHECKER + if format_checker is None: + format_checker = validator.FORMAT_CHECKER + return create( + meta_schema=validator.META_SCHEMA, + validators=all_validators, + version=version, + type_checker=type_checker, + format_checker=format_checker, + id_of=validator.ID_OF, + applicable_validators=validator._APPLICABLE_VALIDATORS, + ) + + +Draft3Validator = create( + meta_schema=SPECIFICATIONS.contents( + "http://json-schema.org/draft-03/schema#", + ), + validators={ + "$ref": _keywords.ref, + "additionalItems": _legacy_keywords.additionalItems, + "additionalProperties": _keywords.additionalProperties, + "dependencies": _legacy_keywords.dependencies_draft3, + "disallow": _legacy_keywords.disallow_draft3, + "divisibleBy": _keywords.multipleOf, + "enum": _keywords.enum, + "extends": _legacy_keywords.extends_draft3, + "format": _keywords.format, + "items": _legacy_keywords.items_draft3_draft4, + "maxItems": _keywords.maxItems, + "maxLength": _keywords.maxLength, + "maximum": _legacy_keywords.maximum_draft3_draft4, + "minItems": _keywords.minItems, + "minLength": _keywords.minLength, + "minimum": _legacy_keywords.minimum_draft3_draft4, + "pattern": _keywords.pattern, + "patternProperties": _keywords.patternProperties, + "properties": _legacy_keywords.properties_draft3, + "type": _legacy_keywords.type_draft3, + "uniqueItems": _keywords.uniqueItems, + }, + type_checker=_types.draft3_type_checker, + format_checker=_format.draft3_format_checker, + version="draft3", + id_of=referencing.jsonschema.DRAFT3.id_of, + applicable_validators=_legacy_keywords.ignore_ref_siblings, +) + +Draft4Validator = create( + meta_schema=SPECIFICATIONS.contents( + "http://json-schema.org/draft-04/schema#", + ), + validators={ + "$ref": _keywords.ref, + "additionalItems": _legacy_keywords.additionalItems, + "additionalProperties": _keywords.additionalProperties, + "allOf": _keywords.allOf, + "anyOf": _keywords.anyOf, + "dependencies": _legacy_keywords.dependencies_draft4_draft6_draft7, + "enum": _keywords.enum, + "format": _keywords.format, + "items": _legacy_keywords.items_draft3_draft4, + "maxItems": _keywords.maxItems, + "maxLength": _keywords.maxLength, + "maxProperties": _keywords.maxProperties, + "maximum": _legacy_keywords.maximum_draft3_draft4, + "minItems": _keywords.minItems, + "minLength": _keywords.minLength, + "minProperties": _keywords.minProperties, + "minimum": _legacy_keywords.minimum_draft3_draft4, + "multipleOf": _keywords.multipleOf, + "not": _keywords.not_, + "oneOf": _keywords.oneOf, + "pattern": _keywords.pattern, + "patternProperties": _keywords.patternProperties, + "properties": _keywords.properties, + "required": _keywords.required, + "type": _keywords.type, + "uniqueItems": _keywords.uniqueItems, + }, + type_checker=_types.draft4_type_checker, + format_checker=_format.draft4_format_checker, + version="draft4", + id_of=referencing.jsonschema.DRAFT4.id_of, + applicable_validators=_legacy_keywords.ignore_ref_siblings, +) + +Draft6Validator = create( + meta_schema=SPECIFICATIONS.contents( + "http://json-schema.org/draft-06/schema#", + ), + validators={ + "$ref": _keywords.ref, + "additionalItems": _legacy_keywords.additionalItems, + "additionalProperties": _keywords.additionalProperties, + "allOf": _keywords.allOf, + "anyOf": _keywords.anyOf, + "const": _keywords.const, + "contains": _legacy_keywords.contains_draft6_draft7, + "dependencies": 
_legacy_keywords.dependencies_draft4_draft6_draft7, + "enum": _keywords.enum, + "exclusiveMaximum": _keywords.exclusiveMaximum, + "exclusiveMinimum": _keywords.exclusiveMinimum, + "format": _keywords.format, + "items": _legacy_keywords.items_draft6_draft7_draft201909, + "maxItems": _keywords.maxItems, + "maxLength": _keywords.maxLength, + "maxProperties": _keywords.maxProperties, + "maximum": _keywords.maximum, + "minItems": _keywords.minItems, + "minLength": _keywords.minLength, + "minProperties": _keywords.minProperties, + "minimum": _keywords.minimum, + "multipleOf": _keywords.multipleOf, + "not": _keywords.not_, + "oneOf": _keywords.oneOf, + "pattern": _keywords.pattern, + "patternProperties": _keywords.patternProperties, + "properties": _keywords.properties, + "propertyNames": _keywords.propertyNames, + "required": _keywords.required, + "type": _keywords.type, + "uniqueItems": _keywords.uniqueItems, + }, + type_checker=_types.draft6_type_checker, + format_checker=_format.draft6_format_checker, + version="draft6", + id_of=referencing.jsonschema.DRAFT6.id_of, + applicable_validators=_legacy_keywords.ignore_ref_siblings, +) + +Draft7Validator = create( + meta_schema=SPECIFICATIONS.contents( + "http://json-schema.org/draft-07/schema#", + ), + validators={ + "$ref": _keywords.ref, + "additionalItems": _legacy_keywords.additionalItems, + "additionalProperties": _keywords.additionalProperties, + "allOf": _keywords.allOf, + "anyOf": _keywords.anyOf, + "const": _keywords.const, + "contains": _legacy_keywords.contains_draft6_draft7, + "dependencies": _legacy_keywords.dependencies_draft4_draft6_draft7, + "enum": _keywords.enum, + "exclusiveMaximum": _keywords.exclusiveMaximum, + "exclusiveMinimum": _keywords.exclusiveMinimum, + "format": _keywords.format, + "if": _keywords.if_, + "items": _legacy_keywords.items_draft6_draft7_draft201909, + "maxItems": _keywords.maxItems, + "maxLength": _keywords.maxLength, + "maxProperties": _keywords.maxProperties, + "maximum": _keywords.maximum, + "minItems": _keywords.minItems, + "minLength": _keywords.minLength, + "minProperties": _keywords.minProperties, + "minimum": _keywords.minimum, + "multipleOf": _keywords.multipleOf, + "not": _keywords.not_, + "oneOf": _keywords.oneOf, + "pattern": _keywords.pattern, + "patternProperties": _keywords.patternProperties, + "properties": _keywords.properties, + "propertyNames": _keywords.propertyNames, + "required": _keywords.required, + "type": _keywords.type, + "uniqueItems": _keywords.uniqueItems, + }, + type_checker=_types.draft7_type_checker, + format_checker=_format.draft7_format_checker, + version="draft7", + id_of=referencing.jsonschema.DRAFT7.id_of, + applicable_validators=_legacy_keywords.ignore_ref_siblings, +) + +Draft201909Validator = create( + meta_schema=SPECIFICATIONS.contents( + "https://json-schema.org/draft/2019-09/schema", + ), + validators={ + "$recursiveRef": _legacy_keywords.recursiveRef, + "$ref": _keywords.ref, + "additionalItems": _legacy_keywords.additionalItems, + "additionalProperties": _keywords.additionalProperties, + "allOf": _keywords.allOf, + "anyOf": _keywords.anyOf, + "const": _keywords.const, + "contains": _keywords.contains, + "dependentRequired": _keywords.dependentRequired, + "dependentSchemas": _keywords.dependentSchemas, + "enum": _keywords.enum, + "exclusiveMaximum": _keywords.exclusiveMaximum, + "exclusiveMinimum": _keywords.exclusiveMinimum, + "format": _keywords.format, + "if": _keywords.if_, + "items": _legacy_keywords.items_draft6_draft7_draft201909, + "maxItems": 
_keywords.maxItems, + "maxLength": _keywords.maxLength, + "maxProperties": _keywords.maxProperties, + "maximum": _keywords.maximum, + "minItems": _keywords.minItems, + "minLength": _keywords.minLength, + "minProperties": _keywords.minProperties, + "minimum": _keywords.minimum, + "multipleOf": _keywords.multipleOf, + "not": _keywords.not_, + "oneOf": _keywords.oneOf, + "pattern": _keywords.pattern, + "patternProperties": _keywords.patternProperties, + "properties": _keywords.properties, + "propertyNames": _keywords.propertyNames, + "required": _keywords.required, + "type": _keywords.type, + "unevaluatedItems": _legacy_keywords.unevaluatedItems_draft2019, + "unevaluatedProperties": ( + _legacy_keywords.unevaluatedProperties_draft2019 + ), + "uniqueItems": _keywords.uniqueItems, + }, + type_checker=_types.draft201909_type_checker, + format_checker=_format.draft201909_format_checker, + version="draft2019-09", +) + +Draft202012Validator = create( + meta_schema=SPECIFICATIONS.contents( + "https://json-schema.org/draft/2020-12/schema", + ), + validators={ + "$dynamicRef": _keywords.dynamicRef, + "$ref": _keywords.ref, + "additionalProperties": _keywords.additionalProperties, + "allOf": _keywords.allOf, + "anyOf": _keywords.anyOf, + "const": _keywords.const, + "contains": _keywords.contains, + "dependentRequired": _keywords.dependentRequired, + "dependentSchemas": _keywords.dependentSchemas, + "enum": _keywords.enum, + "exclusiveMaximum": _keywords.exclusiveMaximum, + "exclusiveMinimum": _keywords.exclusiveMinimum, + "format": _keywords.format, + "if": _keywords.if_, + "items": _keywords.items, + "maxItems": _keywords.maxItems, + "maxLength": _keywords.maxLength, + "maxProperties": _keywords.maxProperties, + "maximum": _keywords.maximum, + "minItems": _keywords.minItems, + "minLength": _keywords.minLength, + "minProperties": _keywords.minProperties, + "minimum": _keywords.minimum, + "multipleOf": _keywords.multipleOf, + "not": _keywords.not_, + "oneOf": _keywords.oneOf, + "pattern": _keywords.pattern, + "patternProperties": _keywords.patternProperties, + "prefixItems": _keywords.prefixItems, + "properties": _keywords.properties, + "propertyNames": _keywords.propertyNames, + "required": _keywords.required, + "type": _keywords.type, + "unevaluatedItems": _keywords.unevaluatedItems, + "unevaluatedProperties": _keywords.unevaluatedProperties, + "uniqueItems": _keywords.uniqueItems, + }, + type_checker=_types.draft202012_type_checker, + format_checker=_format.draft202012_format_checker, + version="draft2020-12", +) + +_LATEST_VERSION = Draft202012Validator + + +class _RefResolver: + """ + Resolve JSON References. + + Arguments: + + base_uri (str): + + The URI of the referring document + + referrer: + + The actual referring document + + store (dict): + + A mapping from URIs to documents to cache + + cache_remote (bool): + + Whether remote refs should be cached after first resolution + + handlers (dict): + + A mapping from URI schemes to functions that should be used + to retrieve them + + urljoin_cache (:func:`functools.lru_cache`): + + A cache that will be used for caching the results of joining + the resolution scope to subscopes. + + remote_cache (:func:`functools.lru_cache`): + + A cache that will be used for caching the results of + resolved remote URLs. + + Attributes: + + cache_remote (bool): + + Whether remote refs should be cached after first resolution + + .. deprecated:: v4.18.0 + + ``RefResolver`` has been deprecated in favor of `referencing`. 
+ + """ + + _DEPRECATION_MESSAGE = ( + "jsonschema.RefResolver is deprecated as of v4.18.0, in favor of the " + "https://github.com/python-jsonschema/referencing library, which " + "provides more compliant referencing behavior as well as more " + "flexible APIs for customization. A future release will remove " + "RefResolver. Please file a feature request (on referencing) if you " + "are missing an API for the kind of customization you need." + ) + + def __init__( + self, + base_uri, + referrer, + store=HashTrieMap(), + cache_remote=True, + handlers=(), + urljoin_cache=None, + remote_cache=None, + ): + if urljoin_cache is None: + urljoin_cache = lru_cache(1024)(urljoin) + if remote_cache is None: + remote_cache = lru_cache(1024)(self.resolve_from_url) + + self.referrer = referrer + self.cache_remote = cache_remote + self.handlers = dict(handlers) + + self._scopes_stack = [base_uri] + + self.store = _utils.URIDict( + (uri, each.contents) for uri, each in SPECIFICATIONS.items() + ) + self.store.update( + (id, each.META_SCHEMA) for id, each in _META_SCHEMAS.items() + ) + self.store.update(store) + self.store.update( + (schema["$id"], schema) + for schema in store.values() + if isinstance(schema, Mapping) and "$id" in schema + ) + self.store[base_uri] = referrer + + self._urljoin_cache = urljoin_cache + self._remote_cache = remote_cache + + @classmethod + def from_schema( # noqa: D417 + cls, + schema, + id_of=referencing.jsonschema.DRAFT202012.id_of, + *args, + **kwargs, + ): + """ + Construct a resolver from a JSON schema object. + + Arguments: + + schema: + + the referring schema + + Returns: + + `_RefResolver` + + """ + return cls(base_uri=id_of(schema) or "", referrer=schema, *args, **kwargs) # noqa: B026, E501 + + def push_scope(self, scope): + """ + Enter a given sub-scope. + + Treats further dereferences as being performed underneath the + given scope. + """ + self._scopes_stack.append( + self._urljoin_cache(self.resolution_scope, scope), + ) + + def pop_scope(self): + """ + Exit the most recent entered scope. + + Treats further dereferences as being performed underneath the + original scope. + + Don't call this method more times than `push_scope` has been + called. + """ + try: + self._scopes_stack.pop() + except IndexError: + raise exceptions._RefResolutionError( + "Failed to pop the scope from an empty stack. " + "`pop_scope()` should only be called once for every " + "`push_scope()`", + ) from None + + @property + def resolution_scope(self): + """ + Retrieve the current resolution scope. + """ + return self._scopes_stack[-1] + + @property + def base_uri(self): + """ + Retrieve the current base URI, not including any fragment. + """ + uri, _ = urldefrag(self.resolution_scope) + return uri + + @contextlib.contextmanager + def in_scope(self, scope): + """ + Temporarily enter the given scope for the duration of the context. + + .. deprecated:: v4.0.0 + """ + warnings.warn( + "jsonschema.RefResolver.in_scope is deprecated and will be " + "removed in a future release.", + DeprecationWarning, + stacklevel=3, + ) + self.push_scope(scope) + try: + yield + finally: + self.pop_scope() + + @contextlib.contextmanager + def resolving(self, ref): + """ + Resolve the given ``ref`` and enter its resolution scope. + + Exits the scope on exit of this context manager. 
+ + Arguments: + + ref (str): + + The reference to resolve + + """ + url, resolved = self.resolve(ref) + self.push_scope(url) + try: + yield resolved + finally: + self.pop_scope() + + def _find_in_referrer(self, key): + return self._get_subschemas_cache()[key] + + @lru_cache # noqa: B019 + def _get_subschemas_cache(self): + cache = {key: [] for key in _SUBSCHEMAS_KEYWORDS} + for keyword, subschema in _search_schema( + self.referrer, _match_subschema_keywords, + ): + cache[keyword].append(subschema) + return cache + + @lru_cache # noqa: B019 + def _find_in_subschemas(self, url): + subschemas = self._get_subschemas_cache()["$id"] + if not subschemas: + return None + uri, fragment = urldefrag(url) + for subschema in subschemas: + id = subschema["$id"] + if not isinstance(id, str): + continue + target_uri = self._urljoin_cache(self.resolution_scope, id) + if target_uri.rstrip("/") == uri.rstrip("/"): + if fragment: + subschema = self.resolve_fragment(subschema, fragment) + self.store[url] = subschema + return url, subschema + return None + + def resolve(self, ref): + """ + Resolve the given reference. + """ + url = self._urljoin_cache(self.resolution_scope, ref).rstrip("/") + + match = self._find_in_subschemas(url) + if match is not None: + return match + + return url, self._remote_cache(url) + + def resolve_from_url(self, url): + """ + Resolve the given URL. + """ + url, fragment = urldefrag(url) + if not url: + url = self.base_uri + + try: + document = self.store[url] + except KeyError: + try: + document = self.resolve_remote(url) + except Exception as exc: + raise exceptions._RefResolutionError(exc) from exc + + return self.resolve_fragment(document, fragment) + + def resolve_fragment(self, document, fragment): + """ + Resolve a ``fragment`` within the referenced ``document``. + + Arguments: + + document: + + The referent document + + fragment (str): + + a URI fragment to resolve within it + + """ + fragment = fragment.lstrip("/") + + if not fragment: + return document + + if document is self.referrer: + find = self._find_in_referrer + else: + + def find(key): + yield from _search_schema(document, _match_keyword(key)) + + for keyword in ["$anchor", "$dynamicAnchor"]: + for subschema in find(keyword): + if fragment == subschema[keyword]: + return subschema + for keyword in ["id", "$id"]: + for subschema in find(keyword): + if "#" + fragment == subschema[keyword]: + return subschema + + # Resolve via path + parts = unquote(fragment).split("/") if fragment else [] + for part in parts: + part = part.replace("~1", "/").replace("~0", "~") + + if isinstance(document, Sequence): + try: # noqa: SIM105 + part = int(part) + except ValueError: + pass + try: + document = document[part] + except (TypeError, LookupError) as err: + raise exceptions._RefResolutionError( + f"Unresolvable JSON pointer: {fragment!r}", + ) from err + + return document + + def resolve_remote(self, uri): + """ + Resolve a remote ``uri``. + + If called directly, does not check the store first, but after + retrieving the document at the specified URI it will be saved in + the store if :attr:`cache_remote` is True. + + .. note:: + + If the requests_ library is present, ``jsonschema`` will use it to + request the remote ``uri``, so that the correct encoding is + detected and used. + + If it isn't, or if the scheme of the ``uri`` is not ``http`` or + ``https``, UTF-8 is assumed. + + Arguments: + + uri (str): + + The URI to resolve + + Returns: + + The retrieved document + + .. 
_requests: https://pypi.org/project/requests/ + + """ + try: + import requests + except ImportError: + requests = None + + scheme = urlsplit(uri).scheme + + if scheme in self.handlers: + result = self.handlers[scheme](uri) + elif scheme in ["http", "https"] and requests: + # Requests has support for detecting the correct encoding of + # json over http + result = requests.get(uri).json() + else: + # Otherwise, pass off to urllib and assume utf-8 + with urlopen(uri) as url: # noqa: S310 + result = json.loads(url.read().decode("utf-8")) + + if self.cache_remote: + self.store[uri] = result + return result + + +_SUBSCHEMAS_KEYWORDS = ("$id", "id", "$anchor", "$dynamicAnchor") + + +def _match_keyword(keyword): + + def matcher(value): + if keyword in value: + yield value + + return matcher + + +def _match_subschema_keywords(value): + for keyword in _SUBSCHEMAS_KEYWORDS: + if keyword in value: + yield keyword, value + + +def _search_schema(schema, matcher): + """Breadth-first search routine.""" + values = deque([schema]) + while values: + value = values.pop() + if not isinstance(value, dict): + continue + yield from matcher(value) + values.extendleft(value.values()) + + +def validate(instance, schema, cls=None, *args, **kwargs): # noqa: D417 + """ + Validate an instance under the given schema. + + >>> validate([2, 3, 4], {"maxItems": 2}) + Traceback (most recent call last): + ... + ValidationError: [2, 3, 4] is too long + + :func:`~jsonschema.validators.validate` will first verify that the + provided schema is itself valid, since not doing so can lead to less + obvious error messages and fail in less obvious or consistent ways. + + If you know you have a valid schema already, especially + if you intend to validate multiple instances with + the same schema, you likely would prefer using the + `jsonschema.protocols.Validator.validate` method directly on a + specific validator (e.g. ``Draft202012Validator.validate``). + + + Arguments: + + instance: + + The instance to validate + + schema: + + The schema to validate with + + cls (jsonschema.protocols.Validator): + + The class that will be used to validate the instance. + + If the ``cls`` argument is not provided, two things will happen + in accordance with the specification. First, if the schema has a + :kw:`$schema` keyword containing a known meta-schema [#]_ then the + proper validator will be used. The specification recommends that + all schemas contain :kw:`$schema` properties for this reason. If no + :kw:`$schema` property is found, the default validator class is the + latest released draft. + + Any other provided positional and keyword arguments will be passed + on when instantiating the ``cls``. + + Raises: + + `jsonschema.exceptions.ValidationError`: + + if the instance is invalid + + `jsonschema.exceptions.SchemaError`: + + if the schema itself is invalid + + .. rubric:: Footnotes + .. [#] known by a validator registered with + `jsonschema.validators.validates` + + """ + if cls is None: + cls = validator_for(schema) + + cls.check_schema(schema) + validator = cls(schema, *args, **kwargs) + error = exceptions.best_match(validator.iter_errors(instance)) + if error is not None: + raise error + + +def validator_for( + schema, + default: Validator | _utils.Unset = _UNSET, +) -> type[Validator]: + """ + Retrieve the validator class appropriate for validating the given schema. + + Uses the :kw:`$schema` keyword that should be present in the given + schema to look up the appropriate validator class. 
+ + Arguments: + + schema (collections.abc.Mapping or bool): + + the schema to look at + + default: + + the default to return if the appropriate validator class + cannot be determined. + + If unprovided, the default is to return the latest supported + draft. + + Examples: + + The :kw:`$schema` JSON Schema keyword will control which validator + class is returned: + + >>> schema = { + ... "$schema": "https://json-schema.org/draft/2020-12/schema", + ... "type": "integer", + ... } + >>> jsonschema.validators.validator_for(schema) + <class 'jsonschema.validators.Draft202012Validator'> + + + Here, a draft 7 schema instead will return the draft 7 validator: + + >>> schema = { + ... "$schema": "http://json-schema.org/draft-07/schema#", + ... "type": "integer", + ... } + >>> jsonschema.validators.validator_for(schema) + <class 'jsonschema.validators.Draft7Validator'> + + + Schemas with no ``$schema`` keyword will fallback to the default + argument: + + >>> schema = {"type": "integer"} + >>> jsonschema.validators.validator_for( + ... schema, default=Draft7Validator, + ... ) + <class 'jsonschema.validators.Draft7Validator'> + + or if none is provided, to the latest version supported. + Always including the keyword when authoring schemas is highly + recommended. + + """ + DefaultValidator = _LATEST_VERSION if default is _UNSET else default + + if schema is True or schema is False or "$schema" not in schema: + return DefaultValidator + if schema["$schema"] not in _META_SCHEMAS and default is _UNSET: + warn( + ( + "The metaschema specified by $schema was not found. " + "Using the latest draft to validate, but this will raise " + "an error in the future." + ), + DeprecationWarning, + stacklevel=2, + ) + return _META_SCHEMAS.get(schema["$schema"], DefaultValidator) |
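
A minimal usage sketch for the validators module shown in the diff above (this is not part of the vendored file; the schema, the printed results, and the variable names below are illustrative assumptions). It exercises only APIs that appear in the diff: validator_for() selecting a validator class from the $schema keyword, check_schema() validating the schema itself, and is_valid()/iter_errors() on a validator instance.

# sketch.py -- assumes the jsonschema package installed above is importable
from jsonschema import validators

# Example schema (illustrative only); $schema points at the 2020-12 dialect,
# so validator_for() is expected to return Draft202012Validator here.
schema = {
    "$schema": "https://json-schema.org/draft/2020-12/schema",
    "type": "object",
    "properties": {"name": {"type": "string"}},
    "required": ["name"],
}

Validator = validators.validator_for(schema)
Validator.check_schema(schema)   # raises jsonschema.exceptions.SchemaError if the schema is invalid
validator = Validator(schema)

print(validator.is_valid({"name": "x"}))    # expected: True
for error in validator.iter_errors({"name": 1}):
    # each error is a jsonschema.exceptions.ValidationError
    print(error.message)                    # e.g. "1 is not of type 'string'"

Instantiating the validator once and reusing it, as in this sketch, avoids re-checking the schema on every call, which is the pattern the validate() docstring above recommends for validating multiple instances against the same schema.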